repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
anwfr/XChange | xchange-btcchina/src/main/java/org/knowm/xchange/btcchina/dto/trade/BTCChinaIcebergOrder.java | 2045 | package org.knowm.xchange.btcchina.dto.trade;
import java.math.BigDecimal;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Immutable DTO for an iceberg order returned by the BTCChina trade API.
 * <p>
 * Instances are created by Jackson via the annotated constructor; the
 * {@code @JsonProperty} names map the snake_case JSON fields onto camelCase
 * Java fields.
 */
public class BTCChinaIcebergOrder {

  /** Order id assigned by the exchange. */
  private final int id;
  /** Order type string as reported by the API. */
  private final String type;
  private final BigDecimal price;
  private final String market;
  /** Current amount (JSON field {@code amount}). */
  private final BigDecimal amount;
  /** Originally submitted amount (JSON field {@code amount_original}). */
  private final BigDecimal amountOriginal;
  /** Visible slice of the iceberg (JSON field {@code disclosed_amount}). */
  private final BigDecimal disclosedAmount;
  private final BigDecimal variance;
  // NOTE(review): units (seconds vs millis) of this timestamp are not
  // visible here -- confirm against the API docs before converting.
  private final long date;
  private final String status;
  /** Child orders (JSON field {@code order}); may be null. */
  private final BTCChinaOrder[] orders;

  /**
   * Constructor used by Jackson for deserialization.
   *
   * @param id order id
   * @param type order type string
   * @param price limit price
   * @param market market symbol
   * @param amount current amount
   * @param amountOriginal originally submitted amount
   * @param disclosedAmount publicly disclosed slice
   * @param variance allowed variance of the disclosed slice
   * @param date timestamp reported by the API
   * @param status order status string
   * @param orders child orders, possibly null
   */
  public BTCChinaIcebergOrder(@JsonProperty("id") int id, @JsonProperty("type") String type, @JsonProperty("price") BigDecimal price,
      @JsonProperty("market") String market, @JsonProperty("amount") BigDecimal amount, @JsonProperty("amount_original") BigDecimal amountOriginal,
      @JsonProperty("disclosed_amount") BigDecimal disclosedAmount, @JsonProperty("variance") BigDecimal variance, @JsonProperty("date") long date,
      @JsonProperty("status") String status, @JsonProperty("order") BTCChinaOrder[] orders) {
    // Redundant explicit super() removed; the class extends Object.
    this.id = id;
    this.type = type;
    this.price = price;
    this.market = market;
    this.amount = amount;
    this.amountOriginal = amountOriginal;
    this.disclosedAmount = disclosedAmount;
    this.variance = variance;
    this.date = date;
    this.status = status;
    this.orders = orders;
  }

  public int getId() {
    return id;
  }

  public String getType() {
    return type;
  }

  public BigDecimal getPrice() {
    return price;
  }

  public String getMarket() {
    return market;
  }

  public BigDecimal getAmount() {
    return amount;
  }

  public BigDecimal getAmountOriginal() {
    return amountOriginal;
  }

  public BigDecimal getDisclosedAmount() {
    return disclosedAmount;
  }

  public BigDecimal getVariance() {
    return variance;
  }

  public long getDate() {
    return date;
  }

  public String getStatus() {
    return status;
  }

  // NOTE(review): returns the internal array; callers can mutate shared
  // state. A defensive copy would be safer but might break existing callers,
  // so behavior is preserved here.
  public BTCChinaOrder[] getOrders() {
    return orders;
  }
}
| mit |
jongerrish/robolectric | shadows/httpclient/src/main/java/org/robolectric/shadows/httpclient/ParamsParser.java | 1783 | package org.robolectric.shadows.httpclient;
import android.net.Uri;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.http.HttpEntity;
import org.apache.http.HttpRequest;
import org.apache.http.NameValuePair;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URLEncodedUtils;
/**
 * Extracts request parameters from Apache HttpClient requests as a
 * name-to-value map, preserving encounter order.
 * <p>
 * GET requests are parsed from the query string; entity-enclosing requests
 * (POST/PUT/...) are parsed from a URL-encoded body. Any other request kind
 * yields an empty map.
 */
public class ParamsParser {

  /**
   * Returns the parameters of {@code request} as an insertion-ordered map.
   * Unsupported request types produce an empty, mutable map.
   */
  public static Map<String, String> parseParams(HttpRequest request) {
    if (request instanceof HttpGet) {
      return parseParamsForGet(request);
    }
    if (request instanceof HttpEntityEnclosingRequestBase) {
      return parseParamsForRequestWithEntity((HttpEntityEnclosingRequestBase) request);
    }
    return new LinkedHashMap<>();
  }

  /** Parses URL-encoded name/value pairs out of the request body, if any. */
  private static Map<String, String> parseParamsForRequestWithEntity(HttpEntityEnclosingRequestBase request) {
    try {
      LinkedHashMap<String, String> params = new LinkedHashMap<>();
      HttpEntity body = request.getEntity();
      if (body != null) {
        List<NameValuePair> decoded = URLEncodedUtils.parse(body);
        for (NameValuePair entry : decoded) {
          params.put(entry.getName(), entry.getValue());
        }
      }
      return params;
    } catch (IOException e) {
      // Reading the entity failed; surface it as an unchecked error.
      throw new RuntimeException(e);
    }
  }

  /** Parses the query string of a GET request's URI. */
  private static Map<String, String> parseParamsForGet(HttpRequest request) {
    Uri requestUri = Uri.parse(request.getRequestLine().getUri());
    LinkedHashMap<String, String> params = new LinkedHashMap<>();
    Set<String> names = requestUri.getQueryParameterNames();
    for (String name : names) {
      params.put(name, requestUri.getQueryParameter(name));
    }
    return params;
  }
}
| mit |
feiyue/maven-framework-project | spring-cache-example/src/main/java/org/spring/cache/example/WebController.java | 600 | package org.spring.cache.example;
import java.util.HashMap;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
/**
 * Spring MVC controller that serves the index page with a message looked up
 * from {@link EmployeeService} (which is cache-backed elsewhere in this
 * example project -- presumably; verify against the service class).
 */
@Controller
public class WebController {

  // NOTE(review): field injection; constructor injection would be easier to
  // test, but is left unchanged to preserve the existing wiring.
  @Autowired
  EmployeeService employeeService;

  /**
   * Handles GET/POST to /index.htm.
   *
   * @param id  optional employee id; may be null since required=false
   * @param map model map exposed to the "index" view under key "message"
   * @return the logical view name "index"
   */
  @RequestMapping("/index.htm")
  public String homePage(@RequestParam(required = false) Integer id,
      HashMap<String, String> map) {
    map.put("message", employeeService.getEmployee(id));
    return "index";
  }
}
| mit |
williamClanton/singularity | weka/src/main/java/weka/classifiers/IntervalEstimator.java | 1852 | /*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
* IntervalEstimator.java
* Copyright (C) 2005 University of Waikato, Hamilton, New Zealand
*
*/
package weka.classifiers;
import weka.core.Instance;
/**
* Interface for classifiers that can output confidence intervals
*
* @author Kurt Driessens (kurtd@cs.waikato.ac.nz)
* @version $Revision: 1.2 $
*/
public interface IntervalEstimator {

  /**
   * Returns an N*2 array, where N is the number of possible classes, estimating
   * the boundaries of the confidence interval with the confidence level given by
   * the second parameter. Every row of the returned array gives the probability
   * estimates for a single class. In the case of numeric predictions, a single
   * confidence interval is returned.
   *
   * @param inst the instance to make the prediction for.
   * @param confidenceLevel the percentage of cases the interval should cover.
   * @return an array of confidence intervals (one for each class)
   * @exception Exception if the intervals can't be computed
   */
  double[][] predictInterval(Instance inst, double confidenceLevel) throws Exception;
}
| mit |
java8compiler/OpenTeleporter | src/main/java/li/cil/oc/api/manual/ContentProvider.java | 1155 | package li.cil.oc.api.manual;
/**
* This interface allows implementation of content providers for the manual.
* <p/>
* Content providers can be used to provide possibly dynamic page content for
* arbitrary paths. Note that content providers have <em>lower</em> priority
* than content found in resource packs, i.e. content providers will only be
* queried for missing pages, so to speak.
* <p/>
*
* @see li.cil.oc.api.prefab.ResourceContentProvider
*/
public interface ContentProvider {

  /**
   * Called to get the content of the manual page at the specified path.
   * <p/>
   * Implementations should return an iterable over the lines of a Markdown
   * document (using the formatting supported by the in-game manual, which is
   * a small subset of "normal" Markdown).
   * <p/>
   * If this provider cannot provide the requested path, it must return
   * <tt>null</tt>, allowing other providers to be queried for it.
   *
   * @param path the path to the manual page we're looking for.
   * @return the content of the document at that path, or <tt>null</tt>.
   */
  Iterable<String> getContent(String path);
}
| mit |
snjeza/che | wsagent/che-core-api-project/src/main/java/org/eclipse/che/api/project/server/ZipProjectImporter.java | 3709 | /*******************************************************************************
* Copyright (c) 2012-2017 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.api.project.server;
import org.eclipse.che.api.core.ConflictException;
import org.eclipse.che.api.core.ForbiddenException;
import org.eclipse.che.api.core.ServerException;
import org.eclipse.che.api.core.model.project.SourceStorage;
import org.eclipse.che.api.core.util.LineConsumerFactory;
import org.eclipse.che.api.project.server.importer.ProjectImporter;
import javax.inject.Singleton;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Map;
import static org.eclipse.che.api.project.shared.Constants.ZIP_IMPORTER_ID;
/**
* @author Vitaly Parfonov
*/
/**
 * {@link ProjectImporter} that populates a project folder from a ZIP archive.
 * The archive location may be an http(s) URL, a classpath resource, or a
 * local file path.
 *
 * @author Vitaly Parfonov
 */
@Singleton
public class ZipProjectImporter implements ProjectImporter {

    @Override
    public String getId() {
        return ZIP_IMPORTER_ID;
    }

    @Override
    public boolean isInternal() {
        return false;
    }

    @Override
    public String getDescription() {
        return "Import project from ZIP archive under a public URL.";
    }

    @Override
    public void importSources(FolderEntry baseFolder, SourceStorage storage) throws ForbiddenException,
                                                                                    ConflictException,
                                                                                    IOException,
                                                                                    ServerException {
        // Delegate to the full variant with a no-op output consumer.
        importSources(baseFolder, storage, LineConsumerFactory.NULL);
    }

    @Override
    public void importSources(FolderEntry baseFolder,
                              SourceStorage storage,
                              LineConsumerFactory importOutputConsumerFactory) throws ForbiddenException,
                                                                                      ConflictException,
                                                                                      IOException,
                                                                                      ServerException {
        String location = storage.getLocation();
        URL zipUrl = resolveLocation(location);
        if (zipUrl == null) {
            throw new IOException(String.format("Can't find %s", location));
        }
        Map<String, String> parameters = storage.getParameters();
        try (InputStream zip = zipUrl.openStream()) {
            // "skipFirstLevel" strips the archive's single top-level folder.
            int stripNumber = 0;
            if (parameters != null && parameters.containsKey("skipFirstLevel")) {
                stripNumber = Boolean.parseBoolean(parameters.get("skipFirstLevel")) ? 1 : 0;
            }
            baseFolder.getVirtualFile().unzip(zip, true, stripNumber);
        }
    }

    /**
     * Resolves the storage location to a URL: http(s) URL as-is, otherwise a
     * classpath resource, otherwise an existing local file. Returns null when
     * nothing matches.
     */
    private static URL resolveLocation(String location) throws IOException {
        if (location.startsWith("http://") || location.startsWith("https://")) {
            return new URL(location);
        }
        URL url = Thread.currentThread().getContextClassLoader().getResource(location);
        if (url == null) {
            final java.io.File file = new java.io.File(location);
            if (file.exists()) {
                url = file.toURI().toURL();
            }
        }
        return url;
    }

    @Override
    public ImporterCategory getCategory() {
        return ImporterCategory.ARCHIVE;
    }
}
| epl-1.0 |
amolenaar/fitnesse | src/fitnesse/wiki/PageType.java | 2095 | package fitnesse.wiki;
import static fitnesse.wiki.PageData.*;
/**
 * Wiki page classification: Suite, Test, or Static. Each constant supplies
 * its own naming heuristic via {@link #validForPageName(String)}; the
 * descriptor string doubles as the page-data attribute name.
 */
public enum PageType {
  SUITE("Suite") {
    @Override
    public boolean validForPageName(String pageName) {
      // "Suite*" names except the special set-up/tear-down pages,
      // plus "*Suite" and "*Examples".
      return (pageName.startsWith(toString())
          && !pageName.equals(SUITE_SETUP_NAME) && !pageName.equals(SUITE_TEARDOWN_NAME))
          || pageName.endsWith(toString()) || pageName.endsWith("Examples");
    }
  },
  TEST("Test") {
    @Override
    public boolean validForPageName(String pageName) {
      // "Test*", "*Test", "Example*" (but not "Examples*"), or "*Example".
      return pageName.startsWith(toString())
          || pageName.endsWith(toString())
          || (pageName.startsWith("Example") && !pageName
              .startsWith("Examples")) || pageName.endsWith("Example");
    }
  },
  STATIC("Static") {
    @Override
    public boolean validForPageName(String pageName) {
      // Fallback type: every name is acceptable.
      return true;
    }
  };

  /** Descriptor string; immutable (was a mutable field). */
  private final String description;

  PageType(String description) {
    this.description = description;
  }

  /**
   * Looks up a type by its descriptor, case-insensitively.
   *
   * @throws IllegalArgumentException for an unknown descriptor
   */
  public static PageType fromString(String typeDescriptor) {
    for (PageType type: PageType.values()) {
      if (type.description.equalsIgnoreCase(typeDescriptor)) {
        return type;
      }
    }
    throw new IllegalArgumentException("unknown page type descriptor: " + typeDescriptor);
  }

  /**
   * Derives the type from a page's data attributes; SUITE wins over TEST,
   * STATIC is the default.
   */
  public static PageType fromWikiPage(WikiPage page) {
    PageData data = page.getData();
    if (data.hasAttribute(SUITE.toString())) {
      return SUITE;
    }
    if (data.hasAttribute(TEST.toString())) {
      return TEST;
    }
    return STATIC;
  }

  /** Returns the first type whose naming heuristic accepts the name (STATIC matches everything). */
  public static PageType getPageTypeForPageName(String pageName) {
    for (PageType type: values()) {
      if (type.validForPageName(pageName))
        return type;
    }
    return STATIC;
  }

  /** Returns the descriptor strings of all types, in declaration order. */
  public static String [] valuesAsString(){
    PageType [] ee = PageType.values();
    String [] stringArray = new String [ee.length];
    for (int i = 0; i < ee.length; i++) {
      stringArray[i] = ee[i].toString();
    }
    return stringArray;
  }

  @Override
  public String toString() {
    return description;
  }

  /** True when the naming convention for this type accepts {@code pageName}. */
  public abstract boolean validForPageName(String pageName);
}
| epl-1.0 |
md-5/jdk10 | src/java.naming/share/classes/com/sun/jndi/toolkit/ctx/ComponentContext.java | 29120 | /*
* Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.jndi.toolkit.ctx;
import javax.naming.*;
import javax.naming.spi.ResolveResult;
/**
* Provides implementation of p_* operations using
* c_* operations provided by subclasses.
*
* Clients: deal only with names for its own naming service. Must
* provide implementations for c_* methods, and for p_parseComponent()
* and the c_*_nns methods if the defaults are not appropriate.
*
* @author Rosanna Lee
* @author Scott Seligman
*/
public abstract class ComponentContext extends PartialCompositeContext {
private static int debug = 0;
// Marks this context as a component context so the PartialCompositeContext
// machinery dispatches through the p_* implementations below.
protected ComponentContext() {
    _contextType = _COMPONENT;
}

// ------ Abstract methods whose implementation are provided by subclass

/* Equivalent methods in Context interface. Each receives a name already
   reduced to this naming system's components plus a Continuation carrying
   federation state across naming systems. */
protected abstract Object c_lookup(Name name, Continuation cont)
    throws NamingException;
protected abstract Object c_lookupLink(Name name, Continuation cont)
    throws NamingException;
protected abstract NamingEnumeration<NameClassPair> c_list(Name name,
    Continuation cont) throws NamingException;
protected abstract NamingEnumeration<Binding> c_listBindings(Name name,
    Continuation cont) throws NamingException;
protected abstract void c_bind(Name name, Object obj, Continuation cont)
    throws NamingException;
protected abstract void c_rebind(Name name, Object obj, Continuation cont)
    throws NamingException;
protected abstract void c_unbind(Name name, Continuation cont)
    throws NamingException;
protected abstract void c_destroySubcontext(Name name, Continuation cont)
    throws NamingException;
protected abstract Context c_createSubcontext(Name name,
    Continuation cont) throws NamingException;
protected abstract void c_rename(Name oldname, Name newname,
    Continuation cont) throws NamingException;
protected abstract NameParser c_getNameParser(Name name, Continuation cont)
    throws NamingException;
// ------ Methods that may need to be overridden by subclass
/* Parsing method */
/**
* Determines which of the first components of 'name' belong
* to this naming system.
* If no components belong to this naming system, return
* the empty name (new CompositeName()) as the head,
* and the entire name as the tail.
*
* The default implementation supports strong separation.
* If the name is empty or if the first component is empty,
* head is the empty name and tail is the entire name.
* (This means that this context does not have any name to work with).
* Otherwise, it returns the first component as head, and the rest of
* the components as tail.
*
* Subclass should override this method according its own policies.
*
* For example, a weakly separated system with dynamic boundary
* determination would simply return as head 'name'.
* A weakly separated with static boundary
* determination would select the components in the front of 'name'
* that conform to some syntax rules. (e.g. in X.500 syntax, perhaps
* select front components that have a equal sign).
* If none conforms, return an empty name.
*/
protected HeadTail p_parseComponent(Name name, Continuation cont)
    throws NamingException {
    int separator;
    // If there is no name to parse, or we're already at the boundary
    // (leading empty component), consume nothing.
    if (name.isEmpty() || name.get(0).isEmpty()) {
        separator = 0;
    } else {
        separator = 1;
    }
    Name head, tail;

    if (name instanceof CompositeName) {
        head = name.getPrefix(separator);
        tail = name.getSuffix(separator);
    } else {
        // Treat like a compound name: the whole name belongs to this
        // naming system, so there is no tail.
        head = new CompositeName().add(name.toString());
        tail = null;
    }

    if (debug > 2) {
        // Fixed: this trace previously printed 'name' as the prefix and a
        // literal null as the suffix, making the output useless.
        System.err.println("ORIG: " + name);
        System.err.println("PREFIX: " + head);
        System.err.println("SUFFIX: " + tail);
    }
    return new HeadTail(head, tail);
}
/* Resolution method for supporting federation */
/**
* Resolves the nns for 'name' when the named context is acting
* as an intermediate context.
*
* For a system that supports only junctions, this would be
* equivalent to
* c_lookup(name, cont);
* because for junctions, an intermediate slash simply signifies
* a syntactic separator.
*
* For a system that supports only implicit nns, this would be
* equivalent to
* c_lookup_nns(name, cont);
* because for implicit nns, a slash always signifies the implicit nns,
* regardless of whether it is intermediate or trailing.
*
* By default this method supports junctions, and also allows for an
* implicit nns to be dynamically determined through the use of the
* "nns" reference (see c_processJunction_nns()).
* Contexts that implement implicit nns directly should provide an
* appropriate override.
*
* A junction, by definition, is a binding of a name in one
* namespace to an object in another. The default implementation
* of this method detects the crossover into another namespace
* using the following heuristic: there is a junction when "name"
* resolves to a context that is not an instance of
* this.getClass(). Contexts supporting junctions for which this
* heuristic is inappropriate should override this method.
*/
protected Object c_resolveIntermediate_nns(Name name, Continuation cont)
    throws NamingException {
    try {
        final Object obj = c_lookup(name, cont);

        // Do not append "" to Continuation 'cont' even if set
        // because the intention is to ignore the nns

        if (obj != null && getClass().isInstance(obj)) {
            // If "obj" is in the same type as this object, it must
            // not be a junction. Continue the lookup with "/".
            cont.setContinueNNS(obj, name, this);
            return null;
        } else if (obj != null && !(obj instanceof Context)) {
            // obj is not even a context, so try to find its nns
            // dynamically by constructing a Reference containing obj.
            RefAddr addr = new RefAddr("nns") {
                public Object getContent() {
                    return obj;
                }
                private static final long serialVersionUID =
                    -8831204798861786362L;
            };
            Reference ref = new Reference("java.lang.Object", addr);

            // Resolved name has trailing slash to indicate nns
            CompositeName resName = (CompositeName)name.clone();
            resName.add(""); // add trailing slash

            // Set continuation; leave it to
            // PartialCompositeContext.getPCContext() to throw CPE.
            // Do not use setContinueNNS() because we've already
            // consumed "/" (i.e., moved it to resName).
            cont.setContinue(ref, resName, this);
            return null;
        } else {
            // obj is a foreign Context (a junction) or null:
            // consume "/" and continue there.
            return obj;
        }
    } catch (NamingException e) {
        e.appendRemainingComponent(""); // add nns back
        throw e;
    }
}
/* Equivalent of Context Methods for supporting nns */
// The following methods are called when the Context methods
// are invoked with a name that has a trailing slash.
// For naming systems that support implicit nns,
// the trailing slash signifies the implicit nns.
// For such naming systems, override these c_*_nns methods.
//
// For naming systems that do not support implicit nns, the
// default implementations here throw an exception. See
// c_processJunction_nns() for details.
// Default implementations of the *_nns operations: each simply defers to
// c_processJunction_nns(), which either continues the operation at a
// junction or sets up a CannotProceedException via the continuation.
// Contexts with a real implicit nns override these.
protected Object c_lookup_nns(Name name, Continuation cont)
    throws NamingException {
    c_processJunction_nns(name, cont);
    return null;
}

protected Object c_lookupLink_nns(Name name, Continuation cont)
    throws NamingException {
    c_processJunction_nns(name, cont);
    return null;
}

protected NamingEnumeration<NameClassPair> c_list_nns(Name name,
    Continuation cont) throws NamingException {
    c_processJunction_nns(name, cont);
    return null;
}

protected NamingEnumeration<Binding> c_listBindings_nns(Name name,
    Continuation cont) throws NamingException {
    c_processJunction_nns(name, cont);
    return null;
}

protected void c_bind_nns(Name name, Object obj, Continuation cont)
    throws NamingException {
    c_processJunction_nns(name, cont);
}

protected void c_rebind_nns(Name name, Object obj, Continuation cont)
    throws NamingException {
    c_processJunction_nns(name, cont);
}

protected void c_unbind_nns(Name name, Continuation cont)
    throws NamingException {
    c_processJunction_nns(name, cont);
}

protected Context c_createSubcontext_nns(Name name,
    Continuation cont) throws NamingException {
    c_processJunction_nns(name, cont);
    return null;
}

protected void c_destroySubcontext_nns(Name name, Continuation cont)
    throws NamingException {
    c_processJunction_nns(name, cont);
}

// Rename is anchored at the old name's context.
protected void c_rename_nns(Name oldname, Name newname, Continuation cont)
    throws NamingException {
    c_processJunction_nns(oldname, cont);
}

protected NameParser c_getNameParser_nns(Name name, Continuation cont)
    throws NamingException {
    c_processJunction_nns(name, cont);
    return null;
}
// ------ internal method used by ComponentContext
/**
* Locates the nns using the default policy. This policy fully
* handles junctions, but otherwise throws an exception when an
* attempt is made to resolve an implicit nns.
*
* The default policy is as follows: If there is a junction in
* the namespace, then resolve to the junction and continue the
* operation there (thus deferring to that context to find its own
* nns). Otherwise, resolve as far as possible and then throw
* CannotProceedException with the resolved object being a reference:
* the address type is "nns", and the address contents is this
* context.
*
* For example, when c_bind_nns(name, obj, ...) is invoked, the
* caller is attempting to bind the object "obj" to the nns of
* "name". If "name" is a junction, it names an object in another
* naming system that (presumably) has an nns. c_bind_nns() will
* first resolve "name" to a context and then attempt to continue
* the bind operation there, (thus binding to the nns of the
* context named by "name"). If "name" is empty then throw an
* exception, since this context does not by default support an
* implicit nns.
*
* To implement a context that does support an implicit nns, it is
* necessary to override this default policy. This is done by
* overriding the c_*_nns() methods (which each call this method
* by default).
*/
protected void c_processJunction_nns(Name name, Continuation cont)
    throws NamingException
{
    if (name.isEmpty()) {
        // Empty name means "the nns of this very context". This context
        // has no implicit nns, so construct a Reference that contains this
        // context under the "nns" address type.
        RefAddr addr = new RefAddr("nns") {
            public Object getContent() {
                return ComponentContext.this;
            }
            private static final long serialVersionUID =
                -1389472957988053402L;
        };
        Reference ref = new Reference("java.lang.Object", addr);

        // Set continuation; leave it to
        // PartialCompositeContext.getPCContext() to throw the exception.
        // Do not use setContinueNNS() because we are
        // setting relativeResolvedName to "/".
        cont.setContinue(ref, _NNS_NAME, this);
        return;
    }

    try {
        // Non-empty name: resolve it (possibly a junction) and continue
        // the operation in its nns.
        Object target = c_lookup(name, cont);
        if (cont.isContinue())
            cont.appendRemainingComponent("");
        else {
            cont.setContinueNNS(target, name, this);
        }
    } catch (NamingException e) {
        e.appendRemainingComponent(""); // add nns back
        throw e;
    }
}
// Status codes returned via HeadTail.getStatus() from p_resolveIntermediate:
// USE_CONTINUATION       - continuation already set (or exception thrown);
// TERMINAL_COMPONENT     - head is terminal, operate on it directly;
// TERMINAL_NNS_COMPONENT - head is terminal but targets its nns ("head/").
protected static final byte USE_CONTINUATION = 1;
protected static final byte TERMINAL_COMPONENT = 2;
protected static final byte TERMINAL_NNS_COMPONENT = 3;

/**
 * Determine whether 'name' is a terminal component in
 * this naming system.
 * If so, return status indicating so, so that caller
 * can perform context operation on this name.
 *
 * If not, then the first component(s) of 'name' names
 * an intermediate context. In that case, resolve these components
 * and set Continuation to be the object named.
 *
 * see test cases at bottom of file.
 */
protected HeadTail p_resolveIntermediate(Name name, Continuation cont)
    throws NamingException {
    int ret = USE_CONTINUATION;
    cont.setSuccess();      // initialize
    HeadTail p = p_parseComponent(name, cont);
    Name tail = p.getTail();
    Name head = p.getHead();

    if (tail == null || tail.isEmpty()) {
        // Nothing follows head: it is a terminal component.
        //System.out.println("terminal : " + head);
        ret = TERMINAL_COMPONENT;
    } else if (!tail.get(0).isEmpty()) {
        // tail does not begin with "/"
        /*
        if (head.isEmpty()) {
            // Context could not find name that it can use
            // illegal syntax error or name not found
            //System.out.println("nnf exception : " + head);
            NamingException e = new NameNotFoundException();
            cont.setError(this, name);
            throw cont.fillInException(e);
        } else {
        */
        // head is being used as intermediate context,
        // resolve head and set Continuation with tail
        try {
            Object obj = c_resolveIntermediate_nns(head, cont);
            //System.out.println("resInter : " + head + "=" + obj);
            if (obj != null)
                cont.setContinue(obj, head, this, tail);
            else if (cont.isContinue()) {
                checkAndAdjustRemainingName(cont.getRemainingName());
                cont.appendRemainingName(tail);
            }
        } catch (NamingException e) {
            checkAndAdjustRemainingName(e.getRemainingName());
            e.appendRemainingName(tail);
            throw e;
        }
        /*
        }
        */
    } else {
        // tail begins with "/"
        if (tail.size() == 1) {
            // "head/" -- terminal component targeting head's nns.
            ret = TERMINAL_NNS_COMPONENT;
            //System.out.println("terminal_nns : " + head);
        } else if (head.isEmpty() || isAllEmpty(tail)) {
            // resolve nns of head and continue with tail.getSuffix(1)
            Name newTail = tail.getSuffix(1);
            try {
                Object obj = c_lookup_nns(head, cont);
                //System.out.println("lookup_nns : " + head + "=" + obj);
                if (obj != null)
                    cont.setContinue(obj, head, this, newTail);
                else if (cont.isContinue()) {
                    cont.appendRemainingName(newTail);
                    // Name rname = cont.getRemainingName();
                    //System.out.println("cont.rname" + rname);
                }
            } catch (NamingException e) {
                e.appendRemainingName(newTail);
                throw e;
            }
        } else {
            // head is being used as intermediate context
            // resolve and set continuation to tail
            try {
                Object obj = c_resolveIntermediate_nns(head, cont);
                //System.out.println("resInter2 : " + head + "=" + obj);
                if (obj != null)
                    cont.setContinue(obj, head, this, tail);
                else if (cont.isContinue()) {
                    checkAndAdjustRemainingName(cont.getRemainingName());
                    cont.appendRemainingName(tail);
                }
            } catch (NamingException e) {
                checkAndAdjustRemainingName(e.getRemainingName());
                e.appendRemainingName(tail);
                throw e;
            }
        }
    }

    p.setStatus(ret);
    return p;
}
// When c_resolveIntermediate_nns() or c_lookup_nns() sets up
// its continuation, to indicate "nns", it appends an empty
// component to the remaining name (e.g. "eng/"). If last
// component of remaining name is empty; delete empty component
// before appending tail so that composition of the names work
// correctly. For example, when merging "eng/" and "c.b.a", we want
// the result to be "eng/c.b.a" because the trailing slash in eng
// is extraneous. When merging "" and "c.b.a", we want the result
// to be "/c.b.a" and so must keep the trailing slash (empty name).
// Strips a single trailing empty component from rname (in place) when the
// name has more than one component; a lone empty name is kept so that
// composing e.g. "" + "c.b.a" still yields "/c.b.a" (see comment above).
void checkAndAdjustRemainingName(Name rname) throws InvalidNameException {
    int count;
    if (rname != null && (count=rname.size()) > 1 &&
        rname.get(count-1).isEmpty()) {
        rname.remove(count-1);
    }
}
// Returns true when every component of n is the empty string
// (vacuously true for an empty name).
protected boolean isAllEmpty(Name n) {
    for (int idx = n.size() - 1; idx >= 0; idx--) {
        if (!n.get(idx).isEmpty()) {
            return false;
        }
    }
    return true;
}
// ------ implementations of p_ Resolver and Context methods using
// ------ corresponding c_ and c_*_nns methods
/* implementation for Resolver method */
// Resolves 'name' until an object assignable to contextType is found;
// returns null (with cont set appropriately) when none is found here.
protected ResolveResult p_resolveToClass(Name name,
                                         Class<?> contextType,
                                         Continuation cont)
        throws NamingException {

    if (contextType.isInstance(this)) {
        // This context itself satisfies the request; nothing consumed.
        cont.setSuccess();
        return (new ResolveResult(this, name));
    }

    ResolveResult ret = null;
    HeadTail res = p_resolveIntermediate(name, cont);
    switch (res.getStatus()) {
    case TERMINAL_NNS_COMPONENT:
        Object obj = p_lookup(name, cont);
        if (!cont.isContinue() && contextType.isInstance(obj)) {
            ret = new ResolveResult(obj, _EMPTY_NAME);
        }
        break;

    case TERMINAL_COMPONENT:
        cont.setSuccess();      // no contextType found; return null
        break;

    default:
        /* USE_CONTINUATION */
        /* pcont already set or exception thrown */
        break;
    }
    return ret;
}
/* implementations of p_ Context methods */
// Lookup dispatcher: resolves intermediates, then performs the terminal
// lookup. LinkRef results are not returned directly -- they are handed back
// via the continuation so the caller can dereference the link.
protected Object p_lookup(Name name, Continuation cont) throws NamingException {
    Object ret = null;
    HeadTail res = p_resolveIntermediate(name, cont);
    switch (res.getStatus()) {
    case TERMINAL_NNS_COMPONENT:
        ret = c_lookup_nns(res.getHead(), cont);
        if (ret instanceof LinkRef) {
            cont.setContinue(ret, res.getHead(), this);
            ret = null;
        }
        break;

    case TERMINAL_COMPONENT:
        ret = c_lookup(res.getHead(), cont);
        if (ret instanceof LinkRef) {
            cont.setContinue(ret, res.getHead(), this);
            ret = null;
        }
        break;

    default:
        /* USE_CONTINUATION */
        /* pcont already set or exception thrown */
        break;
    }
    return ret;
}
// List dispatcher: delegates to c_list or c_list_nns depending on whether
// the terminal component targets the context or its nns.
protected NamingEnumeration<NameClassPair> p_list(Name name, Continuation cont)
    throws NamingException {
    NamingEnumeration<NameClassPair> ret = null;
    HeadTail res = p_resolveIntermediate(name, cont);
    switch (res.getStatus()) {
    case TERMINAL_NNS_COMPONENT:
        if (debug > 0)
            System.out.println("c_list_nns(" + res.getHead() + ")");
        ret = c_list_nns(res.getHead(), cont);
        break;

    case TERMINAL_COMPONENT:
        if (debug > 0)
            System.out.println("c_list(" + res.getHead() + ")");
        ret = c_list(res.getHead(), cont);
        break;

    default:
        /* USE_CONTINUATION */
        /* cont already set or exception thrown */
        break;
    }
    return ret;
}
// ListBindings dispatcher: routes the terminal component to the plain or
// nns variant of c_listBindings; otherwise the continuation carries on.
protected NamingEnumeration<Binding> p_listBindings(Name name, Continuation cont) throws
    NamingException {
    HeadTail parsed = p_resolveIntermediate(name, cont);
    int status = parsed.getStatus();
    if (status == TERMINAL_NNS_COMPONENT) {
        return c_listBindings_nns(parsed.getHead(), cont);
    }
    if (status == TERMINAL_COMPONENT) {
        return c_listBindings(parsed.getHead(), cont);
    }
    // USE_CONTINUATION: continuation already set or exception thrown.
    return null;
}
// The following p_* dispatchers all share one shape: resolve intermediates
// via p_resolveIntermediate, then route the terminal component to either
// the plain c_* operation or its *_nns counterpart; for USE_CONTINUATION
// the continuation has already been set (or an exception thrown).

// Binds obj at the terminal component (or its nns).
protected void p_bind(Name name, Object obj, Continuation cont) throws
    NamingException {
    HeadTail res = p_resolveIntermediate(name, cont);
    switch (res.getStatus()) {
    case TERMINAL_NNS_COMPONENT:
        c_bind_nns(res.getHead(), obj, cont);
        break;

    case TERMINAL_COMPONENT:
        c_bind(res.getHead(), obj, cont);
        break;

    default:
        /* USE_CONTINUATION */
        /* cont already set or exception thrown */
        break;
    }
}

// Rebinds (overwrites) obj at the terminal component (or its nns).
protected void p_rebind(Name name, Object obj, Continuation cont) throws
    NamingException {
    HeadTail res = p_resolveIntermediate(name, cont);
    switch (res.getStatus()) {
    case TERMINAL_NNS_COMPONENT:
        c_rebind_nns(res.getHead(), obj, cont);
        break;

    case TERMINAL_COMPONENT:
        c_rebind(res.getHead(), obj, cont);
        break;

    default:
        /* USE_CONTINUATION */
        /* cont already set or exception thrown */
        break;
    }
}

// Unbinds the terminal component (or its nns).
protected void p_unbind(Name name, Continuation cont) throws
    NamingException {
    HeadTail res = p_resolveIntermediate(name, cont);
    switch (res.getStatus()) {
    case TERMINAL_NNS_COMPONENT:
        c_unbind_nns(res.getHead(), cont);
        break;

    case TERMINAL_COMPONENT:
        c_unbind(res.getHead(), cont);
        break;

    default:
        /* USE_CONTINUATION */
        /* cont already set or exception thrown */
        break;
    }
}

// Destroys the subcontext named by the terminal component (or its nns).
protected void p_destroySubcontext(Name name, Continuation cont) throws
    NamingException {
    HeadTail res = p_resolveIntermediate(name, cont);
    switch (res.getStatus()) {
    case TERMINAL_NNS_COMPONENT:
        c_destroySubcontext_nns(res.getHead(), cont);
        break;

    case TERMINAL_COMPONENT:
        c_destroySubcontext(res.getHead(), cont);
        break;

    default:
        /* USE_CONTINUATION */
        /* cont already set or exception thrown */
        break;
    }
}

// Creates a subcontext at the terminal component (or its nns);
// returns null when the continuation carries on.
protected Context p_createSubcontext(Name name, Continuation cont) throws
    NamingException {
    Context ret = null;
    HeadTail res = p_resolveIntermediate(name, cont);
    switch (res.getStatus()) {
    case TERMINAL_NNS_COMPONENT:
        ret = c_createSubcontext_nns(res.getHead(), cont);
        break;

    case TERMINAL_COMPONENT:
        ret = c_createSubcontext(res.getHead(), cont);
        break;

    default:
        /* USE_CONTINUATION */
        /* cont already set or exception thrown */
        break;
    }
    return ret;
}

// Renames the terminal component (or its nns) of oldName to newName.
protected void p_rename(Name oldName, Name newName, Continuation cont) throws
    NamingException {
    HeadTail res = p_resolveIntermediate(oldName, cont);
    switch (res.getStatus()) {
    case TERMINAL_NNS_COMPONENT:
        c_rename_nns(res.getHead(), newName, cont);
        break;

    case TERMINAL_COMPONENT:
        c_rename(res.getHead(), newName, cont);
        break;

    default:
        /* USE_CONTINUATION */
        /* cont already set or exception thrown */
        break;
    }
}

// Returns the NameParser of the terminal component (or its nns);
// null when the continuation carries on.
protected NameParser p_getNameParser(Name name, Continuation cont) throws
    NamingException {
    NameParser ret = null;
    HeadTail res = p_resolveIntermediate(name, cont);
    switch (res.getStatus()) {
    case TERMINAL_NNS_COMPONENT:
        ret = c_getNameParser_nns(res.getHead(), cont);
        break;

    case TERMINAL_COMPONENT:
        ret = c_getNameParser(res.getHead(), cont);
        break;

    default:
        /* USE_CONTINUATION */
        /* cont already set or exception thrown */
        break;
    }
    return ret;
}
protected Object p_lookupLink(Name name, Continuation cont)
throws NamingException {
Object ret = null;
HeadTail res = p_resolveIntermediate(name, cont);
switch (res.getStatus()) {
case TERMINAL_NNS_COMPONENT:
ret = c_lookupLink_nns(res.getHead(), cont);
break;
case TERMINAL_COMPONENT:
ret = c_lookupLink(res.getHead(), cont);
break;
default:
/* USE_CONTINUATION */
/* cont already set or exception thrown */
break;
}
return ret;
}
}
/*
* How p_resolveIntermediate() should behave for various test cases
a.b/x {a.b, x}
c_resolveIntermediate_nns(a.b)
continue(x)
{x,}
terminal(x)
a.b/ {a.b, ""}
terminal_nns(a.b);
a.b//
{a.b, ("", "")}
c_lookup_nns(a.b)
continue({""})
{,""}
terminal_nns({})
/x {{}, {"", x}}
c_lookup_nns({})
continue(x)
{x,}
terminal(x)
//y {{}, {"", "", y}}
c_lookup_nns({})
continue({"", y})
{{}, {"", y}}
c_lookup_nns({})
continue(y)
{y,}
terminal(y)
a.b//y {a.b, {"", y}}
c_resolveIntermediate_nns(a.b)
continue({"", y})
{{}, {"",y}}
c_lookup_nns({});
continue(y)
{y,}
terminal(y);
*
*/
| gpl-2.0 |
Distrotech/icedtea7-2.3 | generated/org/omg/CosNaming/NamingContextPackage/AlreadyBoundHelper.java | 2322 | package org.omg.CosNaming.NamingContextPackage;
/**
* org/omg/CosNaming/NamingContextPackage/AlreadyBoundHelper.java .
* Generated by the IDL-to-Java compiler (portable), version "3.2"
* from ../../../../src/share/classes/org/omg/CosNaming/nameservice.idl
* Friday, May 25, 2007 3:39:57 o'clock PM GMT-05:00
*/
// IDL-generated helper (see file header): provides Any insertion/extraction,
// TypeCode construction, and CDR marshalling for the AlreadyBound exception.
// Generated code -- do not hand-edit the logic.
abstract public class AlreadyBoundHelper
{
  // Repository ID identifying the AlreadyBound exception type on the wire.
  private static String _id = "IDL:omg.org/CosNaming/NamingContext/AlreadyBound:1.0";

  // Stores {@code that} into the Any {@code a}: marshals the value into a
  // fresh output stream, then has the Any read it back with this helper's
  // TypeCode.
  public static void insert (org.omg.CORBA.Any a, org.omg.CosNaming.NamingContextPackage.AlreadyBound that)
  {
    org.omg.CORBA.portable.OutputStream out = a.create_output_stream ();
    a.type (type ());
    write (out, that);
    a.read_value (out.create_input_stream (), type ());
  }

  // Extracts an AlreadyBound value from the Any by unmarshalling its
  // input stream.
  public static org.omg.CosNaming.NamingContextPackage.AlreadyBound extract (org.omg.CORBA.Any a)
  {
    return read (a.create_input_stream ());
  }

  // Lazily-built TypeCode cache; __active guards against infinite
  // recursion while the TypeCode is being constructed.
  private static org.omg.CORBA.TypeCode __typeCode = null;
  private static boolean __active = false;

  // Returns the (cached) TypeCode for AlreadyBound.  If invoked
  // re-entrantly during construction (__active), returns a recursive
  // TypeCode placeholder instead.
  synchronized public static org.omg.CORBA.TypeCode type ()
  {
    if (__typeCode == null)
    {
      synchronized (org.omg.CORBA.TypeCode.class)
      {
        if (__typeCode == null)
        {
          if (__active)
          {
            return org.omg.CORBA.ORB.init().create_recursive_tc ( _id );
          }
          __active = true;
          // AlreadyBound carries no members, so the struct-member list is empty.
          org.omg.CORBA.StructMember[] _members0 = new org.omg.CORBA.StructMember [0];
          org.omg.CORBA.TypeCode _tcOf_members0 = null;
          __typeCode = org.omg.CORBA.ORB.init ().create_exception_tc (org.omg.CosNaming.NamingContextPackage.AlreadyBoundHelper.id (), "AlreadyBound", _members0);
          __active = false;
        }
      }
    }
    return __typeCode;
  }

  // Returns the repository ID for AlreadyBound.
  public static String id ()
  {
    return _id;
  }

  // Unmarshals an AlreadyBound: only the repository ID is on the wire,
  // since the exception has no members.
  public static org.omg.CosNaming.NamingContextPackage.AlreadyBound read (org.omg.CORBA.portable.InputStream istream)
  {
    org.omg.CosNaming.NamingContextPackage.AlreadyBound value = new org.omg.CosNaming.NamingContextPackage.AlreadyBound ();
    // read and discard the repository ID
    istream.read_string ();
    return value;
  }

  // Marshals an AlreadyBound: only the repository ID is written, since
  // the exception has no members.
  public static void write (org.omg.CORBA.portable.OutputStream ostream, org.omg.CosNaming.NamingContextPackage.AlreadyBound value)
  {
    // write the repository ID
    ostream.write_string (id ());
  }
}
| gpl-2.0 |
teamfx/openjfx-9-dev-rt | modules/javafx.graphics/src/main/java/com/sun/javafx/font/MacFontFinder.java | 4324 | /*
* Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.javafx.font;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Locale;
import com.sun.glass.utils.NativeLibLoader;
/**
 * Queries macOS (via native CoreText calls in the javafx_font library)
 * for the system/monospaced fonts and for the full list of installed
 * fonts, and populates Prism's font lookup maps.
 */
class MacFontFinder {

    static {
        // Load the native glue library in a privileged block so loading
        // succeeds even when invoked from less-privileged code.
        AccessController.doPrivileged(
            (PrivilegedAction<Void>) () -> {
                NativeLibLoader.loadLibrary("javafx_font");
                return null;
            }
        );
    }

    private static final int SystemFontType = 2; /*kCTFontSystemFontType*/
    private static final int MonospacedFontType = 1; /*kCTFontUserFixedPitchFontType*/

    /** Returns the name of the CoreText font of the given type. */
    private native static String getFont(int type);

    /** Returns the name of the system (UI) font. */
    public static String getSystemFont() {
        return getFont(SystemFontType);
    }

    /** Returns the name of the user's fixed-pitch (monospaced) font. */
    public static String getMonospacedFont() {
        return getFont(MonospacedFontType);
    }

    /** Returns the default point size of the system font. */
    native static float getSystemFontSize();

    /**
     * Populates the three font lookup maps from the native font
     * enumeration.
     *
     * @param fontToFileMap maps lower-cased font name to font file path
     * @param fontToFamilyNameMap maps lower-cased font name to family name
     * @param familyToFontListMap maps lower-cased family name to the list
     *        of font names in that family
     * @param locale locale used for lower-casing keys; {@code ENGLISH}
     *        is substituted when {@code null}
     * @return {@code true} if the maps were populated; {@code false} when
     *         any map is {@code null} or the native enumeration failed
     */
    public static boolean populateFontFileNameMap(
        HashMap<String,String> fontToFileMap,
        HashMap<String,String> fontToFamilyNameMap,
        HashMap<String,ArrayList<String>> familyToFontListMap,
        Locale locale) {

        if (fontToFileMap == null ||
            fontToFamilyNameMap == null ||
            familyToFontListMap == null) {
            return false;
        }
        if (locale == null) {
            locale = Locale.ENGLISH;
        }

        String[] fontData = getFontData();
        if (fontData == null) return false;

        // fontData is a flat list of (name, family, file) triples.  Bound
        // the loop with "i + 2" so a truncated (non-multiple-of-3) array
        // returned by native code cannot cause an
        // ArrayIndexOutOfBoundsException; previously the condition was
        // "i < fontData.length", which reads past the end in that case.
        int i = 0;
        while (i + 2 < fontData.length) {
            String name = fontData[i++];
            String family = fontData[i++];
            String file = fontData[i++];
            if (!PrismFontFactory.useNativeRasterizer) {
                /* Skip OTF/CID keyed fonts for T2K (RT-15755) */
                if (file.endsWith(".otf")) {
                    if (name.indexOf(" Pro W") != -1) continue;
                    if (name.indexOf(" ProN W") != -1) continue;
                    if (name.indexOf(" Std W") != -1) continue;
                    if (name.indexOf(" StdN W") != -1) continue;
                    if (name.indexOf("Hiragino") != -1) continue;
                }
            }
            if (PrismFontFactory.debugFonts) {
                System.err.println("[MacFontFinder] Name=" + name);
                System.err.println("\tFamily=" + family);
                System.err.println("\tFile=" + file);
            }
            String lcName = name.toLowerCase(locale);
            String lcFamily = family.toLowerCase(locale);
            fontToFileMap.put(lcName, file);
            fontToFamilyNameMap.put(lcName, family);
            // Group font names by family, creating each list on first use.
            familyToFontListMap
                .computeIfAbsent(lcFamily, k -> new ArrayList<String>())
                .add(name);
        }
        return true;
    }

    /**
     * Native enumeration of installed fonts.
     *
     * @return a flat array of (PostScript name, family name, file path)
     *         triples, or {@code null} on failure
     */
    private native static String[] getFontData();
}
| gpl-2.0 |
sommerc/bioformats | components/formats-gpl/test/loci/formats/utests/xml/OMEXMLServiceTest.java | 2953 | /*
* #%L
* OME Bio-Formats package for reading and converting biological file formats.
* %%
* Copyright (C) 2005 - 2015 Open Microscopy Environment:
* - Board of Regents of the University of Wisconsin-Madison
* - Glencoe Software, Inc.
* - University of Dundee
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
package loci.formats.utests.xml;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import loci.common.services.DependencyException;
import loci.common.services.ServiceException;
import loci.common.services.ServiceFactory;
import loci.formats.services.OMEXMLService;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
*
* @author Chris Allan <callan at blackcat dot ca>
*/
public class OMEXMLServiceTest {

  /** Classpath resource containing the OME-XML document under test. */
  private static final String XML_FILE = "2008-09.ome";

  private OMEXMLService service;

  /** Full text of {@link #XML_FILE}, loaded in {@link #setUp()}. */
  private String xml;

  /**
   * Obtains an {@link OMEXMLService} and loads the test XML document.
   *
   * @throws DependencyException if the service cannot be instantiated
   * @throws IOException if the resource is missing or cannot be read
   */
  @BeforeMethod
  public void setUp() throws DependencyException, IOException {
    ServiceFactory sf = new ServiceFactory();
    service = sf.getInstance(OMEXMLService.class);
    InputStream s = OMEXMLServiceTest.class.getResourceAsStream(XML_FILE);
    if (s == null) {
      throw new IOException("Test resource not found: " + XML_FILE);
    }
    try {
      // Read the stream to exhaustion.  The previous implementation sized
      // a buffer with InputStream.available() and issued a single read(),
      // but neither call is guaranteed to cover the whole stream, which
      // could silently truncate the XML.
      ByteArrayOutputStream bytes = new ByteArrayOutputStream();
      byte[] buf = new byte[8192];
      int n;
      while ((n = s.read(buf)) != -1) {
        bytes.write(buf, 0, n);
      }
      // Platform default charset, matching the original new String(byte[]).
      xml = bytes.toString();
    }
    finally {
      s.close();
    }
  }

  @Test
  public void testGetLatestVersion() {
    assertEquals("2015-01", service.getLatestVersion());
  }

  @Test
  public void testCreateEmptyOMEXMLMetadata() throws ServiceException {
    assertNotNull(service.createOMEXMLMetadata());
  }

  @Test
  public void testCreateOMEXMLMetadata() throws ServiceException {
    assertNotNull(service.createOMEXMLMetadata(xml));
  }

  @Test
  public void testCreateOMEXMLRoot() throws ServiceException {
    assertNotNull(service.createOMEXMLRoot(xml));
  }

  @Test
  public void isOMEXMLMetadata() throws ServiceException {
    assertEquals(true,
      service.isOMEXMLMetadata(service.createOMEXMLMetadata()));
  }

  @Test
  public void getOMEXMLVersion() throws ServiceException {
    assertEquals("2015-01",
      service.getOMEXMLVersion(service.createOMEXMLMetadata(xml)));
  }

  @Test
  public void getOMEXML() throws ServiceException {
    assertNotNull(service.getOMEXML(service.createOMEXMLMetadata(xml)));
  }
}
| gpl-2.0 |
traff/intellij-ocaml | OCamlSources/src/manuylov/maxim/ocaml/lang/parser/psi/element/OCamlCommaExpression.java | 954 | /*
* OCaml Support For IntelliJ Platform.
* Copyright (C) 2010 Maxim Manuylov
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/gpl-2.0.html>.
*/
package manuylov.maxim.ocaml.lang.parser.psi.element;
/**
* @author Maxim.Manuylov
* Date: 21.03.2009
*/
// Marker interface for a comma-separated expression node in the OCaml
// PSI tree; adds no members beyond OCamlExpression.  (Presumably this
// corresponds to OCaml's comma/tuple expression syntax -- confirm
// against the parser that produces it.)
public interface OCamlCommaExpression extends OCamlExpression {
}
| gpl-2.0 |
Taichi-SHINDO/jdk9-jdk | test/java/awt/Modal/ModalBlockingTests/BlockingWindowsSetModal3Test.java | 1818 | /*
* Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 8049617
* @summary Check whether a modal Dialog created with a hidden Frame
* constructor receives focus; whether its components receive focus
* and respond to key events, when there are other windows shown.
* Also check the correctness of blocking behavior for other windows shown.
*
* @library ../helpers ../../../../lib/testlibrary/
* @build ExtendedRobot
* @build Flag
* @build TestDialog
* @build TestFrame
* @build TestWindow
* @run main BlockingWindowsSetModal3Test
*/
public class BlockingWindowsSetModal3Test {

    /**
     * Entry point: runs the shared blocking-windows scenario with the
     * modal dialog owned by a hidden Frame.
     */
    public static void main(String[] args) throws Exception {
        BlockingWindowsTest test =
            new BlockingWindowsTest(BlockingWindowsTest.DialogOwner.HIDDEN_FRAME);
        test.doTest();
    }
}
| gpl-2.0 |
YouDiSN/OpenJDK-Research | jdk9/jdk/src/java.base/share/classes/java/math/BigDecimal.java | 235922 | /*
* Copyright (c) 1996, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* Portions Copyright IBM Corporation, 2001. All Rights Reserved.
*/
package java.math;
import static java.math.BigInteger.LONG_MASK;
import java.util.Arrays;
/**
* Immutable, arbitrary-precision signed decimal numbers. A
* {@code BigDecimal} consists of an arbitrary precision integer
* <i>unscaled value</i> and a 32-bit integer <i>scale</i>. If zero
* or positive, the scale is the number of digits to the right of the
* decimal point. If negative, the unscaled value of the number is
* multiplied by ten to the power of the negation of the scale. The
* value of the number represented by the {@code BigDecimal} is
* therefore <code>(unscaledValue × 10<sup>-scale</sup>)</code>.
*
* <p>The {@code BigDecimal} class provides operations for
* arithmetic, scale manipulation, rounding, comparison, hashing, and
* format conversion. The {@link #toString} method provides a
* canonical representation of a {@code BigDecimal}.
*
* <p>The {@code BigDecimal} class gives its user complete control
* over rounding behavior. If no rounding mode is specified and the
* exact result cannot be represented, an exception is thrown;
* otherwise, calculations can be carried out to a chosen precision
* and rounding mode by supplying an appropriate {@link MathContext}
* object to the operation. In either case, eight <em>rounding
* modes</em> are provided for the control of rounding. Using the
* integer fields in this class (such as {@link #ROUND_HALF_UP}) to
* represent rounding mode is deprecated; the enumeration values
* of the {@code RoundingMode} {@code enum}, (such as {@link
* RoundingMode#HALF_UP}) should be used instead.
*
* <p>When a {@code MathContext} object is supplied with a precision
* setting of 0 (for example, {@link MathContext#UNLIMITED}),
* arithmetic operations are exact, as are the arithmetic methods
* which take no {@code MathContext} object. (This is the only
* behavior that was supported in releases prior to 5.) As a
* corollary of computing the exact result, the rounding mode setting
* of a {@code MathContext} object with a precision setting of 0 is
* not used and thus irrelevant. In the case of divide, the exact
* quotient could have an infinitely long decimal expansion; for
* example, 1 divided by 3. If the quotient has a nonterminating
* decimal expansion and the operation is specified to return an exact
* result, an {@code ArithmeticException} is thrown. Otherwise, the
* exact result of the division is returned, as done for other
* operations.
*
* <p>When the precision setting is not 0, the rules of
* {@code BigDecimal} arithmetic are broadly compatible with selected
* modes of operation of the arithmetic defined in ANSI X3.274-1996
* and ANSI X3.274-1996/AM 1-2000 (section 7.4). Unlike those
* standards, {@code BigDecimal} includes many rounding modes, which
* were mandatory for division in {@code BigDecimal} releases prior
* to 5. Any conflicts between these ANSI standards and the
* {@code BigDecimal} specification are resolved in favor of
* {@code BigDecimal}.
*
* <p>Since the same numerical value can have different
* representations (with different scales), the rules of arithmetic
* and rounding must specify both the numerical result and the scale
* used in the result's representation.
*
*
* <p>In general the rounding modes and precision setting determine
* how operations return results with a limited number of digits when
* the exact result has more digits (perhaps infinitely many in the
* case of division and square root) than the number of digits returned.
*
* First, the
* total number of digits to return is specified by the
* {@code MathContext}'s {@code precision} setting; this determines
* the result's <i>precision</i>. The digit count starts from the
* leftmost nonzero digit of the exact result. The rounding mode
* determines how any discarded trailing digits affect the returned
* result.
*
* <p>For all arithmetic operators , the operation is carried out as
* though an exact intermediate result were first calculated and then
* rounded to the number of digits specified by the precision setting
* (if necessary), using the selected rounding mode. If the exact
* result is not returned, some digit positions of the exact result
* are discarded. When rounding increases the magnitude of the
* returned result, it is possible for a new digit position to be
* created by a carry propagating to a leading {@literal "9"} digit.
* For example, rounding the value 999.9 to three digits rounding up
* would be numerically equal to one thousand, represented as
* 100×10<sup>1</sup>. In such cases, the new {@literal "1"} is
* the leading digit position of the returned result.
*
* <p>Besides a logical exact result, each arithmetic operation has a
* preferred scale for representing a result. The preferred
* scale for each operation is listed in the table below.
*
* <table class="plain">
* <caption><b>Preferred Scales for Results of Arithmetic Operations
* </b></caption>
* <thead>
* <tr><th>Operation</th><th>Preferred Scale of Result</th></tr>
* </thead>
* <tbody>
* <tr><td>Add</td><td>max(addend.scale(), augend.scale())</td>
* <tr><td>Subtract</td><td>max(minuend.scale(), subtrahend.scale())</td>
* <tr><td>Multiply</td><td>multiplier.scale() + multiplicand.scale()</td>
* <tr><td>Divide</td><td>dividend.scale() - divisor.scale()</td>
* <tr><td>Square root</td><td>radicand.scale()/2</td>
* </tbody>
* </table>
*
* These scales are the ones used by the methods which return exact
* arithmetic results; except that an exact divide may have to use a
* larger scale since the exact result may have more digits. For
* example, {@code 1/32} is {@code 0.03125}.
*
* <p>Before rounding, the scale of the logical exact intermediate
* result is the preferred scale for that operation. If the exact
* numerical result cannot be represented in {@code precision}
* digits, rounding selects the set of digits to return and the scale
* of the result is reduced from the scale of the intermediate result
* to the least scale which can represent the {@code precision}
* digits actually returned. If the exact result can be represented
* with at most {@code precision} digits, the representation
* of the result with the scale closest to the preferred scale is
* returned. In particular, an exactly representable quotient may be
* represented in fewer than {@code precision} digits by removing
* trailing zeros and decreasing the scale. For example, rounding to
* three digits using the {@linkplain RoundingMode#FLOOR floor}
* rounding mode, <br>
*
* {@code 19/100 = 0.19 // integer=19, scale=2} <br>
*
* but<br>
*
* {@code 21/110 = 0.190 // integer=190, scale=3} <br>
*
* <p>Note that for add, subtract, and multiply, the reduction in
* scale will equal the number of digit positions of the exact result
* which are discarded. If the rounding causes a carry propagation to
* create a new high-order digit position, an additional digit of the
* result is discarded than when no new digit position is created.
*
* <p>Other methods may have slightly different rounding semantics.
* For example, the result of the {@code pow} method using the
* {@linkplain #pow(int, MathContext) specified algorithm} can
* occasionally differ from the rounded mathematical result by more
* than one unit in the last place, one <i>{@linkplain #ulp() ulp}</i>.
*
* <p>Two types of operations are provided for manipulating the scale
* of a {@code BigDecimal}: scaling/rounding operations and decimal
* point motion operations. Scaling/rounding operations ({@link
* #setScale setScale} and {@link #round round}) return a
* {@code BigDecimal} whose value is approximately (or exactly) equal
* to that of the operand, but whose scale or precision is the
* specified value; that is, they increase or decrease the precision
* of the stored number with minimal effect on its value. Decimal
* point motion operations ({@link #movePointLeft movePointLeft} and
* {@link #movePointRight movePointRight}) return a
* {@code BigDecimal} created from the operand by moving the decimal
* point a specified distance in the specified direction.
*
* <p>For the sake of brevity and clarity, pseudo-code is used
* throughout the descriptions of {@code BigDecimal} methods. The
* pseudo-code expression {@code (i + j)} is shorthand for "a
* {@code BigDecimal} whose value is that of the {@code BigDecimal}
* {@code i} added to that of the {@code BigDecimal}
* {@code j}." The pseudo-code expression {@code (i == j)} is
* shorthand for "{@code true} if and only if the
* {@code BigDecimal} {@code i} represents the same value as the
* {@code BigDecimal} {@code j}." Other pseudo-code expressions
* are interpreted similarly. Square brackets are used to represent
* the particular {@code BigInteger} and scale pair defining a
* {@code BigDecimal} value; for example [19, 2] is the
* {@code BigDecimal} numerically equal to 0.19 having a scale of 2.
*
*
* <p>All methods and constructors for this class throw
* {@code NullPointerException} when passed a {@code null} object
* reference for any input parameter.
*
* @apiNote Care should be exercised if {@code BigDecimal} objects
* are used as keys in a {@link java.util.SortedMap SortedMap} or
* elements in a {@link java.util.SortedSet SortedSet} since
* {@code BigDecimal}'s <i>natural ordering</i> is <em>inconsistent
* with equals</em>. See {@link Comparable}, {@link
* java.util.SortedMap} or {@link java.util.SortedSet} for more
* information.
*
* @see BigInteger
* @see MathContext
* @see RoundingMode
* @see java.util.SortedMap
* @see java.util.SortedSet
* @author Josh Bloch
* @author Mike Cowlishaw
* @author Joseph D. Darcy
* @author Sergey V. Kuksenko
* @since 1.1
*/
public class BigDecimal extends Number implements Comparable<BigDecimal> {
/**
* The unscaled value of this BigDecimal, as returned by {@link
* #unscaledValue}.
*
* @serial
* @see #unscaledValue
*/
private final BigInteger intVal;
/**
* The scale of this BigDecimal, as returned by {@link #scale}.
*
* @serial
* @see #scale
*/
private final int scale; // Note: this may have any value, so
// calculations must be done in longs
/**
* The number of decimal digits in this BigDecimal, or 0 if the
* number of digits are not known (lookaside information). If
* nonzero, the value is guaranteed correct. Use the precision()
* method to obtain and set the value if it might be 0. This
* field is mutable until set nonzero.
*
* @since 1.5
*/
private transient int precision;
/**
* Used to store the canonical string representation, if computed.
*/
private transient String stringCache;
/**
* Sentinel value for {@link #intCompact} indicating the
* significand information is only available from {@code intVal}.
*/
static final long INFLATED = Long.MIN_VALUE;
private static final BigInteger INFLATED_BIGINT = BigInteger.valueOf(INFLATED);
/**
* If the absolute value of the significand of this BigDecimal is
* less than or equal to {@code Long.MAX_VALUE}, the value can be
* compactly stored in this field and used in computations.
*/
private final transient long intCompact;
// All 18-digit base ten strings fit into a long; not all 19-digit
// strings will
private static final int MAX_COMPACT_DIGITS = 18;
/* Appease the serialization gods */
private static final long serialVersionUID = 6108874887143696463L;
private static final ThreadLocal<StringBuilderHelper>
threadLocalStringBuilderHelper = new ThreadLocal<StringBuilderHelper>() {
@Override
protected StringBuilderHelper initialValue() {
return new StringBuilderHelper();
}
};
// Cache of common small BigDecimal values.
private static final BigDecimal ZERO_THROUGH_TEN[] = {
new BigDecimal(BigInteger.ZERO, 0, 0, 1),
new BigDecimal(BigInteger.ONE, 1, 0, 1),
new BigDecimal(BigInteger.TWO, 2, 0, 1),
new BigDecimal(BigInteger.valueOf(3), 3, 0, 1),
new BigDecimal(BigInteger.valueOf(4), 4, 0, 1),
new BigDecimal(BigInteger.valueOf(5), 5, 0, 1),
new BigDecimal(BigInteger.valueOf(6), 6, 0, 1),
new BigDecimal(BigInteger.valueOf(7), 7, 0, 1),
new BigDecimal(BigInteger.valueOf(8), 8, 0, 1),
new BigDecimal(BigInteger.valueOf(9), 9, 0, 1),
new BigDecimal(BigInteger.TEN, 10, 0, 2),
};
// Cache of zero scaled by 0 - 15
private static final BigDecimal[] ZERO_SCALED_BY = {
ZERO_THROUGH_TEN[0],
new BigDecimal(BigInteger.ZERO, 0, 1, 1),
new BigDecimal(BigInteger.ZERO, 0, 2, 1),
new BigDecimal(BigInteger.ZERO, 0, 3, 1),
new BigDecimal(BigInteger.ZERO, 0, 4, 1),
new BigDecimal(BigInteger.ZERO, 0, 5, 1),
new BigDecimal(BigInteger.ZERO, 0, 6, 1),
new BigDecimal(BigInteger.ZERO, 0, 7, 1),
new BigDecimal(BigInteger.ZERO, 0, 8, 1),
new BigDecimal(BigInteger.ZERO, 0, 9, 1),
new BigDecimal(BigInteger.ZERO, 0, 10, 1),
new BigDecimal(BigInteger.ZERO, 0, 11, 1),
new BigDecimal(BigInteger.ZERO, 0, 12, 1),
new BigDecimal(BigInteger.ZERO, 0, 13, 1),
new BigDecimal(BigInteger.ZERO, 0, 14, 1),
new BigDecimal(BigInteger.ZERO, 0, 15, 1),
};
// Half of Long.MIN_VALUE & Long.MAX_VALUE.
private static final long HALF_LONG_MAX_VALUE = Long.MAX_VALUE / 2;
private static final long HALF_LONG_MIN_VALUE = Long.MIN_VALUE / 2;
// Constants
/**
* The value 0, with a scale of 0.
*
* @since 1.5
*/
public static final BigDecimal ZERO =
ZERO_THROUGH_TEN[0];
/**
* The value 1, with a scale of 0.
*
* @since 1.5
*/
public static final BigDecimal ONE =
ZERO_THROUGH_TEN[1];
/**
* The value 10, with a scale of 0.
*
* @since 1.5
*/
public static final BigDecimal TEN =
ZERO_THROUGH_TEN[10];
/**
* The value 0.1, with a scale of 1.
*/
private static final BigDecimal ONE_TENTH = valueOf(1L, 1);
/**
* The value 0.5, with a scale of 1.
*/
private static final BigDecimal ONE_HALF = valueOf(5L, 1);
// Constructors
/**
* Trusted package private constructor.
* Trusted simply means if val is INFLATED, intVal could not be null and
* if intVal is null, val could not be INFLATED.
*/
BigDecimal(BigInteger intVal, long val, int scale, int prec) {
this.scale = scale;
this.precision = prec;
this.intCompact = val;
this.intVal = intVal;
}
/**
* Translates a character array representation of a
* {@code BigDecimal} into a {@code BigDecimal}, accepting the
* same sequence of characters as the {@link #BigDecimal(String)}
* constructor, while allowing a sub-array to be specified.
*
* @implNote If the sequence of characters is already available
* within a character array, using this constructor is faster than
* converting the {@code char} array to string and using the
* {@code BigDecimal(String)} constructor.
*
* @param in {@code char} array that is the source of characters.
* @param offset first character in the array to inspect.
* @param len number of characters to consider.
* @throws NumberFormatException if {@code in} is not a valid
* representation of a {@code BigDecimal} or the defined subarray
* is not wholly within {@code in}.
* @since 1.5
*/
    public BigDecimal(char[] in, int offset, int len) {
        // Delegate to the MathContext-taking constructor; UNLIMITED has
        // precision 0, meaning exact (unrounded) parsing of the subarray.
        this(in,offset,len,MathContext.UNLIMITED);
    }
/**
* Translates a character array representation of a
* {@code BigDecimal} into a {@code BigDecimal}, accepting the
* same sequence of characters as the {@link #BigDecimal(String)}
* constructor, while allowing a sub-array to be specified and
* with rounding according to the context settings.
*
* @implNote If the sequence of characters is already available
* within a character array, using this constructor is faster than
* converting the {@code char} array to string and using the
* {@code BigDecimal(String)} constructor.
*
* @param in {@code char} array that is the source of characters.
* @param offset first character in the array to inspect.
* @param len number of characters to consider.
* @param mc the context to use.
* @throws ArithmeticException if the result is inexact but the
* rounding mode is {@code UNNECESSARY}.
* @throws NumberFormatException if {@code in} is not a valid
* representation of a {@code BigDecimal} or the defined subarray
* is not wholly within {@code in}.
* @since 1.5
*/
public BigDecimal(char[] in, int offset, int len, MathContext mc) {
    // protect against huge length; a negative or overflowing sum is
    // rejected here, anything else out-of-range is caught below.
    if (offset + len > in.length || offset < 0)
        throw new NumberFormatException("Bad offset or len arguments for char[] input.");
    // This is the primary string to BigDecimal constructor; all
    // incoming strings end up here; it uses explicit (inline)
    // parsing for speed and generates at most one intermediate
    // (temporary) object (a char[] array) for non-compact case.
    // Use locals for all fields values until completion
    int prec = 0;                 // record precision value
    int scl = 0;                  // record scale value
    long rs = 0;                  // the compact value in long
    BigInteger rb = null;         // the inflated value in BigInteger
    // use array bounds checking to handle too-long, len == 0,
    // bad offset, etc.
    try {
        // handle the sign
        boolean isneg = false;          // assume positive
        if (in[offset] == '-') {
            isneg = true;               // leading minus means negative
            offset++;
            len--;
        } else if (in[offset] == '+') { // leading + allowed
            offset++;
            len--;
        }
        // should now be at numeric part of the significand
        boolean dot = false;            // true when there is a '.'
        long exp = 0;                   // exponent
        char c;                         // current character
        // a significand of at most MAX_COMPACT_DIGITS chars fits in a long
        boolean isCompact = (len <= MAX_COMPACT_DIGITS);
        // integer significand array & idx is the index to it. The array
        // is ONLY used when we can't use a compact representation.
        int idx = 0;
        if (isCompact) {
            // First compact case, we need not to preserve the character
            // and we can just compute the value in place.
            for (; len > 0; offset++, len--) {
                c = in[offset];
                if ((c == '0')) { // have zero
                    if (prec == 0)
                        prec = 1;
                    else if (rs != 0) {
                        rs *= 10;
                        ++prec;
                    } // else digit is a redundant leading zero
                    if (dot)
                        ++scl;
                } else if ((c >= '1' && c <= '9')) { // have digit
                    int digit = c - '0';
                    if (prec != 1 || rs != 0)
                        ++prec; // prec unchanged if preceded by 0s
                    rs = rs * 10 + digit;
                    if (dot)
                        ++scl;
                } else if (c == '.') {   // have dot
                    // have dot
                    if (dot) // two dots
                        throw new NumberFormatException("Character array"
                            + " contains more than one decimal point.");
                    dot = true;
                } else if (Character.isDigit(c)) { // slow path: non-ASCII decimal digit
                    int digit = Character.digit(c, 10);
                    if (digit == 0) {
                        if (prec == 0)
                            prec = 1;
                        else if (rs != 0) {
                            rs *= 10;
                            ++prec;
                        } // else digit is a redundant leading zero
                    } else {
                        if (prec != 1 || rs != 0)
                            ++prec; // prec unchanged if preceded by 0s
                        rs = rs * 10 + digit;
                    }
                    if (dot)
                        ++scl;
                } else if ((c == 'e') || (c == 'E')) {
                    exp = parseExp(in, offset, len);
                    // Next test is required for backwards compatibility
                    if ((int) exp != exp) // overflow
                        throw new NumberFormatException("Exponent overflow.");
                    break; // [saves a test]
                } else {
                    throw new NumberFormatException("Character " + c
                        + " is neither a decimal digit number, decimal point, nor"
                        + " \"e\" notation exponential mark.");
                }
            }
            if (prec == 0) // no digits found
                throw new NumberFormatException("No digits found.");
            // Adjust scale if exp is not zero.
            if (exp != 0) { // had significant exponent
                scl = adjustScale(scl, exp);
            }
            rs = isneg ? -rs : rs;
            int mcp = mc.precision;
            int drop = prec - mcp; // prec has range [1, MAX_INT], mcp has range [0, MAX_INT];
                                   // therefore, this subtract cannot overflow
            if (mcp > 0 && drop > 0) {  // do rounding
                while (drop > 0) {
                    scl = checkScaleNonZero((long) scl - drop);
                    rs = divideAndRound(rs, LONG_TEN_POWERS_TABLE[drop], mc.roundingMode.oldMode);
                    prec = longDigitLength(rs);
                    drop = prec - mcp;
                }
            }
        } else {
            // Non-compact case: collect digits into a char[] and hand
            // them to BigInteger at the end.
            char coeff[] = new char[len];
            for (; len > 0; offset++, len--) {
                c = in[offset];
                // have digit
                if ((c >= '0' && c <= '9') || Character.isDigit(c)) {
                    // First compact case, we need not to preserve the character
                    // and we can just compute the value in place.
                    if (c == '0' || Character.digit(c, 10) == 0) {
                        if (prec == 0) {
                            coeff[idx] = c;
                            prec = 1;
                        } else if (idx != 0) {
                            coeff[idx++] = c;
                            ++prec;
                        } // else c must be a redundant leading zero
                    } else {
                        if (prec != 1 || idx != 0)
                            ++prec; // prec unchanged if preceded by 0s
                        coeff[idx++] = c;
                    }
                    if (dot)
                        ++scl;
                    continue;
                }
                // have dot
                if (c == '.') {
                    // have dot
                    if (dot) // two dots
                        throw new NumberFormatException("Character array"
                            + " contains more than one decimal point.");
                    dot = true;
                    continue;
                }
                // exponent expected
                if ((c != 'e') && (c != 'E'))
                    throw new NumberFormatException("Character array"
                        + " is missing \"e\" notation exponential mark.");
                exp = parseExp(in, offset, len);
                // Next test is required for backwards compatibility
                if ((int) exp != exp) // overflow
                    throw new NumberFormatException("Exponent overflow.");
                break; // [saves a test]
            }
            // here when no characters left
            if (prec == 0) // no digits found
                throw new NumberFormatException("No digits found.");
            // Adjust scale if exp is not zero.
            if (exp != 0) { // had significant exponent
                scl = adjustScale(scl, exp);
            }
            // Remove leading zeros from precision (digits count)
            rb = new BigInteger(coeff, isneg ? -1 : 1, prec);
            rs = compactValFor(rb);
            int mcp = mc.precision;
            if (mcp > 0 && (prec > mcp)) {
                // Round in the BigInteger domain until the value either
                // fits the requested precision or collapses into a long.
                if (rs == INFLATED) {
                    int drop = prec - mcp;
                    while (drop > 0) {
                        scl = checkScaleNonZero((long) scl - drop);
                        rb = divideAndRoundByTenPow(rb, drop, mc.roundingMode.oldMode);
                        rs = compactValFor(rb);
                        if (rs != INFLATED) {
                            prec = longDigitLength(rs);
                            break;
                        }
                        prec = bigDigitLength(rb);
                        drop = prec - mcp;
                    }
                }
                // Finish rounding in the (cheaper) long domain.
                if (rs != INFLATED) {
                    int drop = prec - mcp;
                    while (drop > 0) {
                        scl = checkScaleNonZero((long) scl - drop);
                        rs = divideAndRound(rs, LONG_TEN_POWERS_TABLE[drop], mc.roundingMode.oldMode);
                        prec = longDigitLength(rs);
                        drop = prec - mcp;
                    }
                    rb = null;
                }
            }
        }
    } catch (ArrayIndexOutOfBoundsException | NegativeArraySizeException e) {
        // Any malformed input that slipped past the explicit checks is
        // reported uniformly as a NumberFormatException with the cause kept.
        NumberFormatException nfe = new NumberFormatException();
        nfe.initCause(e);
        throw nfe;
    }
    // Publish the parsed result into the fields.
    this.scale = scl;
    this.precision = prec;
    this.intCompact = rs;
    this.intVal = rb;
}
/**
 * Applies the parsed exponent to the scale (scale - exp), rejecting
 * any result that does not fit in an {@code int}.
 */
private int adjustScale(int scl, long exp) {
    long adjusted = scl - exp;
    if (adjusted < Integer.MIN_VALUE || adjusted > Integer.MAX_VALUE)
        throw new NumberFormatException("Scale out of range.");
    return (int) adjusted;
}
/*
 * parse exponent: {@code in[offset]} is the 'e'/'E' marker; returns the
 * signed exponent value. At most 10 nonzero digits are accepted so the
 * magnitude check against Integer range (done by the caller) stays valid.
 */
private static long parseExp(char[] in, int offset, int len){
    long exp = 0;
    offset++;             // step past the 'e'/'E' marker
    char c = in[offset];
    len--;
    boolean negexp = (c == '-');
    // optional sign
    if (negexp || c == '+') {
        offset++;
        c = in[offset];
        len--;
    }
    if (len <= 0) // no exponent digits
        throw new NumberFormatException("No exponent digits.");
    // skip leading zeros in the exponent
    while (len > 10 && (c=='0' || (Character.digit(c, 10) == 0))) {
        offset++;
        c = in[offset];
        len--;
    }
    if (len > 10) // too many nonzero exponent digits
        throw new NumberFormatException("Too many nonzero exponent digits.");
    // c now holds first digit of exponent
    for (;; len--) {
        int v;
        if (c >= '0' && c <= '9') {
            v = c - '0';
        } else {
            // slow path for non-ASCII decimal digits
            v = Character.digit(c, 10);
            if (v < 0) // not a digit
                throw new NumberFormatException("Not a digit.");
        }
        exp = exp * 10 + v;
        if (len == 1)
            break; // that was final character
        offset++;
        c = in[offset];
    }
    if (negexp) // apply sign
        exp = -exp;
    return exp;
}
/**
* Translates a character array representation of a
* {@code BigDecimal} into a {@code BigDecimal}, accepting the
* same sequence of characters as the {@link #BigDecimal(String)}
* constructor.
*
* @implNote If the sequence of characters is already available
* as a character array, using this constructor is faster than
* converting the {@code char} array to string and using the
* {@code BigDecimal(String)} constructor.
*
* @param in {@code char} array that is the source of characters.
* @throws NumberFormatException if {@code in} is not a valid
* representation of a {@code BigDecimal}.
* @since 1.5
*/
public BigDecimal(char[] in) {
    // Parse the whole array, no rounding.
    this(in, 0, in.length);
}
/**
* Translates a character array representation of a
* {@code BigDecimal} into a {@code BigDecimal}, accepting the
* same sequence of characters as the {@link #BigDecimal(String)}
* constructor and with rounding according to the context
* settings.
*
* @implNote If the sequence of characters is already available
* as a character array, using this constructor is faster than
* converting the {@code char} array to string and using the
* {@code BigDecimal(String)} constructor.
*
* @param in {@code char} array that is the source of characters.
* @param mc the context to use.
* @throws ArithmeticException if the result is inexact but the
* rounding mode is {@code UNNECESSARY}.
* @throws NumberFormatException if {@code in} is not a valid
* representation of a {@code BigDecimal}.
* @since 1.5
*/
public BigDecimal(char[] in, MathContext mc) {
    // Parse the whole array, rounding per the supplied context.
    this(in, 0, in.length, mc);
}
/**
* Translates the string representation of a {@code BigDecimal}
* into a {@code BigDecimal}. The string representation consists
* of an optional sign, {@code '+'} (<code> '\u002B'</code>) or
* {@code '-'} (<code>'\u002D'</code>), followed by a sequence of
* zero or more decimal digits ("the integer"), optionally
* followed by a fraction, optionally followed by an exponent.
*
* <p>The fraction consists of a decimal point followed by zero
* or more decimal digits. The string must contain at least one
* digit in either the integer or the fraction. The number formed
* by the sign, the integer and the fraction is referred to as the
* <i>significand</i>.
*
* <p>The exponent consists of the character {@code 'e'}
* (<code>'\u0065'</code>) or {@code 'E'} (<code>'\u0045'</code>)
* followed by one or more decimal digits. The value of the
* exponent must lie between -{@link Integer#MAX_VALUE} ({@link
* Integer#MIN_VALUE}+1) and {@link Integer#MAX_VALUE}, inclusive.
*
* <p>More formally, the strings this constructor accepts are
* described by the following grammar:
* <blockquote>
* <dl>
* <dt><i>BigDecimalString:</i>
* <dd><i>Sign<sub>opt</sub> Significand Exponent<sub>opt</sub></i>
* <dt><i>Sign:</i>
* <dd>{@code +}
* <dd>{@code -}
* <dt><i>Significand:</i>
* <dd><i>IntegerPart</i> {@code .} <i>FractionPart<sub>opt</sub></i>
* <dd>{@code .} <i>FractionPart</i>
* <dd><i>IntegerPart</i>
* <dt><i>IntegerPart:</i>
* <dd><i>Digits</i>
* <dt><i>FractionPart:</i>
* <dd><i>Digits</i>
* <dt><i>Exponent:</i>
* <dd><i>ExponentIndicator SignedInteger</i>
* <dt><i>ExponentIndicator:</i>
* <dd>{@code e}
* <dd>{@code E}
* <dt><i>SignedInteger:</i>
* <dd><i>Sign<sub>opt</sub> Digits</i>
* <dt><i>Digits:</i>
* <dd><i>Digit</i>
* <dd><i>Digits Digit</i>
* <dt><i>Digit:</i>
* <dd>any character for which {@link Character#isDigit}
* returns {@code true}, including 0, 1, 2 ...
* </dl>
* </blockquote>
*
* <p>The scale of the returned {@code BigDecimal} will be the
* number of digits in the fraction, or zero if the string
* contains no decimal point, subject to adjustment for any
* exponent; if the string contains an exponent, the exponent is
* subtracted from the scale. The value of the resulting scale
* must lie between {@code Integer.MIN_VALUE} and
* {@code Integer.MAX_VALUE}, inclusive.
*
* <p>The character-to-digit mapping is provided by {@link
* java.lang.Character#digit} set to convert to radix 10. The
* String may not contain any extraneous characters (whitespace,
* for example).
*
* <p><b>Examples:</b><br>
* The value of the returned {@code BigDecimal} is equal to
* <i>significand</i> × 10<sup> <i>exponent</i></sup>.
* For each string on the left, the resulting representation
* [{@code BigInteger}, {@code scale}] is shown on the right.
* <pre>
* "0" [0,0]
* "0.00" [0,2]
* "123" [123,0]
* "-123" [-123,0]
* "1.23E3" [123,-1]
* "1.23E+3" [123,-1]
* "12.3E+7" [123,-6]
* "12.0" [120,1]
* "12.3" [123,1]
* "0.00123" [123,5]
* "-1.23E-12" [-123,14]
* "1234.5E-4" [12345,5]
* "0E+7" [0,-7]
* "-0" [0,0]
* </pre>
*
* @apiNote For values other than {@code float} and
* {@code double} NaN and ±Infinity, this constructor is
* compatible with the values returned by {@link Float#toString}
* and {@link Double#toString}. This is generally the preferred
* way to convert a {@code float} or {@code double} into a
* BigDecimal, as it doesn't suffer from the unpredictability of
* the {@link #BigDecimal(double)} constructor.
*
* @param val String representation of {@code BigDecimal}.
*
* @throws NumberFormatException if {@code val} is not a valid
* representation of a {@code BigDecimal}.
*/
public BigDecimal(String val) {
    // Route through the char[] fast path over the entire string.
    this(val.toCharArray(), 0, val.length());
}
/**
* Translates the string representation of a {@code BigDecimal}
* into a {@code BigDecimal}, accepting the same strings as the
* {@link #BigDecimal(String)} constructor, with rounding
* according to the context settings.
*
* @param val string representation of a {@code BigDecimal}.
* @param mc the context to use.
* @throws ArithmeticException if the result is inexact but the
* rounding mode is {@code UNNECESSARY}.
* @throws NumberFormatException if {@code val} is not a valid
* representation of a BigDecimal.
* @since 1.5
*/
public BigDecimal(String val, MathContext mc) {
    // Route through the char[] fast path, rounding per the context.
    this(val.toCharArray(), 0, val.length(), mc);
}
/**
* Translates a {@code double} into a {@code BigDecimal} which
* is the exact decimal representation of the {@code double}'s
* binary floating-point value. The scale of the returned
* {@code BigDecimal} is the smallest value such that
* <code>(10<sup>scale</sup> × val)</code> is an integer.
* <p>
* <b>Notes:</b>
* <ol>
* <li>
* The results of this constructor can be somewhat unpredictable.
* One might assume that writing {@code new BigDecimal(0.1)} in
* Java creates a {@code BigDecimal} which is exactly equal to
* 0.1 (an unscaled value of 1, with a scale of 1), but it is
* actually equal to
* 0.1000000000000000055511151231257827021181583404541015625.
* This is because 0.1 cannot be represented exactly as a
* {@code double} (or, for that matter, as a binary fraction of
* any finite length). Thus, the value that is being passed
* <em>in</em> to the constructor is not exactly equal to 0.1,
* appearances notwithstanding.
*
* <li>
* The {@code String} constructor, on the other hand, is
* perfectly predictable: writing {@code new BigDecimal("0.1")}
* creates a {@code BigDecimal} which is <em>exactly</em> equal to
* 0.1, as one would expect. Therefore, it is generally
* recommended that the {@linkplain #BigDecimal(String)
* String constructor} be used in preference to this one.
*
* <li>
* When a {@code double} must be used as a source for a
* {@code BigDecimal}, note that this constructor provides an
* exact conversion; it does not give the same result as
* converting the {@code double} to a {@code String} using the
* {@link Double#toString(double)} method and then using the
* {@link #BigDecimal(String)} constructor. To get that result,
* use the {@code static} {@link #valueOf(double)} method.
* </ol>
*
* @param val {@code double} value to be converted to
* {@code BigDecimal}.
* @throws NumberFormatException if {@code val} is infinite or NaN.
*/
public BigDecimal(double val) {
    // Exact binary-to-decimal conversion: no rounding is applied.
    this(val, MathContext.UNLIMITED);
}
/**
* Translates a {@code double} into a {@code BigDecimal}, with
* rounding according to the context settings. The scale of the
* {@code BigDecimal} is the smallest value such that
* <code>(10<sup>scale</sup> × val)</code> is an integer.
*
* <p>The results of this constructor can be somewhat unpredictable
* and its use is generally not recommended; see the notes under
* the {@link #BigDecimal(double)} constructor.
*
* @param val {@code double} value to be converted to
* {@code BigDecimal}.
* @param mc the context to use.
* @throws ArithmeticException if the result is inexact but the
* RoundingMode is UNNECESSARY.
* @throws NumberFormatException if {@code val} is infinite or NaN.
* @since 1.5
*/
public BigDecimal(double val, MathContext mc) {
    if (Double.isInfinite(val) || Double.isNaN(val))
        throw new NumberFormatException("Infinite or NaN");
    // Translate the double into sign, exponent and significand, according
    // to the formulae in JLS, Section 20.10.22.
    long valBits = Double.doubleToLongBits(val);
    int sign = ((valBits >> 63) == 0 ? 1 : -1);
    int exponent = (int) ((valBits >> 52) & 0x7ffL);
    // For subnormals (biased exponent 0) there is no implicit leading 1 bit.
    long significand = (exponent == 0
            ? (valBits & ((1L << 52) - 1)) << 1
            : (valBits & ((1L << 52) - 1)) | (1L << 52));
    exponent -= 1075; // unbias and account for the 52 fraction bits
    // At this point, val == sign * significand * 2**exponent.
    /*
     * Special case zero to suppress nonterminating normalization and bogus
     * scale calculation.
     */
    if (significand == 0) {
        this.intVal = BigInteger.ZERO;
        this.scale = 0;
        this.intCompact = 0;
        this.precision = 1;
        return;
    }
    // Normalize: strip trailing zero bits from the significand.
    while ((significand & 1) == 0) { // i.e., significand is even
        significand >>= 1;
        exponent++;
    }
    int scl = 0;
    // Calculate intVal and scale
    BigInteger rb;
    long compactVal = sign * significand;
    if (exponent == 0) {
        rb = (compactVal == INFLATED) ? INFLATED_BIGINT : null;
    } else {
        if (exponent < 0) {
            // c * 2^-e == c * 5^e / 10^e, so scale by 10^-exponent.
            rb = BigInteger.valueOf(5).pow(-exponent).multiply(compactVal);
            scl = -exponent;
        } else { // (exponent > 0)
            rb = BigInteger.TWO.pow(exponent).multiply(compactVal);
        }
        compactVal = compactValFor(rb);
    }
    int prec = 0;
    int mcp = mc.precision;
    if (mcp > 0) { // do rounding
        int mode = mc.roundingMode.oldMode;
        int drop;
        // Round in the BigInteger domain until the value either fits
        // the requested precision or collapses into a long.
        if (compactVal == INFLATED) {
            prec = bigDigitLength(rb);
            drop = prec - mcp;
            while (drop > 0) {
                scl = checkScaleNonZero((long) scl - drop);
                rb = divideAndRoundByTenPow(rb, drop, mode);
                compactVal = compactValFor(rb);
                if (compactVal != INFLATED) {
                    break;
                }
                prec = bigDigitLength(rb);
                drop = prec - mcp;
            }
        }
        // Finish rounding in the (cheaper) long domain.
        if (compactVal != INFLATED) {
            prec = longDigitLength(compactVal);
            drop = prec - mcp;
            while (drop > 0) {
                scl = checkScaleNonZero((long) scl - drop);
                compactVal = divideAndRound(compactVal, LONG_TEN_POWERS_TABLE[drop], mc.roundingMode.oldMode);
                prec = longDigitLength(compactVal);
                drop = prec - mcp;
            }
            rb = null;
        }
    }
    this.intVal = rb;
    this.intCompact = compactVal;
    this.scale = scl;
    this.precision = prec;
}
/**
* Translates a {@code BigInteger} into a {@code BigDecimal}.
* The scale of the {@code BigDecimal} is zero.
*
* @param val {@code BigInteger} value to be converted to
* {@code BigDecimal}.
*/
public BigDecimal(BigInteger val) {
    // Zero scale; keep a compact long form when the value fits.
    this.scale = 0;
    this.intVal = val;
    this.intCompact = compactValFor(val);
}
/**
* Translates a {@code BigInteger} into a {@code BigDecimal}
* rounding according to the context settings. The scale of the
* {@code BigDecimal} is zero.
*
* @param val {@code BigInteger} value to be converted to
* {@code BigDecimal}.
* @param mc the context to use.
* @throws ArithmeticException if the result is inexact but the
* rounding mode is {@code UNNECESSARY}.
* @since 1.5
*/
public BigDecimal(BigInteger val, MathContext mc) {
    // Delegate to the (unscaledVal, scale, mc) constructor with scale 0.
    this(val, 0, mc);
}
/**
* Translates a {@code BigInteger} unscaled value and an
* {@code int} scale into a {@code BigDecimal}. The value of
* the {@code BigDecimal} is
* <code>(unscaledVal × 10<sup>-scale</sup>)</code>.
*
* @param unscaledVal unscaled value of the {@code BigDecimal}.
* @param scale scale of the {@code BigDecimal}.
*/
public BigDecimal(BigInteger unscaledVal, int scale) {
    // Negative scales are now allowed; store the value as-is and
    // keep a compact long form when it fits.
    this.scale = scale;
    this.intVal = unscaledVal;
    this.intCompact = compactValFor(unscaledVal);
}
/**
* Translates a {@code BigInteger} unscaled value and an
* {@code int} scale into a {@code BigDecimal}, with rounding
* according to the context settings. The value of the
* {@code BigDecimal} is <code>(unscaledVal ×
* 10<sup>-scale</sup>)</code>, rounded according to the
* {@code precision} and rounding mode settings.
*
* @param unscaledVal unscaled value of the {@code BigDecimal}.
* @param scale scale of the {@code BigDecimal}.
* @param mc the context to use.
* @throws ArithmeticException if the result is inexact but the
* rounding mode is {@code UNNECESSARY}.
* @since 1.5
*/
public BigDecimal(BigInteger unscaledVal, int scale, MathContext mc) {
    long compactVal = compactValFor(unscaledVal);
    int mcp = mc.precision;
    int prec = 0; // 0 means "precision not yet computed"
    if (mcp > 0) { // do rounding
        int mode = mc.roundingMode.oldMode;
        // Round in the BigInteger domain until the value either fits
        // the requested precision or collapses into a long.
        if (compactVal == INFLATED) {
            prec = bigDigitLength(unscaledVal);
            int drop = prec - mcp;
            while (drop > 0) {
                scale = checkScaleNonZero((long) scale - drop);
                unscaledVal = divideAndRoundByTenPow(unscaledVal, drop, mode);
                compactVal = compactValFor(unscaledVal);
                if (compactVal != INFLATED) {
                    break;
                }
                prec = bigDigitLength(unscaledVal);
                drop = prec - mcp;
            }
        }
        // Finish rounding in the (cheaper) long domain.
        if (compactVal != INFLATED) {
            prec = longDigitLength(compactVal);
            int drop = prec - mcp; // drop can't be more than 18
            while (drop > 0) {
                scale = checkScaleNonZero((long) scale - drop);
                compactVal = divideAndRound(compactVal, LONG_TEN_POWERS_TABLE[drop], mode);
                prec = longDigitLength(compactVal);
                drop = prec - mcp;
            }
            unscaledVal = null; // value now fully held in compactVal
        }
    }
    this.intVal = unscaledVal;
    this.intCompact = compactVal;
    this.scale = scale;
    this.precision = prec;
}
/**
* Translates an {@code int} into a {@code BigDecimal}. The
* scale of the {@code BigDecimal} is zero.
*
* @param val {@code int} value to be converted to
* {@code BigDecimal}.
* @since 1.5
*/
public BigDecimal(int val) {
    // Every int fits in the compact long form; no BigInteger needed.
    this.intVal = null;
    this.scale = 0;
    this.intCompact = val;
}
/**
* Translates an {@code int} into a {@code BigDecimal}, with
* rounding according to the context settings. The scale of the
* {@code BigDecimal}, before any rounding, is zero.
*
* @param val {@code int} value to be converted to {@code BigDecimal}.
* @param mc the context to use.
* @throws ArithmeticException if the result is inexact but the
* rounding mode is {@code UNNECESSARY}.
* @since 1.5
*/
public BigDecimal(int val, MathContext mc) {
    int mcp = mc.precision;
    long compactVal = val; // an int always fits the compact form
    int scl = 0;
    int prec = 0; // 0 means "precision not yet computed"
    if (mcp > 0) { // do rounding
        prec = longDigitLength(compactVal);
        int drop = prec - mcp; // an int has at most 10 digits, so drop <= 9
        while (drop > 0) {
            scl = checkScaleNonZero((long) scl - drop);
            compactVal = divideAndRound(compactVal, LONG_TEN_POWERS_TABLE[drop], mc.roundingMode.oldMode);
            prec = longDigitLength(compactVal);
            drop = prec - mcp;
        }
    }
    this.intVal = null;
    this.intCompact = compactVal;
    this.scale = scl;
    this.precision = prec;
}
/**
* Translates a {@code long} into a {@code BigDecimal}. The
* scale of the {@code BigDecimal} is zero.
*
* @param val {@code long} value to be converted to {@code BigDecimal}.
* @since 1.5
*/
public BigDecimal(long val) {
    this.scale = 0;
    this.intCompact = val;
    // Long.MIN_VALUE collides with the INFLATED sentinel, so that one
    // value must also keep its BigInteger form.
    this.intVal = (val == INFLATED) ? INFLATED_BIGINT : null;
}
/**
* Translates a {@code long} into a {@code BigDecimal}, with
* rounding according to the context settings. The scale of the
* {@code BigDecimal}, before any rounding, is zero.
*
* @param val {@code long} value to be converted to {@code BigDecimal}.
* @param mc the context to use.
* @throws ArithmeticException if the result is inexact but the
* rounding mode is {@code UNNECESSARY}.
* @since 1.5
*/
public BigDecimal(long val, MathContext mc) {
    int mcp = mc.precision;
    int mode = mc.roundingMode.oldMode;
    int prec = 0; // 0 means "precision not yet computed"
    int scl = 0;
    // Long.MIN_VALUE collides with the INFLATED sentinel and must be
    // handled through its BigInteger form.
    BigInteger rb = (val == INFLATED) ? INFLATED_BIGINT : null;
    if (mcp > 0) { // do rounding
        if (val == INFLATED) {
            prec = 19; // digit count of |Long.MIN_VALUE|
            int drop = prec - mcp;
            while (drop > 0) {
                scl = checkScaleNonZero((long) scl - drop);
                rb = divideAndRoundByTenPow(rb, drop, mode);
                val = compactValFor(rb);
                if (val != INFLATED) {
                    break;
                }
                prec = bigDigitLength(rb);
                drop = prec - mcp;
            }
        }
        // Finish rounding in the (cheaper) long domain.
        if (val != INFLATED) {
            prec = longDigitLength(val);
            int drop = prec - mcp;
            while (drop > 0) {
                scl = checkScaleNonZero((long) scl - drop);
                // Use the precomputed 'mode' consistently (the original
                // re-read mc.roundingMode.oldMode here; same value).
                val = divideAndRound(val, LONG_TEN_POWERS_TABLE[drop], mode);
                prec = longDigitLength(val);
                drop = prec - mcp;
            }
            rb = null; // value now fully held in the compact form
        }
    }
    this.intVal = rb;
    this.intCompact = val;
    this.scale = scl;
    this.precision = prec;
}
// Static Factory Methods
/**
* Translates a {@code long} unscaled value and an
* {@code int} scale into a {@code BigDecimal}.
*
* @apiNote This static factory method is provided in preference
* to a ({@code long}, {@code int}) constructor because it allows
* for reuse of frequently used {@code BigDecimal} values.
*
* @param unscaledVal unscaled value of the {@code BigDecimal}.
* @param scale scale of the {@code BigDecimal}.
* @return a {@code BigDecimal} whose value is
* <code>(unscaledVal × 10<sup>-scale</sup>)</code>.
*/
public static BigDecimal valueOf(long unscaledVal, int scale) {
    // Reuse cached instances for the common zero-scale and zero-value cases.
    if (scale == 0) {
        return valueOf(unscaledVal);
    }
    if (unscaledVal == 0) {
        return zeroValueOf(scale);
    }
    BigInteger big = (unscaledVal == INFLATED) ? INFLATED_BIGINT : null;
    return new BigDecimal(big, unscaledVal, scale, 0);
}
/**
* Translates a {@code long} value into a {@code BigDecimal}
* with a scale of zero.
*
* @apiNote This static factory method is provided in preference
* to a ({@code long}) constructor because it allows for reuse of
* frequently used {@code BigDecimal} values.
*
* @param val value of the {@code BigDecimal}.
* @return a {@code BigDecimal} whose value is {@code val}.
*/
public static BigDecimal valueOf(long val) {
    // Small non-negative values come from the shared cache.
    if (0 <= val && val < ZERO_THROUGH_TEN.length) {
        return ZERO_THROUGH_TEN[(int) val];
    }
    BigInteger big = (val == INFLATED) ? INFLATED_BIGINT : null;
    return new BigDecimal(big, val, 0, 0);
}
/**
 * Internal factory: like {@link #valueOf(long, int)} but also carries a
 * precomputed precision (0 when unknown).
 */
static BigDecimal valueOf(long unscaledVal, int scale, int prec) {
    if (scale == 0 && 0 <= unscaledVal && unscaledVal < ZERO_THROUGH_TEN.length) {
        return ZERO_THROUGH_TEN[(int) unscaledVal];
    }
    if (unscaledVal == 0) {
        return zeroValueOf(scale);
    }
    BigInteger big = (unscaledVal == INFLATED) ? INFLATED_BIGINT : null;
    return new BigDecimal(big, unscaledVal, scale, prec);
}
/**
 * Internal factory taking a BigInteger unscaled value, with a
 * precomputed precision (0 when unknown). Reuses cached instances
 * where possible.
 */
static BigDecimal valueOf(BigInteger intVal, int scale, int prec) {
    long compact = compactValFor(intVal);
    if (compact == 0) {
        return zeroValueOf(scale);
    }
    if (scale == 0 && 0 <= compact && compact < ZERO_THROUGH_TEN.length) {
        return ZERO_THROUGH_TEN[(int) compact];
    }
    return new BigDecimal(intVal, compact, scale, prec);
}
/**
 * Returns a zero with the given scale, from the cache when the scale
 * is small and non-negative.
 */
static BigDecimal zeroValueOf(int scale) {
    return (0 <= scale && scale < ZERO_SCALED_BY.length)
            ? ZERO_SCALED_BY[scale]
            : new BigDecimal(BigInteger.ZERO, 0, scale, 1);
}
/**
* Translates a {@code double} into a {@code BigDecimal}, using
* the {@code double}'s canonical string representation provided
* by the {@link Double#toString(double)} method.
*
* @apiNote This is generally the preferred way to convert a
* {@code double} (or {@code float}) into a {@code BigDecimal}, as
* the value returned is equal to that resulting from constructing
* a {@code BigDecimal} from the result of using {@link
* Double#toString(double)}.
*
* @param val {@code double} to convert to a {@code BigDecimal}.
* @return a {@code BigDecimal} whose value is equal to or approximately
* equal to the value of {@code val}.
* @throws NumberFormatException if {@code val} is infinite or NaN.
* @since 1.5
*/
public static BigDecimal valueOf(double val) {
    // Reminder: a zero double renders as "0.0", so we cannot fast-path
    // to the constant ZERO. This might be important enough to justify
    // a factory approach, a cache, or a few private constants, later.
    return new BigDecimal(Double.toString(val));
}
// Arithmetic Operations
/**
* Returns a {@code BigDecimal} whose value is {@code (this +
* augend)}, and whose scale is {@code max(this.scale(),
* augend.scale())}.
*
* @param augend value to be added to this {@code BigDecimal}.
* @return {@code this + augend}
*/
public BigDecimal add(BigDecimal augend) {
    // Dispatch to the specialized add overload matching which of the
    // two operands fit the compact (long) representation.
    boolean thisCompact = this.intCompact != INFLATED;
    boolean thatCompact = augend.intCompact != INFLATED;
    if (thisCompact) {
        return thatCompact
                ? add(this.intCompact, this.scale, augend.intCompact, augend.scale)
                : add(this.intCompact, this.scale, augend.intVal, augend.scale);
    }
    return thatCompact
            ? add(augend.intCompact, augend.scale, this.intVal, this.scale)
            : add(this.intVal, this.scale, augend.intVal, augend.scale);
}
/**
* Returns a {@code BigDecimal} whose value is {@code (this + augend)},
* with rounding according to the context settings.
*
* If either number is zero and the precision setting is nonzero then
* the other number, rounded if necessary, is used as the result.
*
* @param augend value to be added to this {@code BigDecimal}.
* @param mc the context to use.
* @return {@code this + augend}, rounded as necessary.
* @throws ArithmeticException if the result is inexact but the
* rounding mode is {@code UNNECESSARY}.
* @since 1.5
*/
public BigDecimal add(BigDecimal augend, MathContext mc) {
    if (mc.precision == 0)
        return add(augend);
    BigDecimal lhs = this;
    // If either number is zero then the other number, rounded and
    // scaled if necessary, is used as the result.
    {
        boolean lhsIsZero = lhs.signum() == 0;
        boolean augendIsZero = augend.signum() == 0;
        if (lhsIsZero || augendIsZero) {
            // The result's preferred scale is the max of the operand scales.
            int preferredScale = Math.max(lhs.scale(), augend.scale());
            BigDecimal result;
            if (lhsIsZero && augendIsZero)
                return zeroValueOf(preferredScale);
            result = lhsIsZero ? doRound(augend, mc) : doRound(lhs, mc);
            if (result.scale() == preferredScale)
                return result;
            else if (result.scale() > preferredScale) {
                // Too many fraction digits: strip trailing zeros down
                // toward the preferred scale.
                return stripZerosToMatchScale(result.intVal, result.intCompact, result.scale, preferredScale);
            } else { // result.scale < preferredScale
                // Pad with zeros toward the preferred scale, but only as
                // far as the precision budget allows.
                int precisionDiff = mc.precision - result.precision();
                int scaleDiff = preferredScale - result.scale();
                if (precisionDiff >= scaleDiff)
                    return result.setScale(preferredScale); // can achieve target scale
                else
                    return result.setScale(result.scale() + precisionDiff);
            }
        }
    }
    // Align the operands to a common scale before the exact add;
    // preAlign may condense the far-smaller operand to a sticky bit.
    long padding = (long) lhs.scale - augend.scale;
    if (padding != 0) { // scales differ; alignment needed
        BigDecimal arg[] = preAlign(lhs, augend, padding, mc);
        matchScale(arg);
        lhs = arg[0];
        augend = arg[1];
    }
    return doRound(lhs.inflated().add(augend.inflated()), lhs.scale, mc);
}
/**
* Returns an array of length two, the sum of whose entries is
* equal to the rounded sum of the {@code BigDecimal} arguments.
*
* <p>If the digit positions of the arguments have a sufficient
* gap between them, the value smaller in magnitude can be
* condensed into a {@literal "sticky bit"} and the end result will
* round the same way <em>if</em> the precision of the final
* result does not include the high order digit of the small
* magnitude operand.
*
* <p>Note that while strictly speaking this is an optimization,
* it makes a much wider range of additions practical.
*
* <p>This corresponds to a pre-shift operation in a fixed
* precision floating-point adder; this method is complicated by
* variable precision of the result as determined by the
* MathContext. A more nuanced operation could implement a
* {@literal "right shift"} on the smaller magnitude operand so
* that the number of digits of the smaller operand could be
* reduced even though the significands partially overlapped.
*/
private BigDecimal[] preAlign(BigDecimal lhs, BigDecimal augend, long padding, MathContext mc) {
    assert padding != 0;
    BigDecimal big;
    BigDecimal small;
    // padding = lhs.scale - augend.scale; its sign says which operand
    // has the larger magnitude digit positions.
    if (padding < 0) { // lhs is big; augend is small
        big = lhs;
        small = augend;
    } else { // lhs is small; augend is big
        big = augend;
        small = lhs;
    }
    /*
     * This is the estimated scale of an ulp of the result; it assumes that
     * the result doesn't have a carry-out on a true add (e.g. 999 + 1 =>
     * 1000) or any subtractive cancellation on borrowing (e.g. 100 - 1.2 =>
     * 98.8)
     */
    long estResultUlpScale = (long) big.scale - big.precision() + mc.precision;
    /*
     * The low-order digit position of big is big.scale(). This
     * is true regardless of whether big has a positive or
     * negative scale. The high-order digit position of small is
     * small.scale - (small.precision() - 1). To do the full
     * condensation, the digit positions of big and small must be
     * disjoint *and* the digit positions of small should not be
     * directly visible in the result.
     */
    long smallHighDigitPos = (long) small.scale - small.precision() + 1;
    if (smallHighDigitPos > big.scale + 2 && // big and small disjoint
        smallHighDigitPos > estResultUlpScale + 2) { // small digits not visible
        // Condense small to a single "sticky" digit carrying only its
        // sign, placed just below the result's visible digits.
        small = BigDecimal.valueOf(small.signum(), this.checkScale(Math.max(big.scale, estResultUlpScale) + 3));
    }
    // Since addition is symmetric, preserving input order in
    // returned operands doesn't matter
    BigDecimal[] result = {big, small};
    return result;
}
/**
* Returns a {@code BigDecimal} whose value is {@code (this -
* subtrahend)}, and whose scale is {@code max(this.scale(),
* subtrahend.scale())}.
*
* @param subtrahend value to be subtracted from this {@code BigDecimal}.
* @return {@code this - subtrahend}
*/
public BigDecimal subtract(BigDecimal subtrahend) {
    // Implemented as addition of the negated subtrahend, dispatching on
    // which operands fit the compact (long) representation. Negating
    // intCompact is safe: it can never be Long.MIN_VALUE (== INFLATED).
    boolean thisCompact = this.intCompact != INFLATED;
    boolean thatCompact = subtrahend.intCompact != INFLATED;
    if (thisCompact) {
        return thatCompact
                ? add(this.intCompact, this.scale, -subtrahend.intCompact, subtrahend.scale)
                : add(this.intCompact, this.scale, subtrahend.intVal.negate(), subtrahend.scale);
    }
    // The (long, int, BigInteger, int) overload requires the compact
    // pair first, hence the subtrahend leads when only it is compact.
    return thatCompact
            ? add(-subtrahend.intCompact, subtrahend.scale, this.intVal, this.scale)
            : add(this.intVal, this.scale, subtrahend.intVal.negate(), subtrahend.scale);
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (this - subtrahend)},
 * with rounding according to the context settings.
 *
 * If {@code subtrahend} is zero then this, rounded if necessary, is used as the
 * result. If this is zero then the result is {@code subtrahend.negate(mc)}.
 *
 * @param subtrahend value to be subtracted from this {@code BigDecimal}.
 * @param mc the context to use.
 * @return {@code this - subtrahend}, rounded as necessary.
 * @throws ArithmeticException if the result is inexact but the
 *         rounding mode is {@code UNNECESSARY}.
 * @since  1.5
 */
public BigDecimal subtract(BigDecimal subtrahend, MathContext mc) {
    // Unlimited precision: the exact subtraction suffices.
    if (mc.precision == 0) {
        return subtract(subtrahend);
    }
    // Reuse the special rounding machinery of add() on the negation.
    return add(subtrahend.negate(), mc);
}
/**
 * Returns a {@code BigDecimal} whose value is <code>(this &times;
 * multiplicand)</code>, and whose scale is {@code (this.scale() +
 * multiplicand.scale())}.
 *
 * @param multiplicand value to be multiplied by this {@code BigDecimal}.
 * @return {@code this * multiplicand}
 */
public BigDecimal multiply(BigDecimal multiplicand) {
    // Scales add under multiplication; guard against int overflow.
    int productScale = checkScale((long) scale + multiplicand.scale);
    boolean thisCompact = this.intCompact != INFLATED;
    boolean thatCompact = multiplicand.intCompact != INFLATED;
    // Dispatch on the representation (compact long vs. BigInteger)
    // of each operand.
    if (thisCompact && thatCompact)
        return multiply(this.intCompact, multiplicand.intCompact, productScale);
    if (thisCompact)
        return multiply(this.intCompact, multiplicand.intVal, productScale);
    if (thatCompact)
        return multiply(multiplicand.intCompact, this.intVal, productScale);
    return multiply(this.intVal, multiplicand.intVal, productScale);
}
/**
 * Returns a {@code BigDecimal} whose value is <code>(this &times;
 * multiplicand)</code>, with rounding according to the context settings.
 *
 * @param multiplicand value to be multiplied by this {@code BigDecimal}.
 * @param mc the context to use.
 * @return {@code this * multiplicand}, rounded as necessary.
 * @throws ArithmeticException if the result is inexact but the
 *         rounding mode is {@code UNNECESSARY}.
 * @since  1.5
 */
public BigDecimal multiply(BigDecimal multiplicand, MathContext mc) {
    // Unlimited precision: delegate to the exact multiply.
    if (mc.precision == 0)
        return multiply(multiplicand);
    int productScale = checkScale((long) scale + multiplicand.scale);
    boolean thisCompact = this.intCompact != INFLATED;
    boolean thatCompact = multiplicand.intCompact != INFLATED;
    // Dispatch on the representation of each operand, rounding the
    // product per the supplied context.
    if (thisCompact && thatCompact)
        return multiplyAndRound(this.intCompact, multiplicand.intCompact, productScale, mc);
    if (thisCompact)
        return multiplyAndRound(this.intCompact, multiplicand.intVal, productScale, mc);
    if (thatCompact)
        return multiplyAndRound(multiplicand.intCompact, this.intVal, productScale, mc);
    return multiplyAndRound(this.intVal, multiplicand.intVal, productScale, mc);
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (this /
 * divisor)}, and whose scale is as specified.  If rounding must
 * be performed to generate a result with the specified scale, the
 * specified rounding mode is applied.
 *
 * @deprecated The method {@link #divide(BigDecimal, int, RoundingMode)}
 * should be used in preference to this legacy method.
 *
 * @param  divisor value by which this {@code BigDecimal} is to be divided.
 * @param  scale scale of the {@code BigDecimal} quotient to be returned.
 * @param  roundingMode rounding mode to apply.
 * @return {@code this / divisor}
 * @throws ArithmeticException if {@code divisor} is zero,
 *         {@code roundingMode==ROUND_UNNECESSARY} and
 *         the specified scale is insufficient to represent the result
 *         of the division exactly.
 * @throws IllegalArgumentException if {@code roundingMode} does not
 *         represent a valid rounding mode.
 * @see    #ROUND_UP
 * @see    #ROUND_DOWN
 * @see    #ROUND_CEILING
 * @see    #ROUND_FLOOR
 * @see    #ROUND_HALF_UP
 * @see    #ROUND_HALF_DOWN
 * @see    #ROUND_HALF_EVEN
 * @see    #ROUND_UNNECESSARY
 */
@Deprecated(since="9")
public BigDecimal divide(BigDecimal divisor, int scale, int roundingMode) {
    // Validate the legacy integer rounding-mode constant up front.
    if (roundingMode < ROUND_UP || roundingMode > ROUND_UNNECESSARY)
        throw new IllegalArgumentException("Invalid rounding mode");
    boolean thisCompact = this.intCompact != INFLATED;
    boolean divCompact = divisor.intCompact != INFLATED;
    // Route to the private divide overload matching each operand's
    // internal representation.
    if (thisCompact && divCompact)
        return divide(this.intCompact, this.scale, divisor.intCompact, divisor.scale, scale, roundingMode);
    if (thisCompact)
        return divide(this.intCompact, this.scale, divisor.intVal, divisor.scale, scale, roundingMode);
    if (divCompact)
        return divide(this.intVal, this.scale, divisor.intCompact, divisor.scale, scale, roundingMode);
    return divide(this.intVal, this.scale, divisor.intVal, divisor.scale, scale, roundingMode);
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (this /
 * divisor)}, and whose scale is as specified.  If rounding must
 * be performed to generate a result with the specified scale, the
 * specified rounding mode is applied.
 *
 * @param  divisor value by which this {@code BigDecimal} is to be divided.
 * @param  scale scale of the {@code BigDecimal} quotient to be returned.
 * @param  roundingMode rounding mode to apply.
 * @return {@code this / divisor}
 * @throws ArithmeticException if {@code divisor} is zero,
 *         {@code roundingMode==RoundingMode.UNNECESSARY} and
 *         the specified scale is insufficient to represent the result
 *         of the division exactly.
 * @since 1.5
 */
public BigDecimal divide(BigDecimal divisor, int scale, RoundingMode roundingMode) {
    // Translate the enum to its legacy int constant and delegate.
    int legacyMode = roundingMode.oldMode;
    return divide(divisor, scale, legacyMode);
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (this /
 * divisor)}, and whose scale is {@code this.scale()}.  If
 * rounding must be performed to generate a result with the given
 * scale, the specified rounding mode is applied.
 *
 * @deprecated The method {@link #divide(BigDecimal, RoundingMode)}
 * should be used in preference to this legacy method.
 *
 * @param  divisor value by which this {@code BigDecimal} is to be divided.
 * @param  roundingMode rounding mode to apply.
 * @return {@code this / divisor}
 * @throws ArithmeticException if {@code divisor==0}, or
 *         {@code roundingMode==ROUND_UNNECESSARY} and
 *         {@code this.scale()} is insufficient to represent the result
 *         of the division exactly.
 * @throws IllegalArgumentException if {@code roundingMode} does not
 *         represent a valid rounding mode.
 * @see    #ROUND_UP
 * @see    #ROUND_DOWN
 * @see    #ROUND_CEILING
 * @see    #ROUND_FLOOR
 * @see    #ROUND_HALF_UP
 * @see    #ROUND_HALF_DOWN
 * @see    #ROUND_HALF_EVEN
 * @see    #ROUND_UNNECESSARY
 */
@Deprecated(since="9")
public BigDecimal divide(BigDecimal divisor, int roundingMode) {
    // The quotient keeps this value's own scale.
    return this.divide(divisor, this.scale, roundingMode);
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (this /
 * divisor)}, and whose scale is {@code this.scale()}.  If
 * rounding must be performed to generate a result with the given
 * scale, the specified rounding mode is applied.
 *
 * @param  divisor value by which this {@code BigDecimal} is to be divided.
 * @param  roundingMode rounding mode to apply.
 * @return {@code this / divisor}
 * @throws ArithmeticException if {@code divisor==0}, or
 *         {@code roundingMode==RoundingMode.UNNECESSARY} and
 *         {@code this.scale()} is insufficient to represent the result
 *         of the division exactly.
 * @since 1.5
 */
public BigDecimal divide(BigDecimal divisor, RoundingMode roundingMode) {
    // Keep this value's scale; convert the enum to its legacy constant.
    int legacyMode = roundingMode.oldMode;
    return this.divide(divisor, this.scale, legacyMode);
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (this /
 * divisor)}, and whose preferred scale is {@code (this.scale() -
 * divisor.scale())}; if the exact quotient cannot be
 * represented (because it has a non-terminating decimal
 * expansion) an {@code ArithmeticException} is thrown.
 *
 * @param  divisor value by which this {@code BigDecimal} is to be divided.
 * @throws ArithmeticException if the exact quotient does not have a
 *         terminating decimal expansion
 * @return {@code this / divisor}
 * @since 1.5
 * @author Joseph D. Darcy
 */
public BigDecimal divide(BigDecimal divisor) {
    /*
     * Handle zero cases first.
     */
    if (divisor.signum() == 0) {   // x/0
        if (this.signum() == 0)    // 0/0
            throw new ArithmeticException("Division undefined");  // NaN
        throw new ArithmeticException("Division by zero");
    }
    // Calculate preferred scale (this.scale - divisor.scale,
    // clamped to the int range).
    int preferredScale = saturateLong((long) this.scale - divisor.scale);
    if (this.signum() == 0)        // 0/y
        return zeroValueOf(preferredScale);
    else {
        /*
         * If the quotient this/divisor has a terminating decimal
         * expansion, the expansion can have no more than
         * (a.precision() + ceil(10*b.precision)/3) digits.
         * Therefore, create a MathContext object with this
         * precision and do a divide with the UNNECESSARY rounding
         * mode.
         */
        MathContext mc = new MathContext( (int)Math.min(this.precision() +
                                                        (long)Math.ceil(10.0*divisor.precision()/3.0),
                                                        Integer.MAX_VALUE),
                                          RoundingMode.UNNECESSARY);
        BigDecimal quotient;
        try {
            quotient = this.divide(divisor, mc);
        } catch (ArithmeticException e) {
            // UNNECESSARY rounding failed within the digit bound, so the
            // expansion is non-terminating; report that to the caller.
            throw new ArithmeticException("Non-terminating decimal expansion; " +
                                          "no exact representable decimal result.");
        }
        int quotientScale = quotient.scale();
        // divide(BigDecimal, mc) tries to adjust the quotient to
        // the desired one by removing trailing zeros; since the
        // exact divide method does not have an explicit digit
        // limit, we can add zeros too.
        if (preferredScale > quotientScale)
            return quotient.setScale(preferredScale, ROUND_UNNECESSARY);
        return quotient;
    }
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (this /
 * divisor)}, with rounding according to the context settings.
 *
 * @param  divisor value by which this {@code BigDecimal} is to be divided.
 * @param  mc the context to use.
 * @return {@code this / divisor}, rounded as necessary.
 * @throws ArithmeticException if the result is inexact but the
 *         rounding mode is {@code UNNECESSARY} or
 *         {@code mc.precision == 0} and the quotient has a
 *         non-terminating decimal expansion.
 * @since  1.5
 */
public BigDecimal divide(BigDecimal divisor, MathContext mc) {
    int mcp = mc.precision;
    // Unlimited precision: perform the exact division instead.
    if (mcp == 0)
        return divide(divisor);
    BigDecimal dividend = this;
    long preferredScale = (long) dividend.scale - divisor.scale;
    // Zero handling: x/0 and 0/0 are errors; 0/y is a scaled zero.
    if (divisor.signum() == 0) {
        if (dividend.signum() == 0)
            throw new ArithmeticException("Division undefined"); // 0/0
        throw new ArithmeticException("Division by zero");
    }
    if (dividend.signum() == 0)
        return zeroValueOf(saturateLong(preferredScale));
    // The private divide overloads normalize the operands so the
    // quotient can be rounded to exactly mc.precision digits; pick
    // the overload matching each operand's internal representation.
    int xscale = dividend.precision();
    int yscale = divisor.precision();
    boolean xCompact = dividend.intCompact != INFLATED;
    boolean yCompact = divisor.intCompact != INFLATED;
    if (xCompact && yCompact)
        return divide(dividend.intCompact, xscale, divisor.intCompact, yscale, preferredScale, mc);
    if (xCompact)
        return divide(dividend.intCompact, xscale, divisor.intVal, yscale, preferredScale, mc);
    if (yCompact)
        return divide(dividend.intVal, xscale, divisor.intCompact, yscale, preferredScale, mc);
    return divide(dividend.intVal, xscale, divisor.intVal, yscale, preferredScale, mc);
}
/**
 * Returns a {@code BigDecimal} whose value is the integer part
 * of the quotient {@code (this / divisor)} rounded down.  The
 * preferred scale of the result is {@code (this.scale() -
 * divisor.scale())}.
 *
 * @param  divisor value by which this {@code BigDecimal} is to be divided.
 * @return The integer part of {@code this / divisor}.
 * @throws ArithmeticException if {@code divisor==0}
 * @since  1.5
 */
public BigDecimal divideToIntegralValue(BigDecimal divisor) {
    // Calculate preferred scale (this.scale - divisor.scale, clamped
    // to the int range).
    int preferredScale = saturateLong((long) this.scale - divisor.scale);
    if (this.compareMagnitude(divisor) < 0) {
        // |this| < |divisor| => the integer quotient is zero;
        // much faster when this << divisor
        return zeroValueOf(preferredScale);
    }
    if (this.signum() == 0 && divisor.signum() != 0)
        // 0 / nonzero: zero carrying the preferred scale (exact).
        return this.setScale(preferredScale, ROUND_UNNECESSARY);
    // Perform a divide with enough digits to round to a correct
    // integer value; then remove any fractional digits
    int maxDigits = (int)Math.min(this.precision() +
                                  (long)Math.ceil(10.0*divisor.precision()/3.0) +
                                  Math.abs((long)this.scale() - divisor.scale()) + 2,
                                  Integer.MAX_VALUE);
    BigDecimal quotient = this.divide(divisor, new MathContext(maxDigits,
                                                               RoundingMode.DOWN));
    if (quotient.scale > 0) {
        // Truncate toward zero to an integer, then strip trailing
        // zeros down to (at most) the preferred scale.
        quotient = quotient.setScale(0, RoundingMode.DOWN);
        quotient = stripZerosToMatchScale(quotient.intVal, quotient.intCompact, quotient.scale, preferredScale);
    }
    if (quotient.scale < preferredScale) {
        // pad with zeros if necessary
        quotient = quotient.setScale(preferredScale, ROUND_UNNECESSARY);
    }
    return quotient;
}
/**
 * Returns a {@code BigDecimal} whose value is the integer part
 * of {@code (this / divisor)}.  Since the integer part of the
 * exact quotient does not depend on the rounding mode, the
 * rounding mode does not affect the values returned by this
 * method.  The preferred scale of the result is
 * {@code (this.scale() - divisor.scale())}.  An
 * {@code ArithmeticException} is thrown if the integer part of
 * the exact quotient needs more than {@code mc.precision}
 * digits.
 *
 * @param  divisor value by which this {@code BigDecimal} is to be divided.
 * @param  mc the context to use.
 * @return The integer part of {@code this / divisor}.
 * @throws ArithmeticException if {@code divisor==0}
 * @throws ArithmeticException if {@code mc.precision} {@literal >} 0 and the result
 *         requires a precision of more than {@code mc.precision} digits.
 * @since  1.5
 * @author Joseph D. Darcy
 */
public BigDecimal divideToIntegralValue(BigDecimal divisor, MathContext mc) {
    if (mc.precision == 0 || // exact result
        (this.compareMagnitude(divisor) < 0)) // zero result
        return divideToIntegralValue(divisor);
    // Calculate preferred scale
    int preferredScale = saturateLong((long)this.scale - divisor.scale);
    /*
     * Perform a normal divide to mc.precision digits.  If the
     * remainder has absolute value less than the divisor, the
     * integer portion of the quotient fits into mc.precision
     * digits.  Next, remove any fractional digits from the
     * quotient and adjust the scale to the preferred value.
     */
    BigDecimal result = this.divide(divisor, new MathContext(mc.precision, RoundingMode.DOWN));
    if (result.scale() < 0) {
        /*
         * Result is an integer. See if quotient represents the
         * full integer portion of the exact quotient; if it does,
         * the computed remainder will be less than the divisor.
         */
        BigDecimal product = result.multiply(divisor);
        // If the quotient is the full integer value,
        // |dividend-product| < |divisor|.
        if (this.subtract(product).compareMagnitude(divisor) >= 0) {
            // Integer part needs more than mc.precision digits.
            throw new ArithmeticException("Division impossible");
        }
    } else if (result.scale() > 0) {
        /*
         * Integer portion of quotient will fit into precision
         * digits; recompute quotient to scale 0 to avoid double
         * rounding and then try to adjust, if necessary.
         */
        result = result.setScale(0, RoundingMode.DOWN);
    }
    // else result.scale() == 0;
    int precisionDiff;
    if ((preferredScale > result.scale()) &&
        (precisionDiff = mc.precision - result.precision()) > 0) {
        // Room left within the precision budget: pad toward the
        // preferred scale without exceeding mc.precision digits.
        return result.setScale(result.scale() +
                               Math.min(precisionDiff, preferredScale - result.scale) );
    } else {
        // Otherwise strip trailing zeros down toward the preferred scale.
        return stripZerosToMatchScale(result.intVal,result.intCompact,result.scale,preferredScale);
    }
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (this % divisor)}.
 *
 * <p>The remainder is given by
 * {@code this.subtract(this.divideToIntegralValue(divisor).multiply(divisor))}.
 * Note that this is <em>not</em> the modulo operation (the result can be
 * negative).
 *
 * @param  divisor value by which this {@code BigDecimal} is to be divided.
 * @return {@code this % divisor}.
 * @throws ArithmeticException if {@code divisor==0}
 * @since  1.5
 */
public BigDecimal remainder(BigDecimal divisor) {
    // The remainder is the second element of the quotient/remainder pair.
    return this.divideAndRemainder(divisor)[1];
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (this %
 * divisor)}, with rounding according to the context settings.
 * The {@code MathContext} settings affect the implicit divide
 * used to compute the remainder.  The remainder computation
 * itself is by definition exact.  Therefore, the remainder may
 * contain more than {@code mc.getPrecision()} digits.
 *
 * <p>The remainder is given by
 * {@code this.subtract(this.divideToIntegralValue(divisor,
 * mc).multiply(divisor))}.  Note that this is not the modulo
 * operation (the result can be negative).
 *
 * @param  divisor value by which this {@code BigDecimal} is to be divided.
 * @param  mc the context to use.
 * @return {@code this % divisor}, rounded as necessary.
 * @throws ArithmeticException if {@code divisor==0}
 * @throws ArithmeticException if the result is inexact but the
 *         rounding mode is {@code UNNECESSARY}, or {@code mc.precision}
 *         {@literal >} 0 and the result of {@code this.divideToIntegralValue(divisor)} would
 *         require a precision of more than {@code mc.precision} digits.
 * @see    #divideToIntegralValue(java.math.BigDecimal, java.math.MathContext)
 * @since  1.5
 */
public BigDecimal remainder(BigDecimal divisor, MathContext mc) {
    // The remainder is the second element of the quotient/remainder pair.
    return this.divideAndRemainder(divisor, mc)[1];
}
/**
 * Returns a two-element {@code BigDecimal} array containing the
 * result of {@code divideToIntegralValue} followed by the result of
 * {@code remainder} on the two operands.
 *
 * <p>Note that if both the integer quotient and remainder are
 * needed, this method is faster than using the
 * {@code divideToIntegralValue} and {@code remainder} methods
 * separately because the division need only be carried out once.
 *
 * @param  divisor value by which this {@code BigDecimal} is to be divided,
 *         and the remainder computed.
 * @return a two element {@code BigDecimal} array: the quotient
 *         (the result of {@code divideToIntegralValue}) is the initial element
 *         and the remainder is the final element.
 * @throws ArithmeticException if {@code divisor==0}
 * @see    #divideToIntegralValue(java.math.BigDecimal, java.math.MathContext)
 * @see    #remainder(java.math.BigDecimal, java.math.MathContext)
 * @since  1.5
 */
public BigDecimal[] divideAndRemainder(BigDecimal divisor) {
    // Using the identity x = i * y + r, solve for r once the
    // integer quotient i is known.
    BigDecimal quotient = this.divideToIntegralValue(divisor);
    BigDecimal rem = this.subtract(quotient.multiply(divisor));
    return new BigDecimal[] { quotient, rem };
}
/**
 * Returns a two-element {@code BigDecimal} array containing the
 * result of {@code divideToIntegralValue} followed by the result of
 * {@code remainder} on the two operands calculated with rounding
 * according to the context settings.
 *
 * <p>Note that if both the integer quotient and remainder are
 * needed, this method is faster than using the
 * {@code divideToIntegralValue} and {@code remainder} methods
 * separately because the division need only be carried out once.
 *
 * @param  divisor value by which this {@code BigDecimal} is to be divided,
 *         and the remainder computed.
 * @param  mc the context to use.
 * @return a two element {@code BigDecimal} array: the quotient
 *         (the result of {@code divideToIntegralValue}) is the
 *         initial element and the remainder is the final element.
 * @throws ArithmeticException if {@code divisor==0}
 * @throws ArithmeticException if the result is inexact but the
 *         rounding mode is {@code UNNECESSARY}, or {@code mc.precision}
 *         {@literal >} 0 and the result of {@code this.divideToIntegralValue(divisor)} would
 *         require a precision of more than {@code mc.precision} digits.
 * @see    #divideToIntegralValue(java.math.BigDecimal, java.math.MathContext)
 * @see    #remainder(java.math.BigDecimal, java.math.MathContext)
 * @since  1.5
 */
public BigDecimal[] divideAndRemainder(BigDecimal divisor, MathContext mc) {
    // Unlimited precision: compute the exact pair.
    if (mc.precision == 0)
        return divideAndRemainder(divisor);
    // Using the identity x = i * y + r: the quotient is rounded per
    // the context; the remainder computation itself is exact.
    BigDecimal quotient = this.divideToIntegralValue(divisor, mc);
    BigDecimal rem = this.subtract(quotient.multiply(divisor));
    return new BigDecimal[] { quotient, rem };
}
/**
 * Returns an approximation to the square root of {@code this}
 * with rounding according to the context settings.
 *
 * <p>The preferred scale of the returned result is equal to
 * {@code this.scale()/2}. The value of the returned result is
 * always within one ulp of the exact decimal value for the
 * precision in question. If the rounding mode is {@link
 * RoundingMode#HALF_UP HALF_UP}, {@link RoundingMode#HALF_DOWN
 * HALF_DOWN}, or {@link RoundingMode#HALF_EVEN HALF_EVEN}, the
 * result is within one half an ulp of the exact decimal value.
 *
 * <p>Special case:
 * <ul>
 * <li> The square root of a number numerically equal to {@code
 * ZERO} is numerically equal to {@code ZERO} with a preferred
 * scale according to the general rule above. In particular, for
 * {@code ZERO}, {@code ZERO.sqrt(mc).equals(ZERO)} is true with
 * any {@code MathContext} as an argument.
 * </ul>
 *
 * @param mc the context to use.
 * @return the square root of {@code this}.
 * @throws ArithmeticException if {@code this} is less than zero.
 * @throws ArithmeticException if an exact result is requested
 * ({@code mc.getPrecision()==0}) and there is no finite decimal
 * expansion of the exact result
 * @throws ArithmeticException if
 * {@code (mc.getRoundingMode()==RoundingMode.UNNECESSARY}) and
 * the exact result cannot fit in {@code mc.getPrecision()}
 * digits.
 * @see BigInteger#sqrt()
 * @since  9
 */
public BigDecimal sqrt(MathContext mc) {
    int signum = signum();
    if (signum == 1) {
        /*
         * The following code draws on the algorithm presented in
         * "Properly Rounded Variable Precision Square Root," Hull and
         * Abrham, ACM Transactions on Mathematical Software, Vol 11,
         * No. 3, September 1985, Pages 229-237.
         *
         * The BigDecimal computational model differs from the one
         * presented in the paper in several ways: first BigDecimal
         * numbers aren't necessarily normalized, second many more
         * rounding modes are supported, including UNNECESSARY, and
         * exact results can be requested.
         *
         * The main steps of the algorithm below are as follows,
         * first argument reduce the value to the numerical range
         * [1, 10) using the following relations:
         *
         * x = y * 10 ^ exp
         * sqrt(x) = sqrt(y) * 10^(exp / 2) if exp is even
         * sqrt(x) = sqrt(y/10) * 10 ^((exp+1)/2) if exp is odd
         *
         * Then use Newton's iteration on the reduced value to compute
         * the numerical digits of the desired result.
         *
         * Finally, scale back to the desired exponent range and
         * perform any adjustment to get the preferred scale in the
         * representation.
         */
        // The code below favors relative simplicity over checking
        // for special cases that could run faster.
        int preferredScale = this.scale()/2;
        BigDecimal zeroWithFinalPreferredScale = valueOf(0L, preferredScale);
        // First phase of numerical normalization, strip trailing
        // zeros and check for even powers of 10.
        BigDecimal stripped = this.stripTrailingZeros();
        int strippedScale = stripped.scale();
        // Numerically sqrt(10^2N) = 10^N
        if (stripped.isPowerOfTen() &&
            strippedScale % 2 == 0) {
            BigDecimal result = valueOf(1L, strippedScale/2);
            if (result.scale() != preferredScale) {
                // Adjust to requested precision and preferred
                // scale as appropriate.
                result = result.add(zeroWithFinalPreferredScale, mc);
            }
            return result;
        }
        // After stripTrailingZeros, the representation is normalized as
        //
        // unscaledValue * 10^(-scale)
        //
        // where unscaledValue is an integer with the minimum
        // precision for the cohort of the numerical value. To
        // allow binary floating-point hardware to be used to get
        // approximately a 15 digit approximation to the square
        // root, it is helpful to instead normalize this so that
        // the significand portion is to right of the decimal
        // point by roughly (scale() - precision() +1).
        // Now the precision / scale adjustment
        int scaleAdjust = 0;
        int scale = stripped.scale() - stripped.precision() + 1;
        if (scale % 2 == 0) {
            scaleAdjust = scale;
        } else {
            // Keep the adjustment even so the final 10^(scaleAdjust/2)
            // rescaling is exact.
            scaleAdjust = scale - 1;
        }
        BigDecimal working = stripped.scaleByPowerOfTen(scaleAdjust);
        assert // Verify 0.1 <= working < 10
            ONE_TENTH.compareTo(working) <= 0 && working.compareTo(TEN) < 0;
        // Use good ole' Math.sqrt to get the initial guess for
        // the Newton iteration, good to at least 15 decimal
        // digits. This approach does incur the cost of a
        //
        // BigDecimal -> double -> BigDecimal
        //
        // conversion cycle, but it avoids the need for several
        // Newton iterations in BigDecimal arithmetic to get the
        // working answer to 15 digits of precision. If many fewer
        // than 15 digits were needed, it might be faster to do
        // the loop entirely in BigDecimal arithmetic.
        //
        // (A double value might have as many as 17 decimal
        // digits of precision; it depends on the relative density
        // of binary and decimal numbers at different regions of
        // the number line.)
        //
        // (It would be possible to check for certain special
        // cases to avoid doing any Newton iterations. For
        // example, if the BigDecimal -> double conversion was
        // known to be exact and the rounding mode had a
        // low-enough precision, the post-Newton rounding logic
        // could be applied directly.)
        BigDecimal guess = new BigDecimal(Math.sqrt(working.doubleValue()));
        int guessPrecision = 15;
        int originalPrecision = mc.getPrecision();
        int targetPrecision;
        // If an exact value is requested, it must only need about
        // half of the input digits to represent since multiplying
        // an N digit number by itself yield a 2N-1 digit or 2N
        // digit result.
        if (originalPrecision == 0) {
            targetPrecision = stripped.precision()/2 + 1;
        } else {
            targetPrecision = originalPrecision;
        }
        // When setting the precision to use inside the Newton
        // iteration loop, take care to avoid the case where the
        // precision of the input exceeds the requested precision
        // and rounding the input value too soon.
        BigDecimal approx = guess;
        int workingPrecision = working.precision();
        // Newton's iteration doubles the number of correct digits per
        // pass; loop until the target precision (plus guard digits) is met.
        do {
            int tmpPrecision = Math.max(Math.max(guessPrecision, targetPrecision + 2),
                                        workingPrecision);
            MathContext mcTmp = new MathContext(tmpPrecision, RoundingMode.HALF_EVEN);
            // approx = 0.5 * (approx + fraction / approx)
            approx = ONE_HALF.multiply(approx.add(working.divide(approx, mcTmp), mcTmp));
            guessPrecision *= 2;
        } while (guessPrecision < targetPrecision + 2);
        BigDecimal result;
        RoundingMode targetRm = mc.getRoundingMode();
        if (targetRm == RoundingMode.UNNECESSARY || originalPrecision == 0) {
            RoundingMode tmpRm =
                (targetRm == RoundingMode.UNNECESSARY) ? RoundingMode.DOWN : targetRm;
            MathContext mcTmp = new MathContext(targetPrecision, tmpRm);
            result = approx.scaleByPowerOfTen(-scaleAdjust/2).round(mcTmp);
            // If result*result != this numerically, the square
            // root isn't exact
            if (this.subtract(result.multiply(result)).compareTo(ZERO) != 0) {
                throw new ArithmeticException("Computed square root not exact.");
            }
        } else {
            // Undo the argument reduction and round per the caller's context.
            result = approx.scaleByPowerOfTen(-scaleAdjust/2).round(mc);
        }
        if (result.scale() != preferredScale) {
            // The preferred scale of an add is
            // max(addend.scale(), augend.scale()). Therefore, if
            // the scale of the result is first minimized using
            // stripTrailingZeros(), adding a zero of the
            // preferred scale rounding the correct precision will
            // perform the proper scale vs precision tradeoffs.
            result = result.stripTrailingZeros().
                add(zeroWithFinalPreferredScale,
                    new MathContext(originalPrecision, RoundingMode.UNNECESSARY));
        }
        assert squareRootResultAssertions(result, mc);
        return result;
    } else {
        switch (signum) {
        case -1:
            throw new ArithmeticException("Attempted square root " +
                                          "of negative BigDecimal");
        case 0:
            // sqrt(0) is 0 with the preferred scale scale()/2.
            return valueOf(0L, scale()/2);
        default:
            throw new AssertionError("Bad value from signum");
        }
    }
}
// True when this value's unscaled part is exactly 1, i.e. the value is
// 10^(-scale) for some scale.
private boolean isPowerOfTen() {
    return this.unscaledValue().equals(BigInteger.ONE);
}
/**
 * For nonzero values, check numerical correctness properties of
 * the computed result for the chosen rounding mode.
 *
 * For the directed roundings, for DOWN and FLOOR, result^2 must
 * be {@code <=} the input and (result+ulp)^2 must be {@code >} the
 * input. Conversely, for UP and CEIL, result^2 must be {@code >=} the
 * input and (result-ulp)^2 must be {@code <} the input.
 *
 * @param result the candidate square root to validate
 * @param mc     the context the root was computed under
 * @return {@code true} when the result satisfies the rounding contract
 */
private boolean squareRootResultAssertions(BigDecimal result, MathContext mc) {
    if (result.signum() == 0) {
        // Zero results are checked separately.
        return squareRootZeroResultAssertions(result, mc);
    } else {
        RoundingMode rm = mc.getRoundingMode();
        BigDecimal ulp = result.ulp();
        BigDecimal neighborUp   = result.add(ulp);
        // Make neighbor down accurate even for powers of ten
        if (this.isPowerOfTen()) {
            ulp = ulp.divide(TEN);
        }
        BigDecimal neighborDown = result.subtract(ulp);
        // Both the starting value and result should be nonzero and positive.
        if (result.signum() != 1 ||
            this.signum() != 1) {
            return false;
        }
        switch (rm) {
        case DOWN:
        case FLOOR:
            // result^2 <= this < (result + ulp)^2
            return
                result.multiply(result).compareTo(this) <= 0 &&
                neighborUp.multiply(neighborUp).compareTo(this) > 0;
        case UP:
        case CEILING:
            // (result - ulp)^2 < this <= result^2
            return
                result.multiply(result).compareTo(this) >= 0 &&
                neighborDown.multiply(neighborDown).compareTo(this) < 0;
        case HALF_DOWN:
        case HALF_EVEN:
        case HALF_UP:
            // result^2 must be at least as close to this as either
            // neighbor's square.
            BigDecimal err = result.multiply(result).subtract(this).abs();
            BigDecimal errUp = neighborUp.multiply(neighborUp).subtract(this);
            BigDecimal errDown =  this.subtract(neighborDown.multiply(neighborDown));
            // All error values should be positive so don't need to
            // compare absolute values.
            int err_comp_errUp = err.compareTo(errUp);
            int err_comp_errDown = err.compareTo(errDown);
            return
                errUp.signum()  == 1 &&
                errDown.signum() == 1 &&
                err_comp_errUp   <= 0 &&
                err_comp_errDown <= 0 &&
                ((err_comp_errUp   == 0 ) ? err_comp_errDown < 0 : true) &&
                ((err_comp_errDown == 0 ) ? err_comp_errUp   < 0 : true);
            // && could check for digit conditions for ties too
        default: // Definition of UNNECESSARY already verified.
            return true;
        }
    }
}
// A zero square-root result is valid only when the input itself is
// numerically zero.
private boolean squareRootZeroResultAssertions(BigDecimal result, MathContext mc) {
    boolean inputIsZero = (this.compareTo(ZERO) == 0);
    return inputIsZero;
}
/**
 * Returns a {@code BigDecimal} whose value is
 * <code>(this<sup>n</sup>)</code>, The power is computed exactly, to
 * unlimited precision.
 *
 * <p>The parameter {@code n} must be in the range 0 through
 * 999999999, inclusive.  {@code ZERO.pow(0)} returns {@link
 * #ONE}.
 *
 * Note that future releases may expand the allowable exponent
 * range of this method.
 *
 * @param  n power to raise this {@code BigDecimal} to.
 * @return <code>this<sup>n</sup></code>
 * @throws ArithmeticException if {@code n} is out of range.
 * @since  1.5
 */
public BigDecimal pow(int n) {
    if (n < 0 || n > 999999999)
        throw new ArithmeticException("Invalid operation");
    // The result's scale is scale*n; checkScale rejects over/underflow
    // so we never attempt "supernormal" numbers.
    int newScale = checkScale((long) scale * n);
    BigInteger magnitude = this.inflated().pow(n);
    return new BigDecimal(magnitude, newScale);
}
/**
 * Returns a {@code BigDecimal} whose value is
 * <code>(this<sup>n</sup>)</code>.  The current implementation uses
 * the core algorithm defined in ANSI standard X3.274-1996 with
 * rounding according to the context settings.  In general, the
 * returned numerical value is within two ulps of the exact
 * numerical value for the chosen precision.  Note that future
 * releases may use a different algorithm with a decreased
 * allowable error bound and increased allowable exponent range.
 *
 * <p>The X3.274-1996 algorithm is:
 *
 * <ul>
 * <li> An {@code ArithmeticException} exception is thrown if
 *  <ul>
 *    <li>{@code abs(n) > 999999999}
 *    <li>{@code mc.precision == 0} and {@code n < 0}
 *    <li>{@code mc.precision > 0} and {@code n} has more than
 *    {@code mc.precision} decimal digits
 *  </ul>
 *
 * <li> if {@code n} is zero, {@link #ONE} is returned even if
 * {@code this} is zero, otherwise
 * <ul>
 *   <li> if {@code n} is positive, the result is calculated via
 *   the repeated squaring technique into a single accumulator.
 *   The individual multiplications with the accumulator use the
 *   same math context settings as in {@code mc} except for a
 *   precision increased to {@code mc.precision + elength + 1}
 *   where {@code elength} is the number of decimal digits in
 *   {@code n}.
 *
 *   <li> if {@code n} is negative, the result is calculated as if
 *   {@code n} were positive; this value is then divided into one
 *   using the working precision specified above.
 *
 *   <li> The final value from either the positive or negative case
 *   is then rounded to the destination precision.
 *   </ul>
 * </ul>
 *
 * @param  n power to raise this {@code BigDecimal} to.
 * @param  mc the context to use.
 * @return <code>this<sup>n</sup></code> using the ANSI standard X3.274-1996
 *         algorithm
 * @throws ArithmeticException if the result is inexact but the
 *         rounding mode is {@code UNNECESSARY}, or {@code n} is out
 *         of range.
 * @since  1.5
 */
public BigDecimal pow(int n, MathContext mc) {
    if (mc.precision == 0)
        return pow(n);
    if (n < -999999999 || n > 999999999)
        throw new ArithmeticException("Invalid operation");
    if (n == 0)
        return ONE;                      // x**0 == 1 in X3.274
    BigDecimal lhs = this;
    MathContext workmc = mc;           // working settings
    int mag = Math.abs(n);               // magnitude of n
    if (mc.precision > 0) {
        int elength = longDigitLength(mag); // length of n in digits
        if (elength > mc.precision)        // X3.274 rule
            throw new ArithmeticException("Invalid operation");
        // Widen the working precision by elength+1 guard digits so
        // the repeated multiplications stay within 2 ulps overall.
        workmc = new MathContext(mc.precision + elength + 1,
                                  mc.roundingMode);
    }
    // ready to carry out power calculation...
    // Binary exponentiation: walk the bits of |n| from the most
    // significant downward by repeatedly shifting mag left; a bit
    // appearing in the sign position (mag < 0) means "multiply by x".
    BigDecimal acc = ONE;           // accumulator
    boolean seenbit = false;        // set once we've seen a 1-bit
    for (int i=1;;i++) {            // for each bit [top bit ignored]
        mag += mag;                 // shift left 1 bit
        if (mag < 0) {              // top bit is set
            seenbit = true;         // OK, we're off
            acc = acc.multiply(lhs, workmc); // acc=acc*x
        }
        if (i == 31)
            break;                  // that was the last bit
        if (seenbit)
            acc=acc.multiply(acc, workmc);   // acc=acc*acc [square]
            // else (!seenbit) no point in squaring ONE
    }
    // if negative n, calculate the reciprocal using working precision
    if (n < 0) // [hence mc.precision>0]
        acc=ONE.divide(acc, workmc);
    // round to final precision and strip zeros
    return doRound(acc, mc);
}
/**
* Returns a {@code BigDecimal} whose value is the absolute value
* of this {@code BigDecimal}, and whose scale is
* {@code this.scale()}.
*
* @return {@code abs(this)}
*/
public BigDecimal abs() {
return (signum() < 0 ? negate() : this);
}
/**
* Returns a {@code BigDecimal} whose value is the absolute value
* of this {@code BigDecimal}, with rounding according to the
* context settings.
*
* @param mc the context to use.
* @return {@code abs(this)}, rounded as necessary.
* @throws ArithmeticException if the result is inexact but the
* rounding mode is {@code UNNECESSARY}.
* @since 1.5
*/
public BigDecimal abs(MathContext mc) {
return (signum() < 0 ? negate(mc) : plus(mc));
}
/**
* Returns a {@code BigDecimal} whose value is {@code (-this)},
* and whose scale is {@code this.scale()}.
*
* @return {@code -this}.
*/
public BigDecimal negate() {
if (intCompact == INFLATED) {
return new BigDecimal(intVal.negate(), INFLATED, scale, precision);
} else {
return valueOf(-intCompact, scale, precision);
}
}
/**
* Returns a {@code BigDecimal} whose value is {@code (-this)},
* with rounding according to the context settings.
*
* @param mc the context to use.
* @return {@code -this}, rounded as necessary.
* @throws ArithmeticException if the result is inexact but the
* rounding mode is {@code UNNECESSARY}.
* @since 1.5
*/
public BigDecimal negate(MathContext mc) {
return negate().plus(mc);
}
    /**
     * Returns a {@code BigDecimal} whose value is {@code (+this)}, and whose
     * scale is {@code this.scale()}.
     *
     * <p>This method, which simply returns this {@code BigDecimal}
     * is included for symmetry with the unary minus method {@link
     * #negate()}.
     *
     * @return {@code this}.
     * @see #negate()
     * @since 1.5
     */
    public BigDecimal plus() {
        // Identity operation: unary plus never changes value or scale, and
        // BigDecimal is immutable, so returning this instance is safe.
        return this;
    }
/**
* Returns a {@code BigDecimal} whose value is {@code (+this)},
* with rounding according to the context settings.
*
* <p>The effect of this method is identical to that of the {@link
* #round(MathContext)} method.
*
* @param mc the context to use.
* @return {@code this}, rounded as necessary. A zero result will
* have a scale of 0.
* @throws ArithmeticException if the result is inexact but the
* rounding mode is {@code UNNECESSARY}.
* @see #round(MathContext)
* @since 1.5
*/
public BigDecimal plus(MathContext mc) {
if (mc.precision == 0) // no rounding please
return this;
return doRound(this, mc);
}
/**
* Returns the signum function of this {@code BigDecimal}.
*
* @return -1, 0, or 1 as the value of this {@code BigDecimal}
* is negative, zero, or positive.
*/
public int signum() {
return (intCompact != INFLATED)?
Long.signum(intCompact):
intVal.signum();
}
    /**
     * Returns the <i>scale</i> of this {@code BigDecimal}. If zero
     * or positive, the scale is the number of digits to the right of
     * the decimal point. If negative, the unscaled value of the
     * number is multiplied by ten to the power of the negation of the
     * scale. For example, a scale of {@code -3} means the unscaled
     * value is multiplied by 1000.
     *
     * @return the scale of this {@code BigDecimal}.
     */
    public int scale() {
        // Plain accessor for the stored scale component.
        return scale;
    }
/**
* Returns the <i>precision</i> of this {@code BigDecimal}. (The
* precision is the number of digits in the unscaled value.)
*
* <p>The precision of a zero value is 1.
*
* @return the precision of this {@code BigDecimal}.
* @since 1.5
*/
public int precision() {
int result = precision;
if (result == 0) {
long s = intCompact;
if (s != INFLATED)
result = longDigitLength(s);
else
result = bigDigitLength(intVal);
precision = result;
}
return result;
}
    /**
     * Returns a {@code BigInteger} whose value is the <i>unscaled
     * value</i> of this {@code BigDecimal}. (Computes <code>(this *
     * 10<sup>this.scale()</sup>)</code>.)
     *
     * @return the unscaled value of this {@code BigDecimal}.
     * @since 1.2
     */
    public BigInteger unscaledValue() {
        // inflated() yields the unscaled value in BigInteger form (building
        // it from intCompact when necessary).
        return this.inflated();
    }
    // Rounding Modes
    // Legacy integer rounding-mode constants (values 0-7); each is superseded
    // by the corresponding java.math.RoundingMode value -- see the
    // @deprecated tag on each constant.
    /**
     * Rounding mode to round away from zero. Always increments the
     * digit prior to a nonzero discarded fraction. Note that this rounding
     * mode never decreases the magnitude of the calculated value.
     *
     * @deprecated Use {@link RoundingMode#UP} instead.
     */
    @Deprecated(since="9")
    public static final int ROUND_UP = 0;
    /**
     * Rounding mode to round towards zero. Never increments the digit
     * prior to a discarded fraction (i.e., truncates). Note that this
     * rounding mode never increases the magnitude of the calculated value.
     *
     * @deprecated Use {@link RoundingMode#DOWN} instead.
     */
    @Deprecated(since="9")
    public static final int ROUND_DOWN = 1;
    /**
     * Rounding mode to round towards positive infinity. If the
     * {@code BigDecimal} is positive, behaves as for
     * {@code ROUND_UP}; if negative, behaves as for
     * {@code ROUND_DOWN}. Note that this rounding mode never
     * decreases the calculated value.
     *
     * @deprecated Use {@link RoundingMode#CEILING} instead.
     */
    @Deprecated(since="9")
    public static final int ROUND_CEILING = 2;
    /**
     * Rounding mode to round towards negative infinity. If the
     * {@code BigDecimal} is positive, behave as for
     * {@code ROUND_DOWN}; if negative, behave as for
     * {@code ROUND_UP}. Note that this rounding mode never
     * increases the calculated value.
     *
     * @deprecated Use {@link RoundingMode#FLOOR} instead.
     */
    @Deprecated(since="9")
    public static final int ROUND_FLOOR = 3;
    /**
     * Rounding mode to round towards {@literal "nearest neighbor"}
     * unless both neighbors are equidistant, in which case round up.
     * Behaves as for {@code ROUND_UP} if the discarded fraction is
     * ≥ 0.5; otherwise, behaves as for {@code ROUND_DOWN}. Note
     * that this is the rounding mode that most of us were taught in
     * grade school.
     *
     * @deprecated Use {@link RoundingMode#HALF_UP} instead.
     */
    @Deprecated(since="9")
    public static final int ROUND_HALF_UP = 4;
    /**
     * Rounding mode to round towards {@literal "nearest neighbor"}
     * unless both neighbors are equidistant, in which case round
     * down. Behaves as for {@code ROUND_UP} if the discarded
     * fraction is {@literal >} 0.5; otherwise, behaves as for
     * {@code ROUND_DOWN}.
     *
     * @deprecated Use {@link RoundingMode#HALF_DOWN} instead.
     */
    @Deprecated(since="9")
    public static final int ROUND_HALF_DOWN = 5;
    /**
     * Rounding mode to round towards the {@literal "nearest neighbor"}
     * unless both neighbors are equidistant, in which case, round
     * towards the even neighbor. Behaves as for
     * {@code ROUND_HALF_UP} if the digit to the left of the
     * discarded fraction is odd; behaves as for
     * {@code ROUND_HALF_DOWN} if it's even. Note that this is the
     * rounding mode that minimizes cumulative error when applied
     * repeatedly over a sequence of calculations.
     *
     * @deprecated Use {@link RoundingMode#HALF_EVEN} instead.
     */
    @Deprecated(since="9")
    public static final int ROUND_HALF_EVEN = 6;
    /**
     * Rounding mode to assert that the requested operation has an exact
     * result, hence no rounding is necessary. If this rounding mode is
     * specified on an operation that yields an inexact result, an
     * {@code ArithmeticException} is thrown.
     *
     * @deprecated Use {@link RoundingMode#UNNECESSARY} instead.
     */
    @Deprecated(since="9")
    public static final int ROUND_UNNECESSARY = 7;
    // Scaling/Rounding Operations
    /**
     * Returns a {@code BigDecimal} rounded according to the
     * {@code MathContext} settings. If the precision setting is 0 then
     * no rounding takes place.
     *
     * <p>The effect of this method is identical to that of the
     * {@link #plus(MathContext)} method.
     *
     * @param mc the context to use.
     * @return a {@code BigDecimal} rounded according to the
     *         {@code MathContext} settings.
     * @throws ArithmeticException if the rounding mode is
     *         {@code UNNECESSARY} and the
     *         {@code BigDecimal} operation would require rounding.
     * @see #plus(MathContext)
     * @since 1.5
     */
    public BigDecimal round(MathContext mc) {
        // Rounding to a context is exactly unary plus with that context.
        return plus(mc);
    }
    /**
     * Returns a {@code BigDecimal} whose scale is the specified
     * value, and whose unscaled value is determined by multiplying or
     * dividing this {@code BigDecimal}'s unscaled value by the
     * appropriate power of ten to maintain its overall value. If the
     * scale is reduced by the operation, the unscaled value must be
     * divided (rather than multiplied), and the value may be changed;
     * in this case, the specified rounding mode is applied to the
     * division.
     *
     * @apiNote Since BigDecimal objects are immutable, calls of
     * this method do <em>not</em> result in the original object being
     * modified, contrary to the usual convention of having methods
     * named <code>set<i>X</i></code> mutate field <i>{@code X}</i>.
     * Instead, {@code setScale} returns an object with the proper
     * scale; the returned object may or may not be newly allocated.
     *
     * @param newScale scale of the {@code BigDecimal} value to be returned.
     * @param roundingMode The rounding mode to apply.
     * @return a {@code BigDecimal} whose scale is the specified value,
     *         and whose unscaled value is determined by multiplying or
     *         dividing this {@code BigDecimal}'s unscaled value by the
     *         appropriate power of ten to maintain its overall value.
     * @throws ArithmeticException if {@code roundingMode==UNNECESSARY}
     *         and the specified scaling operation would require
     *         rounding.
     * @see RoundingMode
     * @since 1.5
     */
    public BigDecimal setScale(int newScale, RoundingMode roundingMode) {
        // Delegate to the legacy int-based overload via the enum's old
        // integer constant.
        return setScale(newScale, roundingMode.oldMode);
    }
    /**
     * Returns a {@code BigDecimal} whose scale is the specified
     * value, and whose unscaled value is determined by multiplying or
     * dividing this {@code BigDecimal}'s unscaled value by the
     * appropriate power of ten to maintain its overall value. If the
     * scale is reduced by the operation, the unscaled value must be
     * divided (rather than multiplied), and the value may be changed;
     * in this case, the specified rounding mode is applied to the
     * division.
     *
     * @apiNote Since BigDecimal objects are immutable, calls of
     * this method do <em>not</em> result in the original object being
     * modified, contrary to the usual convention of having methods
     * named <code>set<i>X</i></code> mutate field <i>{@code X}</i>.
     * Instead, {@code setScale} returns an object with the proper
     * scale; the returned object may or may not be newly allocated.
     *
     * @deprecated The method {@link #setScale(int, RoundingMode)} should
     * be used in preference to this legacy method.
     *
     * @param newScale scale of the {@code BigDecimal} value to be returned.
     * @param roundingMode The rounding mode to apply.
     * @return a {@code BigDecimal} whose scale is the specified value,
     *         and whose unscaled value is determined by multiplying or
     *         dividing this {@code BigDecimal}'s unscaled value by the
     *         appropriate power of ten to maintain its overall value.
     * @throws ArithmeticException if {@code roundingMode==ROUND_UNNECESSARY}
     *         and the specified scaling operation would require
     *         rounding.
     * @throws IllegalArgumentException if {@code roundingMode} does not
     *         represent a valid rounding mode.
     * @see #ROUND_UP
     * @see #ROUND_DOWN
     * @see #ROUND_CEILING
     * @see #ROUND_FLOOR
     * @see #ROUND_HALF_UP
     * @see #ROUND_HALF_DOWN
     * @see #ROUND_HALF_EVEN
     * @see #ROUND_UNNECESSARY
     */
    @Deprecated(since="9")
    public BigDecimal setScale(int newScale, int roundingMode) {
        // Validate the legacy rounding-mode constant (range 0..7).
        if (roundingMode < ROUND_UP || roundingMode > ROUND_UNNECESSARY)
            throw new IllegalArgumentException("Invalid rounding mode");
        int oldScale = this.scale;
        if (newScale == oldScale) // easy case
            return this;
        if (this.signum() == 0) // zero can have any scale
            return zeroValueOf(newScale);
        if(this.intCompact!=INFLATED) {
            // Compact representation path.
            long rs = this.intCompact;
            if (newScale > oldScale) {
                // Increasing scale: multiply the unscaled value by 10^raise;
                // exact, so precision grows by exactly 'raise' digits.
                int raise = checkScale((long) newScale - oldScale);
                if ((rs = longMultiplyPowerTen(rs, raise)) != INFLATED) {
                    return valueOf(rs,newScale);
                }
                // Product no longer fits a long: inflate to BigInteger.
                BigInteger rb = bigMultiplyPowerTen(raise);
                return new BigDecimal(rb, INFLATED, newScale, (precision > 0) ? precision + raise : 0);
            } else {
                // newScale < oldScale -- drop some digits
                // Can't predict the precision due to the effect of rounding.
                int drop = checkScale((long) oldScale - newScale);
                if (drop < LONG_TEN_POWERS_TABLE.length) {
                    return divideAndRound(rs, LONG_TEN_POWERS_TABLE[drop], newScale, roundingMode, newScale);
                } else {
                    return divideAndRound(this.inflated(), bigTenToThe(drop), newScale, roundingMode, newScale);
                }
            }
        } else {
            // Inflated (BigInteger) representation path; mirrors the above.
            if (newScale > oldScale) {
                int raise = checkScale((long) newScale - oldScale);
                BigInteger rb = bigMultiplyPowerTen(this.intVal,raise);
                return new BigDecimal(rb, INFLATED, newScale, (precision > 0) ? precision + raise : 0);
            } else {
                // newScale < oldScale -- drop some digits
                // Can't predict the precision due to the effect of rounding.
                int drop = checkScale((long) oldScale - newScale);
                if (drop < LONG_TEN_POWERS_TABLE.length)
                    return divideAndRound(this.intVal, LONG_TEN_POWERS_TABLE[drop], newScale, roundingMode,
                                          newScale);
                else
                    return divideAndRound(this.intVal, bigTenToThe(drop), newScale, roundingMode, newScale);
            }
        }
    }
    /**
     * Returns a {@code BigDecimal} whose scale is the specified
     * value, and whose value is numerically equal to this
     * {@code BigDecimal}'s. Throws an {@code ArithmeticException}
     * if this is not possible.
     *
     * <p>This call is typically used to increase the scale, in which
     * case it is guaranteed that there exists a {@code BigDecimal}
     * of the specified scale and the correct value. The call can
     * also be used to reduce the scale if the caller knows that the
     * {@code BigDecimal} has sufficiently many zeros at the end of
     * its fractional part (i.e., factors of ten in its integer value)
     * to allow for the rescaling without changing its value.
     *
     * <p>This method returns the same result as the two-argument
     * versions of {@code setScale}, but saves the caller the trouble
     * of specifying a rounding mode in cases where it is irrelevant.
     *
     * @apiNote Since {@code BigDecimal} objects are immutable,
     * calls of this method do <em>not</em> result in the original
     * object being modified, contrary to the usual convention of
     * having methods named <code>set<i>X</i></code> mutate field
     * <i>{@code X}</i>. Instead, {@code setScale} returns an
     * object with the proper scale; the returned object may or may
     * not be newly allocated.
     *
     * @param newScale scale of the {@code BigDecimal} value to be returned.
     * @return a {@code BigDecimal} whose scale is the specified value, and
     *         whose unscaled value is determined by multiplying or dividing
     *         this {@code BigDecimal}'s unscaled value by the appropriate
     *         power of ten to maintain its overall value.
     * @throws ArithmeticException if the specified scaling operation would
     *         require rounding.
     * @see #setScale(int, int)
     * @see #setScale(int, RoundingMode)
     */
    public BigDecimal setScale(int newScale) {
        // ROUND_UNNECESSARY makes the rescale throw if it would be inexact.
        return setScale(newScale, ROUND_UNNECESSARY);
    }
// Decimal Point Motion Operations
/**
* Returns a {@code BigDecimal} which is equivalent to this one
* with the decimal point moved {@code n} places to the left. If
* {@code n} is non-negative, the call merely adds {@code n} to
* the scale. If {@code n} is negative, the call is equivalent
* to {@code movePointRight(-n)}. The {@code BigDecimal}
* returned by this call has value <code>(this ×
* 10<sup>-n</sup>)</code> and scale {@code max(this.scale()+n,
* 0)}.
*
* @param n number of places to move the decimal point to the left.
* @return a {@code BigDecimal} which is equivalent to this one with the
* decimal point moved {@code n} places to the left.
* @throws ArithmeticException if scale overflows.
*/
public BigDecimal movePointLeft(int n) {
// Cannot use movePointRight(-n) in case of n==Integer.MIN_VALUE
int newScale = checkScale((long)scale + n);
BigDecimal num = new BigDecimal(intVal, intCompact, newScale, 0);
return num.scale < 0 ? num.setScale(0, ROUND_UNNECESSARY) : num;
}
/**
* Returns a {@code BigDecimal} which is equivalent to this one
* with the decimal point moved {@code n} places to the right.
* If {@code n} is non-negative, the call merely subtracts
* {@code n} from the scale. If {@code n} is negative, the call
* is equivalent to {@code movePointLeft(-n)}. The
* {@code BigDecimal} returned by this call has value <code>(this
* × 10<sup>n</sup>)</code> and scale {@code max(this.scale()-n,
* 0)}.
*
* @param n number of places to move the decimal point to the right.
* @return a {@code BigDecimal} which is equivalent to this one
* with the decimal point moved {@code n} places to the right.
* @throws ArithmeticException if scale overflows.
*/
public BigDecimal movePointRight(int n) {
// Cannot use movePointLeft(-n) in case of n==Integer.MIN_VALUE
int newScale = checkScale((long)scale - n);
BigDecimal num = new BigDecimal(intVal, intCompact, newScale, 0);
return num.scale < 0 ? num.setScale(0, ROUND_UNNECESSARY) : num;
}
/**
* Returns a BigDecimal whose numerical value is equal to
* ({@code this} * 10<sup>n</sup>). The scale of
* the result is {@code (this.scale() - n)}.
*
* @param n the exponent power of ten to scale by
* @return a BigDecimal whose numerical value is equal to
* ({@code this} * 10<sup>n</sup>)
* @throws ArithmeticException if the scale would be
* outside the range of a 32-bit integer.
*
* @since 1.5
*/
public BigDecimal scaleByPowerOfTen(int n) {
return new BigDecimal(intVal, intCompact,
checkScale((long)scale - n), precision);
}
/**
* Returns a {@code BigDecimal} which is numerically equal to
* this one but with any trailing zeros removed from the
* representation. For example, stripping the trailing zeros from
* the {@code BigDecimal} value {@code 600.0}, which has
* [{@code BigInteger}, {@code scale}] components equals to
* [6000, 1], yields {@code 6E2} with [{@code BigInteger},
* {@code scale}] components equals to [6, -2]. If
* this BigDecimal is numerically equal to zero, then
* {@code BigDecimal.ZERO} is returned.
*
* @return a numerically equal {@code BigDecimal} with any
* trailing zeros removed.
* @since 1.5
*/
public BigDecimal stripTrailingZeros() {
if (intCompact == 0 || (intVal != null && intVal.signum() == 0)) {
return BigDecimal.ZERO;
} else if (intCompact != INFLATED) {
return createAndStripZerosToMatchScale(intCompact, scale, Long.MIN_VALUE);
} else {
return createAndStripZerosToMatchScale(intVal, scale, Long.MIN_VALUE);
}
}
// Comparison Operations
/**
* Compares this {@code BigDecimal} with the specified
* {@code BigDecimal}. Two {@code BigDecimal} objects that are
* equal in value but have a different scale (like 2.0 and 2.00)
* are considered equal by this method. This method is provided
* in preference to individual methods for each of the six boolean
* comparison operators ({@literal <}, ==,
* {@literal >}, {@literal >=}, !=, {@literal <=}). The
* suggested idiom for performing these comparisons is:
* {@code (x.compareTo(y)} <<i>op</i>> {@code 0)}, where
* <<i>op</i>> is one of the six comparison operators.
*
* @param val {@code BigDecimal} to which this {@code BigDecimal} is
* to be compared.
* @return -1, 0, or 1 as this {@code BigDecimal} is numerically
* less than, equal to, or greater than {@code val}.
*/
@Override
public int compareTo(BigDecimal val) {
// Quick path for equal scale and non-inflated case.
if (scale == val.scale) {
long xs = intCompact;
long ys = val.intCompact;
if (xs != INFLATED && ys != INFLATED)
return xs != ys ? ((xs > ys) ? 1 : -1) : 0;
}
int xsign = this.signum();
int ysign = val.signum();
if (xsign != ysign)
return (xsign > ysign) ? 1 : -1;
if (xsign == 0)
return 0;
int cmp = compareMagnitude(val);
return (xsign > 0) ? cmp : -cmp;
}
    /**
     * Version of compareTo that ignores sign.
     * Compares |this| with |val|, returning -1, 0, or 1.  Tries hard to
     * avoid inflating either operand to BigInteger form.
     */
    private int compareMagnitude(BigDecimal val) {
        // Match scales, avoid unnecessary inflation
        long ys = val.intCompact;
        long xs = this.intCompact;
        // Zero magnitudes: zero is smaller than any nonzero magnitude.
        if (xs == 0)
            return (ys == 0) ? 0 : -1;
        if (ys == 0)
            return 1;
        long sdiff = (long)this.scale - val.scale;
        if (sdiff != 0) {
            // Avoid matching scales if the (adjusted) exponents differ
            // (both adjusted exponents are offset by the same -1, which
            // cancels in the comparison).
            long xae = (long)this.precision() - this.scale; // [-1]
            long yae = (long)val.precision() - val.scale; // [-1]
            if (xae < yae)
                return -1;
            if (xae > yae)
                return 1;
            // Adjusted exponents are equal: rescale the larger-scale-diff
            // side so both share a scale, staying in long form if possible.
            // Note: the conditions below deliberately mutate xs/ys in place.
            if (sdiff < 0) {
                // The cases sdiff <= Integer.MIN_VALUE intentionally fall through.
                if ( sdiff > Integer.MIN_VALUE &&
                     (xs == INFLATED ||
                      (xs = longMultiplyPowerTen(xs, (int)-sdiff)) == INFLATED) &&
                     ys == INFLATED) {
                    BigInteger rb = bigMultiplyPowerTen((int)-sdiff);
                    return rb.compareMagnitude(val.intVal);
                }
            } else { // sdiff > 0
                // The cases sdiff > Integer.MAX_VALUE intentionally fall through.
                if ( sdiff <= Integer.MAX_VALUE &&
                     (ys == INFLATED ||
                      (ys = longMultiplyPowerTen(ys, (int)sdiff)) == INFLATED) &&
                     xs == INFLATED) {
                    BigInteger rb = val.bigMultiplyPowerTen((int)sdiff);
                    return this.intVal.compareMagnitude(rb);
                }
            }
        }
        // By now either both fit in longs, or the inflated side is known to
        // have the larger magnitude.
        if (xs != INFLATED)
            return (ys != INFLATED) ? longCompareMagnitude(xs, ys) : -1;
        else if (ys != INFLATED)
            return 1;
        else
            return this.intVal.compareMagnitude(val.intVal);
    }
/**
* Compares this {@code BigDecimal} with the specified
* {@code Object} for equality. Unlike {@link
* #compareTo(BigDecimal) compareTo}, this method considers two
* {@code BigDecimal} objects equal only if they are equal in
* value and scale (thus 2.0 is not equal to 2.00 when compared by
* this method).
*
* @param x {@code Object} to which this {@code BigDecimal} is
* to be compared.
* @return {@code true} if and only if the specified {@code Object} is a
* {@code BigDecimal} whose value and scale are equal to this
* {@code BigDecimal}'s.
* @see #compareTo(java.math.BigDecimal)
* @see #hashCode
*/
@Override
public boolean equals(Object x) {
if (!(x instanceof BigDecimal))
return false;
BigDecimal xDec = (BigDecimal) x;
if (x == this)
return true;
if (scale != xDec.scale)
return false;
long s = this.intCompact;
long xs = xDec.intCompact;
if (s != INFLATED) {
if (xs == INFLATED)
xs = compactValFor(xDec.intVal);
return xs == s;
} else if (xs != INFLATED)
return xs == compactValFor(this.intVal);
return this.inflated().equals(xDec.inflated());
}
/**
* Returns the minimum of this {@code BigDecimal} and
* {@code val}.
*
* @param val value with which the minimum is to be computed.
* @return the {@code BigDecimal} whose value is the lesser of this
* {@code BigDecimal} and {@code val}. If they are equal,
* as defined by the {@link #compareTo(BigDecimal) compareTo}
* method, {@code this} is returned.
* @see #compareTo(java.math.BigDecimal)
*/
public BigDecimal min(BigDecimal val) {
return (compareTo(val) <= 0 ? this : val);
}
/**
* Returns the maximum of this {@code BigDecimal} and {@code val}.
*
* @param val value with which the maximum is to be computed.
* @return the {@code BigDecimal} whose value is the greater of this
* {@code BigDecimal} and {@code val}. If they are equal,
* as defined by the {@link #compareTo(BigDecimal) compareTo}
* method, {@code this} is returned.
* @see #compareTo(java.math.BigDecimal)
*/
public BigDecimal max(BigDecimal val) {
return (compareTo(val) >= 0 ? this : val);
}
    // Hash Function
    /**
     * Returns the hash code for this {@code BigDecimal}. Note that
     * two {@code BigDecimal} objects that are numerically equal but
     * differ in scale (like 2.0 and 2.00) will generally <em>not</em>
     * have the same hash code.
     *
     * @return hash code for this {@code BigDecimal}.
     * @see #equals(Object)
     */
    @Override
    public int hashCode() {
        if (intCompact != INFLATED) {
            // Compact path: hash the magnitude by mixing its two 32-bit
            // halves, then fold the original sign and the scale back in.
            // (intCompact != INFLATED == Long.MIN_VALUE, so -intCompact is
            // safe here.)
            long val2 = (intCompact < 0)? -intCompact : intCompact;
            int temp = (int)( ((int)(val2 >>> 32)) * 31 +
                              (val2 & LONG_MASK));
            return 31*((intCompact < 0) ?-temp:temp) + scale;
        } else
            return 31*intVal.hashCode() + scale;
    }
    // Format Converters
    /**
     * Returns the string representation of this {@code BigDecimal},
     * using scientific notation if an exponent is needed.
     *
     * <p>A standard canonical string form of the {@code BigDecimal}
     * is created as though by the following steps: first, the
     * absolute value of the unscaled value of the {@code BigDecimal}
     * is converted to a string in base ten using the characters
     * {@code '0'} through {@code '9'} with no leading zeros (except
     * if its value is zero, in which case a single {@code '0'}
     * character is used).
     *
     * <p>Next, an <i>adjusted exponent</i> is calculated; this is the
     * negated scale, plus the number of characters in the converted
     * unscaled value, less one. That is,
     * {@code -scale+(ulength-1)}, where {@code ulength} is the
     * length of the absolute value of the unscaled value in decimal
     * digits (its <i>precision</i>).
     *
     * <p>If the scale is greater than or equal to zero and the
     * adjusted exponent is greater than or equal to {@code -6}, the
     * number will be converted to a character form without using
     * exponential notation. In this case, if the scale is zero then
     * no decimal point is added and if the scale is positive a
     * decimal point will be inserted with the scale specifying the
     * number of characters to the right of the decimal point.
     * {@code '0'} characters are added to the left of the converted
     * unscaled value as necessary. If no character precedes the
     * decimal point after this insertion then a conventional
     * {@code '0'} character is prefixed.
     *
     * <p>Otherwise (that is, if the scale is negative, or the
     * adjusted exponent is less than {@code -6}), the number will be
     * converted to a character form using exponential notation. In
     * this case, if the converted {@code BigInteger} has more than
     * one digit a decimal point is inserted after the first digit.
     * An exponent in character form is then suffixed to the converted
     * unscaled value (perhaps with inserted decimal point); this
     * comprises the letter {@code 'E'} followed immediately by the
     * adjusted exponent converted to a character form. The latter is
     * in base ten, using the characters {@code '0'} through
     * {@code '9'} with no leading zeros, and is always prefixed by a
     * sign character {@code '-'} (<code>'\u002D'</code>) if the
     * adjusted exponent is negative, {@code '+'}
     * (<code>'\u002B'</code>) otherwise).
     *
     * <p>Finally, the entire string is prefixed by a minus sign
     * character {@code '-'} (<code>'\u002D'</code>) if the unscaled
     * value is less than zero. No sign character is prefixed if the
     * unscaled value is zero or positive.
     *
     * <p><b>Examples:</b>
     * <p>For each representation [<i>unscaled value</i>, <i>scale</i>]
     * on the left, the resulting string is shown on the right.
     * <pre>
     * [123,0] "123"
     * [-123,0] "-123"
     * [123,-1] "1.23E+3"
     * [123,-3] "1.23E+5"
     * [123,1] "12.3"
     * [123,5] "0.00123"
     * [123,10] "1.23E-8"
     * [-123,12] "-1.23E-10"
     * </pre>
     *
     * <b>Notes:</b>
     * <ol>
     *
     * <li>There is a one-to-one mapping between the distinguishable
     * {@code BigDecimal} values and the result of this conversion.
     * That is, every distinguishable {@code BigDecimal} value
     * (unscaled value and scale) has a unique string representation
     * as a result of using {@code toString}. If that string
     * representation is converted back to a {@code BigDecimal} using
     * the {@link #BigDecimal(String)} constructor, then the original
     * value will be recovered.
     *
     * <li>The string produced for a given number is always the same;
     * it is not affected by locale. This means that it can be used
     * as a canonical string representation for exchanging decimal
     * data, or as a key for a Hashtable, etc. Locale-sensitive
     * number formatting and parsing is handled by the {@link
     * java.text.NumberFormat} class and its subclasses.
     *
     * <li>The {@link #toEngineeringString} method may be used for
     * presenting numbers with exponents in engineering notation, and the
     * {@link #setScale(int,RoundingMode) setScale} method may be used for
     * rounding a {@code BigDecimal} so it has a known number of digits after
     * the decimal point.
     *
     * <li>The digit-to-character mapping provided by
     * {@code Character.forDigit} is used.
     *
     * </ol>
     *
     * @return string representation of this {@code BigDecimal}.
     * @see Character#forDigit
     * @see #BigDecimal(java.lang.String)
     */
    @Override
    public String toString() {
        // Single-read lazy cache; the race on stringCache is benign since
        // layoutChars(true) is deterministic, so concurrent writers store
        // equal strings.
        String sc = stringCache;
        if (sc == null) {
            stringCache = sc = layoutChars(true);
        }
        return sc;
    }
    /**
     * Returns a string representation of this {@code BigDecimal},
     * using engineering notation if an exponent is needed.
     *
     * <p>Returns a string that represents the {@code BigDecimal} as
     * described in the {@link #toString()} method, except that if
     * exponential notation is used, the power of ten is adjusted to
     * be a multiple of three (engineering notation) such that the
     * integer part of nonzero values will be in the range 1 through
     * 999. If exponential notation is used for zero values, a
     * decimal point and one or two fractional zero digits are used so
     * that the scale of the zero value is preserved. Note that
     * unlike the output of {@link #toString()}, the output of this
     * method is <em>not</em> guaranteed to recover the same [integer,
     * scale] pair of this {@code BigDecimal} if the output string is
     * converting back to a {@code BigDecimal} using the {@linkplain
     * #BigDecimal(String) string constructor}. The result of this method meets
     * the weaker constraint of always producing a numerically equal
     * result from applying the string constructor to the method's output.
     *
     * @return string representation of this {@code BigDecimal}, using
     *         engineering notation if an exponent is needed.
     * @since 1.5
     */
    public String toEngineeringString() {
        // layoutChars(false) selects engineering (power-of-three exponent)
        // layout; toString() passes true for scientific layout.
        return layoutChars(false);
    }
    /**
     * Returns a string representation of this {@code BigDecimal}
     * without an exponent field. For values with a positive scale,
     * the number of digits to the right of the decimal point is used
     * to indicate scale. For values with a zero or negative scale,
     * the resulting string is generated as if the value were
     * converted to a numerically equal value with zero scale and as
     * if all the trailing zeros of the zero scale value were present
     * in the result.
     *
     * The entire string is prefixed by a minus sign character '-'
     * (<code>'\u002D'</code>) if the unscaled value is less than
     * zero. No sign character is prefixed if the unscaled value is
     * zero or positive.
     *
     * Note that if the result of this method is passed to the
     * {@linkplain #BigDecimal(String) string constructor}, only the
     * numerical value of this {@code BigDecimal} will necessarily be
     * recovered; the representation of the new {@code BigDecimal}
     * may have a different scale. In particular, if this
     * {@code BigDecimal} has a negative scale, the string resulting
     * from this method will have a scale of zero when processed by
     * the string constructor.
     *
     * (This method behaves analogously to the {@code toString}
     * method in 1.4 and earlier releases.)
     *
     * @return a string representation of this {@code BigDecimal}
     *         without an exponent field.
     * @since 1.5
     * @see #toString()
     * @see #toEngineeringString()
     */
    public String toPlainString() {
        // Scale 0: the unscaled value printed as-is is the whole answer.
        if(scale==0) {
            if(intCompact!=INFLATED) {
                return Long.toString(intCompact);
            } else {
                return intVal.toString();
            }
        }
        if(this.scale<0) { // No decimal point
            if(signum()==0) {
                return "0";
            }
            // Negative scale: print the unscaled digits followed by
            // (-scale) trailing zeros.
            int trailingZeros = checkScaleNonZero((-(long)scale));
            StringBuilder buf;
            if(intCompact!=INFLATED) {
                buf = new StringBuilder(20+trailingZeros);
                buf.append(intCompact);
            } else {
                String str = intVal.toString();
                buf = new StringBuilder(str.length()+trailingZeros);
                buf.append(str);
            }
            for (int i = 0; i < trailingZeros; i++) {
                buf.append('0');
            }
            return buf.toString();
        }
        // Positive scale: insert a decimal point into the absolute-value
        // digit string; getValueString re-applies the sign.
        String str ;
        if(intCompact!=INFLATED) {
            str = Long.toString(Math.abs(intCompact));
        } else {
            str = intVal.abs().toString();
        }
        return getValueString(signum(), str, scale);
    }
/*
 * Builds a "digits.digits" plain-notation string from a sign, the
 * absolute-value digit string, and a (positive) scale.
 */
private String getValueString(int signum, String intString, int scale) {
    // Index within intString where the decimal point belongs.
    int dotPos = intString.length() - scale;
    if (dotPos == 0) {
        // Point lands immediately before all digits: 0.xxx form.
        return (signum < 0 ? "-0." : "0.") + intString;
    }
    StringBuilder out;
    if (dotPos > 0) {
        // Point falls inside the digit string: xx.xx form.
        out = new StringBuilder(intString);
        out.insert(dotPos, '.');
        if (signum < 0) {
            out.insert(0, '-');
        }
    } else {
        // Need -dotPos zeros between the point and the first digit.
        out = new StringBuilder(3 - dotPos + intString.length());
        out.append(signum < 0 ? "-0." : "0.");
        for (int i = dotPos; i < 0; i++) {
            out.append('0');
        }
        out.append(intString);
    }
    return out.toString();
}
/**
 * Converts this {@code BigDecimal} to a {@code BigInteger}.
 * This conversion is analogous to the
 * <i>narrowing primitive conversion</i> from {@code double} to
 * {@code long} as defined in
 * <cite>The Java&trade; Language Specification</cite>:
 * any fractional part of this
 * {@code BigDecimal} will be discarded. Note that this
 * conversion can lose information about the precision of the
 * {@code BigDecimal} value.
 * <p>
 * To have an exception thrown if the conversion is inexact (in
 * other words if a nonzero fractional part is discarded), use the
 * {@link #toBigIntegerExact()} method.
 *
 * @return this {@code BigDecimal} converted to a {@code BigInteger}.
 * @jls 5.1.3 Narrowing Primitive Conversion
 */
public BigInteger toBigInteger() {
    // force to an integer, quietly: ROUND_DOWN truncates toward zero,
    // so any fractional part is silently discarded.
    return this.setScale(0, ROUND_DOWN).inflated();
}
/**
 * Converts this {@code BigDecimal} to a {@code BigInteger},
 * checking for lost information. An exception is thrown if this
 * {@code BigDecimal} has a nonzero fractional part.
 *
 * @return this {@code BigDecimal} converted to a {@code BigInteger}.
 * @throws ArithmeticException if {@code this} has a nonzero
 *         fractional part.
 * @since  1.5
 */
public BigInteger toBigIntegerExact() {
    // round to an integer, with Exception if decimal part non-0:
    // ROUND_UNNECESSARY makes setScale throw when rounding would occur.
    return this.setScale(0, ROUND_UNNECESSARY).inflated();
}
/**
 * Converts this {@code BigDecimal} to a {@code long}.
 * This conversion is analogous to the
 * <i>narrowing primitive conversion</i> from {@code double} to
 * {@code short} as defined in
 * <cite>The Java&trade; Language Specification</cite>:
 * any fractional part of this
 * {@code BigDecimal} will be discarded, and if the resulting
 * "{@code BigInteger}" is too big to fit in a
 * {@code long}, only the low-order 64 bits are returned.
 * Note that this conversion can lose information about the
 * overall magnitude and precision of this {@code BigDecimal} value as well
 * as return a result with the opposite sign.
 *
 * @return this {@code BigDecimal} converted to a {@code long}.
 * @jls 5.1.3 Narrowing Primitive Conversion
 */
@Override
public long longValue(){
    // Fast path: the value already lives in the compact long field
    // and there are no fractional digits to discard.
    if (intCompact != INFLATED && scale == 0) {
        return intCompact;
    }
    // Slow path: truncate via BigInteger, keeping the low-order 64 bits.
    return toBigInteger().longValue();
}
/**
 * Converts this {@code BigDecimal} to a {@code long}, checking
 * for lost information. If this {@code BigDecimal} has a
 * nonzero fractional part or is out of the possible range for a
 * {@code long} result then an {@code ArithmeticException} is
 * thrown.
 *
 * @return this {@code BigDecimal} converted to a {@code long}.
 * @throws ArithmeticException if {@code this} has a nonzero
 *         fractional part, or will not fit in a {@code long}.
 * @since  1.5
 */
public long longValueExact() {
    // Fast path: compact form with no fractional digits is already exact.
    if (intCompact != INFLATED && scale == 0)
        return intCompact;
    // If more than 19 digits in integer part it cannot possibly fit
    if ((precision() - scale) > 19) // [OK for negative scale too]
        throw new java.lang.ArithmeticException("Overflow");
    // Fastpath zero and < 1.0 numbers (the latter can be very slow
    // to round if very small)
    if (this.signum() == 0)
        return 0;
    // Magnitude strictly below 1 but nonzero: the fraction cannot be exact.
    if ((this.precision() - this.scale) <= 0)
        throw new ArithmeticException("Rounding necessary");
    // round to an integer, with Exception if decimal part non-0
    BigDecimal num = this.setScale(0, ROUND_UNNECESSARY);
    if (num.precision() >= 19) // need to check carefully
        LongOverflow.check(num);
    return num.inflated().longValue();
}
// Helper that verifies a scale-zero BigDecimal fits in the long range.
private static class LongOverflow {
    /** BigInteger equal to Long.MIN_VALUE. */
    private static final BigInteger LONGMIN = BigInteger.valueOf(Long.MIN_VALUE);
    /** BigInteger equal to Long.MAX_VALUE. */
    private static final BigInteger LONGMAX = BigInteger.valueOf(Long.MAX_VALUE);

    /** Throws ArithmeticException("Overflow") if num lies outside [Long.MIN_VALUE, Long.MAX_VALUE]. */
    public static void check(BigDecimal num) {
        BigInteger unscaled = num.inflated();
        boolean belowMin = unscaled.compareTo(LONGMIN) < 0;
        boolean aboveMax = unscaled.compareTo(LONGMAX) > 0;
        if (belowMin || aboveMax)
            throw new java.lang.ArithmeticException("Overflow");
    }
}
/**
 * Converts this {@code BigDecimal} to an {@code int}.
 * This conversion is analogous to the
 * <i>narrowing primitive conversion</i> from {@code double} to
 * {@code short} as defined in
 * <cite>The Java&trade; Language Specification</cite>:
 * any fractional part of this
 * {@code BigDecimal} will be discarded, and if the resulting
 * "{@code BigInteger}" is too big to fit in an
 * {@code int}, only the low-order 32 bits are returned.
 * Note that this conversion can lose information about the
 * overall magnitude and precision of this {@code BigDecimal}
 * value as well as return a result with the opposite sign.
 *
 * @return this {@code BigDecimal} converted to an {@code int}.
 * @jls 5.1.3 Narrowing Primitive Conversion
 */
@Override
public int intValue() {
    // Fast path: compact representation with no fractional digits;
    // the narrowing cast keeps only the low-order 32 bits.
    if (intCompact != INFLATED && scale == 0) {
        return (int) intCompact;
    }
    return toBigInteger().intValue();
}
/**
 * Converts this {@code BigDecimal} to an {@code int}, checking
 * for lost information. If this {@code BigDecimal} has a
 * nonzero fractional part or is out of the possible range for an
 * {@code int} result then an {@code ArithmeticException} is
 * thrown.
 *
 * @return this {@code BigDecimal} converted to an {@code int}.
 * @throws ArithmeticException if {@code this} has a nonzero
 *         fractional part, or will not fit in an {@code int}.
 * @since  1.5
 */
public int intValueExact() {
    // longValueExact() already rejects any nonzero fractional part
    // and any value outside the long range.
    long wide = this.longValueExact();
    int narrowed = (int) wide;
    if (narrowed != wide)
        throw new java.lang.ArithmeticException("Overflow");
    return narrowed;
}
/**
 * Converts this {@code BigDecimal} to a {@code short}, checking
 * for lost information. If this {@code BigDecimal} has a
 * nonzero fractional part or is out of the possible range for a
 * {@code short} result then an {@code ArithmeticException} is
 * thrown.
 *
 * @return this {@code BigDecimal} converted to a {@code short}.
 * @throws ArithmeticException if {@code this} has a nonzero
 *         fractional part, or will not fit in a {@code short}.
 * @since  1.5
 */
public short shortValueExact() {
    // longValueExact() already rejects any nonzero fractional part
    // and any value outside the long range.
    long wide = this.longValueExact();
    short narrowed = (short) wide;
    if (narrowed != wide)
        throw new java.lang.ArithmeticException("Overflow");
    return narrowed;
}
/**
 * Converts this {@code BigDecimal} to a {@code byte}, checking
 * for lost information. If this {@code BigDecimal} has a
 * nonzero fractional part or is out of the possible range for a
 * {@code byte} result then an {@code ArithmeticException} is
 * thrown.
 *
 * @return this {@code BigDecimal} converted to a {@code byte}.
 * @throws ArithmeticException if {@code this} has a nonzero
 *         fractional part, or will not fit in a {@code byte}.
 * @since  1.5
 */
public byte byteValueExact() {
    // longValueExact() already rejects any nonzero fractional part
    // and any value outside the long range.
    long wide = this.longValueExact();
    byte narrowed = (byte) wide;
    if (narrowed != wide)
        throw new java.lang.ArithmeticException("Overflow");
    return narrowed;
}
/**
 * Converts this {@code BigDecimal} to a {@code float}.
 * This conversion is similar to the
 * <i>narrowing primitive conversion</i> from {@code double} to
 * {@code float} as defined in
 * <cite>The Java&trade; Language Specification</cite>:
 * if this {@code BigDecimal} has too great a
 * magnitude to represent as a {@code float}, it will be
 * converted to {@link Float#NEGATIVE_INFINITY} or {@link
 * Float#POSITIVE_INFINITY} as appropriate. Note that even when
 * the return value is finite, this conversion can lose
 * information about the precision of the {@code BigDecimal}
 * value.
 *
 * @return this {@code BigDecimal} converted to a {@code float}.
 * @jls 5.1.3 Narrowing Primitive Conversion
 */
@Override
public float floatValue(){
    if(intCompact != INFLATED) {
        if (scale == 0) {
            return (float)intCompact;
        } else {
            /*
             * If both intCompact and the scale can be exactly
             * represented as float values, perform a single float
             * multiply or divide to compute the (properly
             * rounded) result.
             */
            // |intCompact| < 2^22 guarantees it converts to float exactly
            // (fits in float's significand).
            if (Math.abs(intCompact) < 1L<<22 ) {
                // Don't have to guard against
                // Math.abs(MIN_VALUE) because of outer check
                // against INFLATED.
                if (scale > 0 && scale < FLOAT_10_POW.length) {
                    return (float)intCompact / FLOAT_10_POW[scale];
                } else if (scale < 0 && scale > -FLOAT_10_POW.length) {
                    return (float)intCompact * FLOAT_10_POW[-scale];
                }
            }
        }
    }
    // Somewhat inefficient, but guaranteed to work.
    return Float.parseFloat(this.toString());
}
/**
 * Converts this {@code BigDecimal} to a {@code double}.
 * This conversion is similar to the
 * <i>narrowing primitive conversion</i> from {@code double} to
 * {@code float} as defined in
 * <cite>The Java&trade; Language Specification</cite>:
 * if this {@code BigDecimal} has too great a
 * magnitude to represent as a {@code double}, it will be
 * converted to {@link Double#NEGATIVE_INFINITY} or {@link
 * Double#POSITIVE_INFINITY} as appropriate. Note that even when
 * the return value is finite, this conversion can lose
 * information about the precision of the {@code BigDecimal}
 * value.
 *
 * @return this {@code BigDecimal} converted to a {@code double}.
 * @jls 5.1.3 Narrowing Primitive Conversion
 */
@Override
public double doubleValue(){
    if(intCompact != INFLATED) {
        if (scale == 0) {
            return (double)intCompact;
        } else {
            /*
             * If both intCompact and the scale can be exactly
             * represented as double values, perform a single
             * double multiply or divide to compute the (properly
             * rounded) result.
             */
            // |intCompact| < 2^52 guarantees it converts to double exactly
            // (fits in double's significand).
            if (Math.abs(intCompact) < 1L<<52 ) {
                // Don't have to guard against
                // Math.abs(MIN_VALUE) because of outer check
                // against INFLATED.
                if (scale > 0 && scale < DOUBLE_10_POW.length) {
                    return (double)intCompact / DOUBLE_10_POW[scale];
                } else if (scale < 0 && scale > -DOUBLE_10_POW.length) {
                    return (double)intCompact * DOUBLE_10_POW[-scale];
                }
            }
        }
    }
    // Somewhat inefficient, but guaranteed to work.
    return Double.parseDouble(this.toString());
}
/**
 * Powers of 10 which can be represented exactly in {@code
 * double}. The table stops at 1.0e22, the largest power of ten
 * included here; entries are indexed by exponent.
 */
private static final double DOUBLE_10_POW[] = {
    1.0e0,  1.0e1,  1.0e2,  1.0e3,  1.0e4,  1.0e5,
    1.0e6,  1.0e7,  1.0e8,  1.0e9,  1.0e10, 1.0e11,
    1.0e12, 1.0e13, 1.0e14, 1.0e15, 1.0e16, 1.0e17,
    1.0e18, 1.0e19, 1.0e20, 1.0e21, 1.0e22
};
/**
 * Powers of 10 which can be represented exactly in {@code
 * float}. The table stops at 1.0e10, the largest power of ten
 * included here; entries are indexed by exponent.
 */
private static final float FLOAT_10_POW[] = {
    1.0e0f, 1.0e1f, 1.0e2f, 1.0e3f, 1.0e4f, 1.0e5f,
    1.0e6f, 1.0e7f, 1.0e8f, 1.0e9f, 1.0e10f
};
/**
 * Returns the size of an ulp, a unit in the last place, of this
 * {@code BigDecimal}.  An ulp of a nonzero {@code BigDecimal}
 * value is the positive distance between this value and the
 * {@code BigDecimal} value next larger in magnitude with the
 * same number of digits.  An ulp of a zero value is numerically
 * equal to 1 with the scale of {@code this}.  The result is
 * stored with the same scale as {@code this} so the result
 * for zero and nonzero values is equal to {@code [1,
 * this.scale()]}.
 *
 * @return the size of an ulp of {@code this}
 * @since 1.5
 */
public BigDecimal ulp() {
    // Builds unscaled value 1 at this value's scale; the trailing argument
    // is presumably the precision (1 digit) — TODO confirm against the
    // three-argument valueOf overload declared elsewhere in this file.
    return BigDecimal.valueOf(1, this.scale(), 1);
}
// Private class to build a string representation for BigDecimal object.
// "StringBuilderHelper" is constructed as a thread local variable so it is
// thread safe. The StringBuilder field acts as a buffer to hold the temporary
// representation of BigDecimal. The cmpCharArray holds all the characters for
// the compact representation of BigDecimal (except for '-' sign' if it is
// negative) if its intCompact field is not INFLATED. It is shared by all
// calls to toString() and its variants in that particular thread.
static class StringBuilderHelper {
    final StringBuilder sb;    // Placeholder for BigDecimal string
    final char[] cmpCharArray; // character array to place the intCompact

    StringBuilderHelper() {
        sb = new StringBuilder();
        // All non negative longs can be made to fit into 19 character array.
        cmpCharArray = new char[19];
    }

    // Accessors.
    // Resets the shared buffer before handing it out, so each use starts empty.
    StringBuilder getStringBuilder() {
        sb.setLength(0);
        return sb;
    }

    char[] getCompactCharArray() {
        return cmpCharArray;
    }

    /**
     * Places characters representing the intCompact in {@code long} into
     * cmpCharArray and returns the offset to the array where the
     * representation starts.
     *
     * @param intCompact the number to put into the cmpCharArray.
     * @return offset to the array where the representation starts.
     * Note: intCompact must be greater or equal to zero.
     */
    int putIntCompact(long intCompact) {
        assert intCompact >= 0;

        long q;
        int r;
        // since we start from the least significant digit, charPos points to
        // the last character in cmpCharArray.
        int charPos = cmpCharArray.length;

        // Get 2 digits/iteration using longs until quotient fits into an int
        while (intCompact > Integer.MAX_VALUE) {
            q = intCompact / 100;
            r = (int)(intCompact - q * 100);
            intCompact = q;
            // DIGIT_ONES/DIGIT_TENS map a remainder 0..99 to its two digits.
            cmpCharArray[--charPos] = DIGIT_ONES[r];
            cmpCharArray[--charPos] = DIGIT_TENS[r];
        }

        // Get 2 digits/iteration using ints when i2 >= 100
        int q2;
        int i2 = (int)intCompact;
        while (i2 >= 100) {
            q2 = i2 / 100;
            r  = i2 - q2 * 100;
            i2 = q2;
            cmpCharArray[--charPos] = DIGIT_ONES[r];
            cmpCharArray[--charPos] = DIGIT_TENS[r];
        }

        // At most two digits remain (0 <= i2 < 100).
        cmpCharArray[--charPos] = DIGIT_ONES[i2];
        if (i2 >= 10)
            cmpCharArray[--charPos] = DIGIT_TENS[i2];

        return charPos;
    }

    // Lookup table: DIGIT_TENS[n] is the tens digit of n for 0 <= n < 100.
    static final char[] DIGIT_TENS = {
        '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
        '1', '1', '1', '1', '1', '1', '1', '1', '1', '1',
        '2', '2', '2', '2', '2', '2', '2', '2', '2', '2',
        '3', '3', '3', '3', '3', '3', '3', '3', '3', '3',
        '4', '4', '4', '4', '4', '4', '4', '4', '4', '4',
        '5', '5', '5', '5', '5', '5', '5', '5', '5', '5',
        '6', '6', '6', '6', '6', '6', '6', '6', '6', '6',
        '7', '7', '7', '7', '7', '7', '7', '7', '7', '7',
        '8', '8', '8', '8', '8', '8', '8', '8', '8', '8',
        '9', '9', '9', '9', '9', '9', '9', '9', '9', '9',
    };

    // Lookup table: DIGIT_ONES[n] is the ones digit of n for 0 <= n < 100.
    static final char[] DIGIT_ONES = {
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
    };
}
/**
 * Lay out this {@code BigDecimal} into a {@code char[]} array.
 * The Java 1.2 equivalent to this was called {@code getValueString}.
 *
 * @param  sci {@code true} for Scientific exponential notation;
 *          {@code false} for Engineering
 * @return string with canonical string representation of this
 *         {@code BigDecimal}
 */
private String layoutChars(boolean sci) {
    if (scale == 0)                      // zero scale is trivial
        return (intCompact != INFLATED) ?
            Long.toString(intCompact):
            intVal.toString();
    if (scale == 2  &&
        intCompact >= 0 && intCompact < Integer.MAX_VALUE) {
        // currency fast path: nonnegative value with exactly two
        // fractional digits, e.g. "12.34".
        int lowInt = (int)intCompact % 100;
        int highInt = (int)intCompact / 100;
        return (Integer.toString(highInt) + '.' +
                StringBuilderHelper.DIGIT_TENS[lowInt] +
                StringBuilderHelper.DIGIT_ONES[lowInt]) ;
    }

    StringBuilderHelper sbHelper = threadLocalStringBuilderHelper.get();
    char[] coeff;
    int offset;  // offset is the starting index for coeff array
    // Get the significand as an absolute value
    if (intCompact != INFLATED) {
        offset = sbHelper.putIntCompact(Math.abs(intCompact));
        coeff  = sbHelper.getCompactCharArray();
    } else {
        offset = 0;
        coeff  = intVal.abs().toString().toCharArray();
    }

    // Construct a buffer, with sufficient capacity for all cases.
    // If E-notation is needed, length will be: +1 if negative, +1
    // if '.' needed, +2 for "E+", + up to 10 for adjusted exponent.
    // Otherwise it could have +1 if negative, plus leading "0.00000"
    StringBuilder buf = sbHelper.getStringBuilder();
    if (signum() < 0)             // prefix '-' if negative
        buf.append('-');
    int coeffLen = coeff.length - offset;
    // adjusted exponent: position of the decimal point relative to the
    // first significant digit (computed in long to avoid int overflow).
    long adjusted = -(long)scale + (coeffLen -1);
    if ((scale >= 0) && (adjusted >= -6)) { // plain number
        int pad = scale - coeffLen;         // count of padding zeros
        if (pad >= 0) {                     // 0.xxx form
            buf.append('0');
            buf.append('.');
            for (; pad>0; pad--) {
                buf.append('0');
            }
            buf.append(coeff, offset, coeffLen);
        } else {                         // xx.xx form
            buf.append(coeff, offset, -pad);
            buf.append('.');
            buf.append(coeff, -pad + offset, scale);
        }
    } else { // E-notation is needed
        if (sci) {                       // Scientific notation
            buf.append(coeff[offset]);   // first character
            if (coeffLen > 1) {          // more to come
                buf.append('.');
                buf.append(coeff, offset + 1, coeffLen - 1);
            }
        } else {                         // Engineering notation
            // sig = number of integer digits needed so the exponent
            // becomes a multiple of three.
            int sig = (int)(adjusted % 3);
            if (sig < 0)
                sig += 3;                // [adjusted was negative]
            adjusted -= sig;             // now a multiple of 3
            sig++;
            if (signum() == 0) {
                switch (sig) {
                case 1:
                    buf.append('0'); // exponent is a multiple of three
                    break;
                case 2:
                    buf.append("0.00");
                    adjusted += 3;
                    break;
                case 3:
                    buf.append("0.0");
                    adjusted += 3;
                    break;
                default:
                    throw new AssertionError("Unexpected sig value " + sig);
                }
            } else if (sig >= coeffLen) {   // significand all in integer
                buf.append(coeff, offset, coeffLen);
                // may need some zeros, too
                for (int i = sig - coeffLen; i > 0; i--) {
                    buf.append('0');
                }
            } else {                     // xx.xxE form
                buf.append(coeff, offset, sig);
                buf.append('.');
                buf.append(coeff, offset + sig, coeffLen - sig);
            }
        }
        if (adjusted != 0) {             // [!sci could have made 0]
            buf.append('E');
            if (adjusted > 0)            // force sign for positive
                buf.append('+');
            buf.append(adjusted);
        }
    }
    return buf.toString();
}
/**
 * Return 10 to the power n, as a {@code BigInteger}.
 *
 * @param  n the power of ten to be returned (>=0)
 * @return a {@code BigInteger} with the value (10<sup>n</sup>)
 */
private static BigInteger bigTenToThe(int n) {
    if (n < 0)
        return BigInteger.ZERO;
    if (n >= BIG_TEN_POWERS_TABLE_MAX) {
        // Beyond the table's maximum growth: compute directly.
        return BigInteger.TEN.pow(n);
    }
    // Read the volatile table reference once for a consistent view.
    BigInteger[] cached = BIG_TEN_POWERS_TABLE;
    return (n < cached.length) ? cached[n] : expandBigIntegerTenPowers(n);
}
/**
 * Expand the BIG_TEN_POWERS_TABLE array to contain at least 10**n.
 *
 * @param n the power of ten to be returned (>=0)
 * @return a {@code BigDecimal} with the value (10<sup>n</sup>) and
 *         in the meantime, the BIG_TEN_POWERS_TABLE array gets
 *         expanded to the size greater than n.
 */
private static BigInteger expandBigIntegerTenPowers(int n) {
    synchronized(BigDecimal.class) {
        BigInteger[] pows = BIG_TEN_POWERS_TABLE;
        int curLen = pows.length;
        // The following comparison and the above synchronized statement is
        // to prevent multiple threads from expanding the same array.
        if (curLen <= n) {
            // Double the capacity until it exceeds n.
            int newLen = curLen << 1;
            while (newLen <= n) {
                newLen <<= 1;
            }
            pows = Arrays.copyOf(pows, newLen);
            // Fill the new tail: each entry is ten times its predecessor.
            for (int i = curLen; i < newLen; i++) {
                pows[i] = pows[i - 1].multiply(BigInteger.TEN);
            }
            // Based on the following facts:
            // 1. pows is a private local variable;
            // 2. the following store is a volatile store.
            // the newly created array elements can be safely published.
            BIG_TEN_POWERS_TABLE = pows;
        }
        return pows[n];
    }
}
/**
 * Powers of ten representable as a {@code long}:
 * LONG_TEN_POWERS_TABLE[i] == 10<sup>i</sup> for 0 &lt;= i &lt;= 18.
 */
private static final long[] LONG_TEN_POWERS_TABLE = {
    1,                     // 0 / 10^0
    10,                    // 1 / 10^1
    100,                   // 2 / 10^2
    1000,                  // 3 / 10^3
    10000,                 // 4 / 10^4
    100000,                // 5 / 10^5
    1000000,               // 6 / 10^6
    10000000,              // 7 / 10^7
    100000000,             // 8 / 10^8
    1000000000,            // 9 / 10^9
    10000000000L,          // 10 / 10^10
    100000000000L,         // 11 / 10^11
    1000000000000L,        // 12 / 10^12
    10000000000000L,       // 13 / 10^13
    100000000000000L,      // 14 / 10^14
    1000000000000000L,     // 15 / 10^15
    10000000000000000L,    // 16 / 10^16
    100000000000000000L,   // 17 / 10^17
    1000000000000000000L   // 18 / 10^18
};
/**
 * Cache of powers of ten as {@code BigInteger}s:
 * BIG_TEN_POWERS_TABLE[i] == 10<sup>i</sup>. Declared volatile and
 * grown on demand by expandBigIntegerTenPowers (safe publication via
 * the volatile store there).
 */
private static volatile BigInteger BIG_TEN_POWERS_TABLE[] = {
    BigInteger.ONE,
    BigInteger.valueOf(10),
    BigInteger.valueOf(100),
    BigInteger.valueOf(1000),
    BigInteger.valueOf(10000),
    BigInteger.valueOf(100000),
    BigInteger.valueOf(1000000),
    BigInteger.valueOf(10000000),
    BigInteger.valueOf(100000000),
    BigInteger.valueOf(1000000000),
    BigInteger.valueOf(10000000000L),
    BigInteger.valueOf(100000000000L),
    BigInteger.valueOf(1000000000000L),
    BigInteger.valueOf(10000000000000L),
    BigInteger.valueOf(100000000000000L),
    BigInteger.valueOf(1000000000000000L),
    BigInteger.valueOf(10000000000000000L),
    BigInteger.valueOf(100000000000000000L),
    BigInteger.valueOf(1000000000000000000L)
};
// Initial length of the BigInteger power-of-ten cache (19 entries above).
private static final int BIG_TEN_POWERS_TABLE_INITLEN =
    BIG_TEN_POWERS_TABLE.length;
// Upper bound on cached powers; bigTenToThe computes larger powers directly.
private static final int BIG_TEN_POWERS_TABLE_MAX =
    16 * BIG_TEN_POWERS_TABLE_INITLEN;
/**
 * Overflow thresholds: THRESHOLDS_TABLE[i] == Long.MAX_VALUE / 10^i.
 * A value v can be multiplied by 10^i without long overflow when
 * |v| <= THRESHOLDS_TABLE[i] (used by longMultiplyPowerTen).
 */
private static final long THRESHOLDS_TABLE[] = {
    Long.MAX_VALUE,                     // 0
    Long.MAX_VALUE/10L,                 // 1
    Long.MAX_VALUE/100L,                // 2
    Long.MAX_VALUE/1000L,               // 3
    Long.MAX_VALUE/10000L,              // 4
    Long.MAX_VALUE/100000L,             // 5
    Long.MAX_VALUE/1000000L,            // 6
    Long.MAX_VALUE/10000000L,           // 7
    Long.MAX_VALUE/100000000L,          // 8
    Long.MAX_VALUE/1000000000L,         // 9
    Long.MAX_VALUE/10000000000L,        // 10
    Long.MAX_VALUE/100000000000L,       // 11
    Long.MAX_VALUE/1000000000000L,      // 12
    Long.MAX_VALUE/10000000000000L,     // 13
    Long.MAX_VALUE/100000000000000L,    // 14
    Long.MAX_VALUE/1000000000000000L,   // 15
    Long.MAX_VALUE/10000000000000000L,  // 16
    Long.MAX_VALUE/100000000000000000L, // 17
    Long.MAX_VALUE/1000000000000000000L // 18
};
/**
 * Compute val * 10 ^ n; return this product if it is
 * representable as a long, INFLATED otherwise.
 */
private static long longMultiplyPowerTen(long val, int n) {
    // Scaling zero, or scaling by a non-positive power, changes nothing here.
    if (val == 0 || n <= 0)
        return val;
    long[] powers = LONG_TEN_POWERS_TABLE;
    long[] limits = THRESHOLDS_TABLE;
    if (n >= powers.length || n >= limits.length)
        return INFLATED; // 10^n itself is outside the long range
    long tenPow = powers[n];
    if (val == 1)
        return tenPow;
    if (Math.abs(val) > limits[n])
        return INFLATED; // the product would overflow a long
    return val * tenPow;
}
/**
 * Compute this * 10 ^ n.
 * Needed mainly to allow special casing to trap zero value
 */
private BigInteger bigMultiplyPowerTen(int n) {
    // Non-positive powers leave the unscaled value untouched.
    if (n <= 0)
        return this.inflated();
    BigInteger tenPow = bigTenToThe(n);
    return (intCompact != INFLATED)
        ? tenPow.multiply(intCompact)
        : intVal.multiply(tenPow);
}
/**
 * Returns appropriate BigInteger from intVal field if intVal is
 * null, i.e. the compact representation is in use.
 */
private BigInteger inflated() {
    // When the compact form is in use, intVal is null; build a BigInteger
    // from the compact long on demand.
    return (intVal == null) ? BigInteger.valueOf(intCompact) : intVal;
}
/**
 * Match the scales of two {@code BigDecimal}s to align their
 * least significant digits.
 *
 * <p>If the scales of val[0] and val[1] differ, rescale
 * (non-destructively) the lower-scaled {@code BigDecimal} so
 * they match.  That is, the lower-scaled reference will be
 * replaced by a reference to a new object with the same scale as
 * the other {@code BigDecimal}.
 *
 * @param  val array of two elements referring to the two
 *         {@code BigDecimal}s to be aligned.
 */
private static void matchScale(BigDecimal[] val) {
    BigDecimal first = val[0];
    BigDecimal second = val[1];
    // Raising a scale only appends zeros, so ROUND_UNNECESSARY never throws.
    if (first.scale < second.scale) {
        val[0] = first.setScale(second.scale, ROUND_UNNECESSARY);
    } else if (second.scale < first.scale) {
        val[1] = second.setScale(first.scale, ROUND_UNNECESSARY);
    }
}
// Lazily-initialized holder for Unsafe-based field writers, used by the
// serialization methods below (readObject / writeObject) to set the
// intCompact and intVal fields without ordinary assignment.
private static class UnsafeHolder {
    private static final jdk.internal.misc.Unsafe unsafe;
    private static final long intCompactOffset;  // field offset of BigDecimal.intCompact
    private static final long intValOffset;      // field offset of BigDecimal.intVal
    static {
        try {
            unsafe = jdk.internal.misc.Unsafe.getUnsafe();
            intCompactOffset = unsafe.objectFieldOffset
                (BigDecimal.class.getDeclaredField("intCompact"));
            intValOffset = unsafe.objectFieldOffset
                (BigDecimal.class.getDeclaredField("intVal"));
        } catch (Exception ex) {
            // Reflection failure here is unrecoverable; fail class init.
            throw new ExceptionInInitializerError(ex);
        }
    }
    // Plain (non-volatile) write of the intCompact field.
    static void setIntCompact(BigDecimal bd, long val) {
        unsafe.putLong(bd, intCompactOffset, val);
    }
    // Volatile write of the intVal field for safe publication.
    static void setIntValVolatile(BigDecimal bd, BigInteger val) {
        unsafe.putObjectVolatile(bd, intValOffset, val);
    }
}
/**
 * Reconstitute the {@code BigDecimal} instance from a stream (that is,
 * deserialize it).
 *
 * @param s the stream being read.
 * @throws java.io.StreamCorruptedException if the serialized form
 *         carries a null intVal.
 */
private void readObject(java.io.ObjectInputStream s)
    throws java.io.IOException, ClassNotFoundException {
    // Read in all fields
    s.defaultReadObject();
    // validate possibly bad fields
    if (intVal == null) {
        String message = "BigDecimal: null intVal in stream";
        throw new java.io.StreamCorruptedException(message);
        // [all values of scale are now allowed]
    }
    // Recompute the transient compact form from the deserialized intVal.
    UnsafeHolder.setIntCompact(this, compactValFor(intVal));
}
/**
 * Serialize this {@code BigDecimal} to the stream in question
 *
 * @param s the stream to serialize to.
 */
private void writeObject(java.io.ObjectOutputStream s)
    throws java.io.IOException {
    // Must inflate to maintain compatible serial form: the serial form
    // always carries intVal, even when the compact representation is in use.
    if (this.intVal == null)
        UnsafeHolder.setIntValVolatile(this, BigInteger.valueOf(this.intCompact));
    // Could reset intVal back to null if it has to be set.
    s.defaultWriteObject();
}
/**
 * Returns the length of the absolute value of a {@code long}, in decimal
 * digits.
 *
 * @param x the {@code long}
 * @return the length of the unscaled value, in decimal digits.
 */
static int longDigitLength(long x) {
    /*
     * As described in "Bit Twiddling Hacks" by Sean Anderson,
     * (http://graphics.stanford.edu/~seander/bithacks.html)
     * integer log 10 of x is within 1 of (1233/4096)* (1 +
     * integer log 2 of x). The fraction 1233/4096 approximates
     * log10(2). So we first do a version of log2 (a variant of
     * Long class with pre-checks and opposite directionality) and
     * then scale and check against powers table. This is a little
     * simpler in present context than the version in Hacker's
     * Delight sec 11-4. Adding one to bit length allows comparing
     * downward from the LONG_TEN_POWERS_TABLE that we need
     * anyway.
     */
    assert x != BigDecimal.INFLATED;
    if (x < 0)
        x = -x;    // safe: INFLATED (== Long.MIN_VALUE) excluded by assert
    if (x < 10) // must screen for 0, might as well 10
        return 1;
    // Approximate digit count from the bit length (see comment above).
    int r = ((64 - Long.numberOfLeadingZeros(x) + 1) * 1233) >>> 12;
    long[] tab = LONG_TEN_POWERS_TABLE;
    // if r >= length, must have max possible digits for long
    return (r >= tab.length || x < tab[r]) ? r : r + 1;
}
/**
 * Returns the length of the absolute value of a BigInteger, in
 * decimal digits.
 *
 * @param b the BigInteger
 * @return the length of the unscaled value, in decimal digits
 */
private static int bigDigitLength(BigInteger b) {
    /*
     * Same idea as the long version, but we need a better
     * approximation of log10(2). Using 646456993/2^31
     * is accurate up to max possible reported bitLength.
     */
    if (b.signum == 0)
        return 1;   // zero prints as a single digit
    // Estimate digit count from the bit length, then correct by at most one
    // using an exact magnitude comparison against 10^r.
    int r = (int)((((long)b.bitLength() + 1) * 646456993) >>> 31);
    return b.compareMagnitude(bigTenToThe(r)) < 0? r : r+1;
}
/**
 * Check a scale for Underflow or Overflow.  If this BigDecimal is
 * nonzero, throw an exception if the scale is out of range. If this
 * is zero, saturate the scale to the extreme value of the right
 * sign if the scale is out of range.
 *
 * @param val The new scale.
 * @throws ArithmeticException (overflow or underflow) if the new
 *         scale is out of range.
 * @return validated scale as an int.
 */
private int checkScale(long val) {
    int narrowed = (int) val;
    if (narrowed == val)
        return narrowed;          // fits in an int: nothing to check
    // Saturate toward the overflowed direction.
    narrowed = (val > Integer.MAX_VALUE) ? Integer.MAX_VALUE : Integer.MIN_VALUE;
    // Only a nonzero value makes an out-of-range scale an error.
    if (intCompact != 0) {
        BigInteger big = intVal;
        if (big == null || big.signum() != 0)
            throw new ArithmeticException(narrowed > 0 ? "Underflow" : "Overflow");
    }
    return narrowed;
}
/**
 * Returns the compact value for given {@code BigInteger}, or
 * INFLATED if too big. Relies on internal representation of
 * {@code BigInteger}.
 */
private static long compactValFor(BigInteger b) {
    int[] mag = b.mag;
    int len = mag.length;
    if (len == 0)
        return 0; // zero magnitude
    int hi = mag[0];
    // More than two words, or two words with the high bit set, exceeds
    // the 63-bit magnitude a long can carry.
    if (len > 2 || (len == 2 && hi < 0))
        return INFLATED;

    long mag64;
    if (len == 2)
        mag64 = (((long) hi) << 32) + (((long) mag[1]) & LONG_MASK);
    else
        mag64 = ((long) hi) & LONG_MASK;

    return (b.signum < 0) ? -mag64 : mag64;
}
// Compares |x| with |y|, returning -1, 0 or 1. (Note: callers must not
// pass Long.MIN_VALUE, whose negation overflows; original behavior kept.)
private static int longCompareMagnitude(long x, long y) {
    long ax = (x < 0) ? -x : x;
    long ay = (y < 0) ? -y : y;
    if (ax == ay)
        return 0;
    return (ax < ay) ? -1 : 1;
}
// Narrows a long to an int, clamping to Integer.MIN_VALUE/MAX_VALUE
// when the value does not fit.
private static int saturateLong(long s) {
    int narrowed = (int) s;
    if (narrowed == s)
        return narrowed;
    return (s < 0) ? Integer.MIN_VALUE : Integer.MAX_VALUE;
}
/*
 * Internal printing routine: debugging aid used by audit() below to dump
 * a BigDecimal's four fields to System.err before raising an assertion.
 */
private static void print(String name, BigDecimal bd) {
    System.err.format("%s:\tintCompact %d\tintVal %d\tscale %d\tprecision %d%n",
                      name,
                      bd.intCompact,
                      bd.intVal,
                      bd.scale,
                      bd.precision);
}
/**
 * Check internal invariants of this BigDecimal.  These invariants
 * include:
 *
 * <ul>
 *
 * <li>The object must be initialized; either intCompact must not be
 * INFLATED or intVal is non-null.  Both of these conditions may
 * be true.
 *
 * <li>If both intCompact and intVal are set, their values must be
 * consistent.
 *
 * <li>If precision is nonzero, it must have the right value.
 * </ul>
 *
 * Note: Since this is an audit method, we are not supposed to change the
 * state of this BigDecimal object.
 *
 * @return {@code this}, so the call can be chained.
 */
private BigDecimal audit() {
    if (intCompact == INFLATED) {
        // Inflated form: intVal must carry the value.
        if (intVal == null) {
            print("audit", this);
            throw new AssertionError("null intVal");
        }
        // Check precision
        if (precision > 0 && precision != bigDigitLength(intVal)) {
            print("audit", this);
            throw new AssertionError("precision mismatch");
        }
    } else {
        // Compact form: if intVal is also set, it must agree with intCompact.
        if (intVal != null) {
            long val = intVal.longValue();
            if (val != intCompact) {
                print("audit", this);
                throw new AssertionError("Inconsistent state, intCompact=" +
                                         intCompact + "\t intVal=" + val);
            }
        }
        // Check precision
        if (precision > 0 && precision != longDigitLength(intCompact)) {
            print("audit", this);
            throw new AssertionError("precision mismatch");
        }
    }
    return this;
}
/* the same as checkScale where value!=0: never saturates, always throws
 * when the long scale does not fit in an int */
private static int checkScaleNonZero(long val) {
    int narrowed = (int) val;
    if (narrowed != val)
        throw new ArithmeticException(narrowed > 0 ? "Underflow" : "Overflow");
    return narrowed;
}
// Static variant of checkScale: validates a candidate scale for a value
// given by its compact long form; saturates only when the value is zero.
private static int checkScale(long intCompact, long val) {
    int narrowed = (int) val;
    if (narrowed == val)
        return narrowed;
    narrowed = (val > Integer.MAX_VALUE) ? Integer.MAX_VALUE : Integer.MIN_VALUE;
    if (intCompact != 0)
        throw new ArithmeticException(narrowed > 0 ? "Underflow" : "Overflow");
    return narrowed;
}
// Static variant of checkScale: validates a candidate scale for a value
// given as a BigInteger; saturates only when the value is zero.
private static int checkScale(BigInteger intVal, long val) {
    int narrowed = (int) val;
    if (narrowed == val)
        return narrowed;
    narrowed = (val > Integer.MAX_VALUE) ? Integer.MAX_VALUE : Integer.MIN_VALUE;
    if (intVal.signum() != 0)
        throw new ArithmeticException(narrowed > 0 ? "Underflow" : "Overflow");
    return narrowed;
}
/**
 * Returns a {@code BigDecimal} rounded according to the MathContext
 * settings;
 * If rounding is needed a new {@code BigDecimal} is created and returned.
 *
 * @param val the value to be rounded
 * @param mc the context to use.
 * @return a {@code BigDecimal} rounded according to the MathContext
 *         settings.  May return {@code value}, if no rounding needed.
 * @throws ArithmeticException if the rounding mode is
 *         {@code RoundingMode.UNNECESSARY} and the
 *         result is inexact.
 */
private static BigDecimal doRound(BigDecimal val, MathContext mc) {
    int mcp = mc.precision;
    boolean wasDivided = false;
    // mcp == 0 means unlimited precision: nothing to do.
    if (mcp > 0) {
        BigInteger intVal = val.intVal;
        long compactVal = val.intCompact;
        int scale = val.scale;
        int prec = val.precision();
        int mode = mc.roundingMode.oldMode;
        int drop;
        if (compactVal == INFLATED) {
            // Shed excess digits from the BigInteger form; rounding can add a
            // carry digit, so loop until the precision target is met.
            drop = prec - mcp;
            while (drop > 0) {
                scale = checkScaleNonZero((long) scale - drop);
                intVal = divideAndRoundByTenPow(intVal, drop, mode);
                wasDivided = true;
                compactVal = compactValFor(intVal);
                if (compactVal != INFLATED) {
                    // Value now fits in a long: switch to the compact loop below.
                    prec = longDigitLength(compactVal);
                    break;
                }
                prec = bigDigitLength(intVal);
                drop = prec - mcp;
            }
        }
        if (compactVal != INFLATED) {
            drop = prec - mcp;  // drop can't be more than 18
            while (drop > 0) {
                scale = checkScaleNonZero((long) scale - drop);
                compactVal = divideAndRound(compactVal, LONG_TEN_POWERS_TABLE[drop], mc.roundingMode.oldMode);
                wasDivided = true;
                prec = longDigitLength(compactVal);
                drop = prec - mcp;
                intVal = null;  // compact form is now authoritative
            }
        }
        return wasDivided ? new BigDecimal(intVal,compactVal,scale,prec) : val;
    }
    return val;
}
/*
 * Returns a {@code BigDecimal} created from {@code long} value with
 * given scale rounded according to the MathContext settings
 */
private static BigDecimal doRound(long compactVal, int scale, MathContext mc) {
    int mcp = mc.precision;
    // A long holds at most 19 digits, so a target precision of 19+ never
    // requires dropping digits; mcp == 0 means unlimited precision.
    if (mcp > 0 && mcp < 19) {
        int prec = longDigitLength(compactVal);
        int drop = prec - mcp;  // drop can't be more than 18
        // Rounding may produce a carry digit, so repeat until prec <= mcp.
        while (drop > 0) {
            scale = checkScaleNonZero((long) scale - drop);
            compactVal = divideAndRound(compactVal, LONG_TEN_POWERS_TABLE[drop], mc.roundingMode.oldMode);
            prec = longDigitLength(compactVal);
            drop = prec - mcp;
        }
        return valueOf(compactVal, scale, prec);
    }
    return valueOf(compactVal, scale);
}
/*
 * Returns a {@code BigDecimal} created from {@code BigInteger} value with
 * given scale rounded according to the MathContext settings
 */
private static BigDecimal doRound(BigInteger intVal, int scale, MathContext mc) {
int mcp = mc.precision;
int prec = 0;
// mcp == 0 means unlimited precision: fall through and wrap intVal as-is.
if (mcp > 0) {
long compactVal = compactValFor(intVal);
int mode = mc.roundingMode.oldMode;
int drop;
if (compactVal == INFLATED) {
// Too big for a long: round in BigInteger space; a carry on round-up
// can add a digit, so re-measure and loop.
prec = bigDigitLength(intVal);
drop = prec - mcp;
while (drop > 0) {
scale = checkScaleNonZero((long) scale - drop);
intVal = divideAndRoundByTenPow(intVal, drop, mode);
compactVal = compactValFor(intVal);
if (compactVal != INFLATED) {
// Now fits in a long; finish below in the compact loop.
break;
}
prec = bigDigitLength(intVal);
drop = prec - mcp;
}
}
if (compactVal != INFLATED) {
prec = longDigitLength(compactVal);
drop = prec - mcp; // drop can't be more than 18
while (drop > 0) {
scale = checkScaleNonZero((long) scale - drop);
compactVal = divideAndRound(compactVal, LONG_TEN_POWERS_TABLE[drop], mc.roundingMode.oldMode);
prec = longDigitLength(compactVal);
drop = prec - mcp;
}
return valueOf(compactVal,scale,prec);
}
}
// Unlimited precision, or value stayed INFLATED: carry the BigInteger.
return new BigDecimal(intVal,INFLATED,scale,prec);
}
/*
 * Divides a {@code BigInteger} value by 10^tenPow, rounding according to
 * the given rounding mode. Uses the long power-of-ten table when the
 * exponent fits, otherwise a full BigInteger power of ten.
 */
private static BigInteger divideAndRoundByTenPow(BigInteger intVal, int tenPow, int roundingMode) {
    return (tenPow < LONG_TEN_POWERS_TABLE.length)
            ? divideAndRound(intVal, LONG_TEN_POWERS_TABLE[tenPow], roundingMode)
            : divideAndRound(intVal, bigTenToThe(tenPow), roundingMode);
}
/**
 * Internally used for division operation for division {@code long} by
 * {@code long}.
 * The returned {@code BigDecimal} object is the quotient whose scale is set
 * to the passed in scale. If the remainder is not zero, it will be rounded
 * based on the passed in roundingMode. Also, if the remainder is zero and
 * the last parameter, i.e. preferredScale is NOT equal to scale, the
 * trailing zeros of the result is stripped to match the preferredScale.
 */
private static BigDecimal divideAndRound(long ldividend, long ldivisor, int scale, int roundingMode,
int preferredScale) {
int qsign; // quotient sign
long q = ldividend / ldivisor; // store quotient in long
// ROUND_DOWN is plain truncation; with no scale adjustment needed we are done.
if (roundingMode == ROUND_DOWN && scale == preferredScale)
return valueOf(q, scale);
long r = ldividend % ldivisor; // store remainder in long
// Quotient is positive exactly when dividend and divisor share a sign.
qsign = ((ldividend < 0) == (ldivisor < 0)) ? 1 : -1;
if (r != 0) {
// Inexact: decide whether to step the quotient away from zero.
boolean increment = needIncrement(ldivisor, roundingMode, qsign, q, r);
return valueOf((increment ? q + qsign : q), scale);
} else {
// Exact quotient: optionally shed trailing zeros toward preferredScale.
if (preferredScale != scale)
return createAndStripZerosToMatchScale(q, scale, preferredScale);
else
return valueOf(q, scale);
}
}
/**
 * Divides {@code ldividend} by {@code ldivisor}, rounding the quotient
 * according to the passed-in rounding mode.
 */
private static long divideAndRound(long ldividend, long ldivisor, int roundingMode) {
    long quotient = ldividend / ldivisor;
    if (roundingMode == ROUND_DOWN) {
        // Truncation: the raw quotient is already correct.
        return quotient;
    }
    long rem = ldividend % ldivisor;
    if (rem == 0) {
        return quotient; // exact division needs no rounding
    }
    // Quotient is positive exactly when the operands share a sign.
    int qsign = ((ldividend < 0) == (ldivisor < 0)) ? 1 : -1;
    return needIncrement(ldivisor, roundingMode, qsign, quotient, rem)
            ? quotient + qsign
            : quotient;
}
/**
 * Shared logic of need increment computation.
 *
 * Decides whether a truncated quotient must be stepped one unit away from
 * zero, given the rounding mode, the quotient sign, the comparison of the
 * remainder against half the divisor ({@code cmpFracHalf}), and whether
 * the truncated quotient is odd (used only by HALF_EVEN).
 */
private static boolean commonNeedIncrement(int roundingMode, int qsign,
int cmpFracHalf, boolean oddQuot) {
switch(roundingMode) {
case ROUND_UNNECESSARY:
// Caller asserted exactness; any nonzero remainder is an error.
throw new ArithmeticException("Rounding necessary");
case ROUND_UP: // Away from zero
return true;
case ROUND_DOWN: // Towards zero
return false;
case ROUND_CEILING: // Towards +infinity
return qsign > 0;
case ROUND_FLOOR: // Towards -infinity
return qsign < 0;
default: // Some kind of half-way rounding
assert roundingMode >= ROUND_HALF_UP &&
roundingMode <= ROUND_HALF_EVEN: "Unexpected rounding mode" + RoundingMode.valueOf(roundingMode);
if (cmpFracHalf < 0 ) // We're closer to higher digit
return false;
else if (cmpFracHalf > 0 ) // We're closer to lower digit
return true;
else { // half-way
assert cmpFracHalf == 0;
switch(roundingMode) {
case ROUND_HALF_DOWN:
return false;
case ROUND_HALF_UP:
return true;
case ROUND_HALF_EVEN:
// Tie goes to the even neighbor: bump only if the quotient is odd.
return oddQuot;
default:
throw new AssertionError("Unexpected rounding mode" + roundingMode);
}
}
}
}
/**
 * Tests if quotient has to be incremented according the roundingMode
 * (long-remainder form: compares |2*r| against |divisor|).
 */
private static boolean needIncrement(long ldivisor, int roundingMode,
int qsign, long q, long r) {
assert r != 0L;
int cmpFracHalf;
// Guard against 2*r overflowing: outside (HALF_LONG_MIN_VALUE,
// HALF_LONG_MAX_VALUE] the doubled remainder cannot fit in a long,
// which already implies |2r| > |divisor|.
if (r <= HALF_LONG_MIN_VALUE || r > HALF_LONG_MAX_VALUE) {
cmpFracHalf = 1; // 2 * r can't fit into long
} else {
cmpFracHalf = longCompareMagnitude(2 * r, ldivisor);
}
return commonNeedIncrement(roundingMode, qsign, cmpFracHalf, (q & 1L) != 0L);
}
/**
 * Divides {@code BigInteger} value by {@code long} value and
 * do rounding based on the passed in roundingMode.
 */
private static BigInteger divideAndRound(BigInteger bdividend, long ldivisor, int roundingMode) {
// Descend into mutables for faster remainder checks
MutableBigInteger mdividend = new MutableBigInteger(bdividend.mag);
// store quotient
MutableBigInteger mq = new MutableBigInteger();
// store quotient & remainder in long
long r = mdividend.divide(ldivisor, mq);
// record remainder is zero or not
boolean isRemainderZero = (r == 0);
// quotient sign
int qsign = (ldivisor < 0) ? -bdividend.signum : bdividend.signum;
if (!isRemainderZero) {
// Inexact: magnitude-only arithmetic, so an increment always moves
// the (unsigned) quotient up; the sign is applied at conversion.
if(needIncrement(ldivisor, roundingMode, qsign, mq, r)) {
mq.add(MutableBigInteger.ONE);
}
}
return mq.toBigInteger(qsign);
}
/**
 * Internally used for division operation for division {@code BigInteger}
 * by {@code long}.
 * The returned {@code BigDecimal} object is the quotient whose scale is set
 * to the passed in scale. If the remainder is not zero, it will be rounded
 * based on the passed in roundingMode. Also, if the remainder is zero and
 * the last parameter, i.e. preferredScale is NOT equal to scale, the
 * trailing zeros of the result is stripped to match the preferredScale.
 */
private static BigDecimal divideAndRound(BigInteger bdividend,
long ldivisor, int scale, int roundingMode, int preferredScale) {
// Descend into mutables for faster remainder checks
MutableBigInteger mdividend = new MutableBigInteger(bdividend.mag);
// store quotient
MutableBigInteger mq = new MutableBigInteger();
// store quotient & remainder in long
long r = mdividend.divide(ldivisor, mq);
// record remainder is zero or not
boolean isRemainderZero = (r == 0);
// quotient sign
int qsign = (ldivisor < 0) ? -bdividend.signum : bdividend.signum;
if (!isRemainderZero) {
if(needIncrement(ldivisor, roundingMode, qsign, mq, r)) {
mq.add(MutableBigInteger.ONE);
}
return mq.toBigDecimal(qsign, scale);
} else {
if (preferredScale != scale) {
// Exact result: try to strip trailing zeros in the cheap compact
// representation first, falling back to BigInteger when too large.
long compactVal = mq.toCompactValue(qsign);
if(compactVal!=INFLATED) {
return createAndStripZerosToMatchScale(compactVal, scale, preferredScale);
}
BigInteger intVal = mq.toBigInteger(qsign);
return createAndStripZerosToMatchScale(intVal,scale, preferredScale);
} else {
return mq.toBigDecimal(qsign, scale);
}
}
}
/**
 * Tests if quotient has to be incremented according the roundingMode
 * (MutableBigInteger-quotient form; remainder still fits in a long).
 */
private static boolean needIncrement(long ldivisor, int roundingMode,
int qsign, MutableBigInteger mq, long r) {
assert r != 0L;
int cmpFracHalf;
// Same overflow guard as the long-quotient variant: if 2*r cannot fit
// in a long, |2r| certainly exceeds |divisor|.
if (r <= HALF_LONG_MIN_VALUE || r > HALF_LONG_MAX_VALUE) {
cmpFracHalf = 1; // 2 * r can't fit into long
} else {
cmpFracHalf = longCompareMagnitude(2 * r, ldivisor);
}
return commonNeedIncrement(roundingMode, qsign, cmpFracHalf, mq.isOdd());
}
/**
 * Divides {@code BigInteger} value by {@code BigInteger} value and
 * do rounding based on the passed in roundingMode.
 */
private static BigInteger divideAndRound(BigInteger bdividend, BigInteger bdivisor, int roundingMode) {
boolean isRemainderZero; // record remainder is zero or not
int qsign; // quotient sign
// Descend into mutables for faster remainder checks
MutableBigInteger mdividend = new MutableBigInteger(bdividend.mag);
MutableBigInteger mq = new MutableBigInteger();
MutableBigInteger mdivisor = new MutableBigInteger(bdivisor.mag);
MutableBigInteger mr = mdividend.divide(mdivisor, mq);
isRemainderZero = mr.isZero();
qsign = (bdividend.signum != bdivisor.signum) ? -1 : 1;
if (!isRemainderZero) {
// Magnitude arithmetic: increment moves the unsigned quotient up;
// the sign is applied when converting back.
if (needIncrement(mdivisor, roundingMode, qsign, mq, mr)) {
mq.add(MutableBigInteger.ONE);
}
}
return mq.toBigInteger(qsign);
}
/**
 * Internally used for division operation for division {@code BigInteger}
 * by {@code BigInteger}.
 * The returned {@code BigDecimal} object is the quotient whose scale is set
 * to the passed in scale. If the remainder is not zero, it will be rounded
 * based on the passed in roundingMode. Also, if the remainder is zero and
 * the last parameter, i.e. preferredScale is NOT equal to scale, the
 * trailing zeros of the result is stripped to match the preferredScale.
 */
private static BigDecimal divideAndRound(BigInteger bdividend, BigInteger bdivisor, int scale, int roundingMode,
int preferredScale) {
boolean isRemainderZero; // record remainder is zero or not
int qsign; // quotient sign
// Descend into mutables for faster remainder checks
MutableBigInteger mdividend = new MutableBigInteger(bdividend.mag);
MutableBigInteger mq = new MutableBigInteger();
MutableBigInteger mdivisor = new MutableBigInteger(bdivisor.mag);
MutableBigInteger mr = mdividend.divide(mdivisor, mq);
isRemainderZero = mr.isZero();
qsign = (bdividend.signum != bdivisor.signum) ? -1 : 1;
if (!isRemainderZero) {
if (needIncrement(mdivisor, roundingMode, qsign, mq, mr)) {
mq.add(MutableBigInteger.ONE);
}
return mq.toBigDecimal(qsign, scale);
} else {
if (preferredScale != scale) {
// Exact result: prefer the compact (long) path for zero-stripping,
// falling back to BigInteger when the quotient does not fit.
long compactVal = mq.toCompactValue(qsign);
if (compactVal != INFLATED) {
return createAndStripZerosToMatchScale(compactVal, scale, preferredScale);
}
BigInteger intVal = mq.toBigInteger(qsign);
return createAndStripZerosToMatchScale(intVal, scale, preferredScale);
} else {
return mq.toBigDecimal(qsign, scale);
}
}
}
/**
 * Tests if quotient has to be incremented according the roundingMode
 * (fully arbitrary-precision form: remainder and divisor are both
 * MutableBigIntegers, compared via compareHalf).
 */
private static boolean needIncrement(MutableBigInteger mdivisor, int roundingMode,
int qsign, MutableBigInteger mq, MutableBigInteger mr) {
assert !mr.isZero();
// compareHalf relates |remainder| to |divisor|/2 without materializing 2*r.
int cmpFracHalf = mr.compareHalf(mdivisor);
return commonNeedIncrement(roundingMode, qsign, cmpFracHalf, mq.isOdd());
}
/**
 * Remove insignificant trailing zeros from this
 * {@code BigInteger} value until the preferred scale is reached or no
 * more zeros can be removed. If the preferred scale is less than
 * Integer.MIN_VALUE, all the trailing zeros will be removed.
 *
 * @return new {@code BigDecimal} with a scale possibly reduced
 * to be closed to the preferred scale.
 */
private static BigDecimal createAndStripZerosToMatchScale(BigInteger intVal, int scale, long preferredScale) {
BigInteger qr[]; // quotient-remainder pair
// Stop once |intVal| < 10 (single digit can't end in a removable zero)
// or the scale has come down to the preferred scale.
while (intVal.compareMagnitude(BigInteger.TEN) >= 0
&& scale > preferredScale) {
if (intVal.testBit(0))
break; // odd number cannot end in 0
qr = intVal.divideAndRemainder(BigInteger.TEN);
if (qr[1].signum() != 0)
break; // non-0 remainder
intVal = qr[0];
scale = checkScale(intVal,(long) scale - 1); // could Overflow
}
return valueOf(intVal, scale, 0);
}
/**
 * Remove insignificant trailing zeros from this
 * {@code long} value until the preferred scale is reached or no
 * more zeros can be removed. If the preferred scale is less than
 * Integer.MIN_VALUE, all the trailing zeros will be removed.
 *
 * @return new {@code BigDecimal} with a scale possibly reduced
 * to be closed to the preferred scale.
 */
private static BigDecimal createAndStripZerosToMatchScale(long compactVal, int scale, long preferredScale) {
while (Math.abs(compactVal) >= 10L && scale > preferredScale) {
// Fast parity check before paying for the modulo below.
if ((compactVal & 1L) != 0L)
break; // odd number cannot end in 0
long r = compactVal % 10L;
if (r != 0L)
break; // non-0 remainder
compactVal /= 10;
scale = checkScale(compactVal, (long) scale - 1); // could Overflow
}
return valueOf(compactVal, scale);
}
/*
 * Dispatches trailing-zero stripping to the compact (long) or BigInteger
 * implementation depending on which representation holds the value.
 */
private static BigDecimal stripZerosToMatchScale(BigInteger intVal, long intCompact, int scale, int preferredScale) {
    if (intCompact == INFLATED) {
        // A null intVal stands for the INFLATED sentinel itself.
        BigInteger big = (intVal == null) ? INFLATED_BIGINT : intVal;
        return createAndStripZerosToMatchScale(big, scale, preferredScale);
    }
    return createAndStripZerosToMatchScale(intCompact, scale, preferredScale);
}
/*
 * Sums two longs exactly; returns INFLATED when the true sum overflows
 * 64 bits.
 */
private static long add(long xs, long ys){
    long sum = xs + ys;
    // Overflow occurred iff the sum disagrees in sign with both operands
    // (see "Hacker's Delight" section 2-12).
    boolean overflowed = ((sum ^ xs) & (sum ^ ys)) < 0L;
    return overflowed ? INFLATED : sum;
}
/*
 * Adds two compact values at a common scale, escalating to BigInteger
 * arithmetic when the long sum overflows.
 */
private static BigDecimal add(long xs, long ys, int scale){
    long sum = add(xs, ys);
    return (sum != INFLATED)
            ? BigDecimal.valueOf(sum, scale)
            : new BigDecimal(BigInteger.valueOf(xs).add(ys), scale);
}
// Adds two compact values with (possibly) different scales: aligns the
// smaller-scale operand by a power of ten, then adds at the common scale.
private static BigDecimal add(final long xs, int scale1, final long ys, int scale2) {
long sdiff = (long) scale1 - scale2;
if (sdiff == 0) {
return add(xs, ys, scale1);
} else if (sdiff < 0) {
// xs has the smaller scale: scale it up to scale2.
int raise = checkScale(xs,-sdiff);
long scaledX = longMultiplyPowerTen(xs, raise);
if (scaledX != INFLATED) {
return add(scaledX, ys, scale2);
} else {
// Scaled operand no longer fits a long: add in BigInteger space.
BigInteger bigsum = bigMultiplyPowerTen(xs,raise).add(ys);
// NOTE(review): same-sign operands take the raw constructor while
// mixed signs go through valueOf — presumably because cancellation
// can shrink the magnitude; confirm against valueOf's behavior.
return ((xs^ys)>=0) ? // same sign test
new BigDecimal(bigsum, INFLATED, scale2, 0)
: valueOf(bigsum, scale2, 0);
}
} else {
// ys has the smaller scale: mirror of the branch above.
int raise = checkScale(ys,sdiff);
long scaledY = longMultiplyPowerTen(ys, raise);
if (scaledY != INFLATED) {
return add(xs, scaledY, scale1);
} else {
BigInteger bigsum = bigMultiplyPowerTen(ys,raise).add(xs);
return ((xs^ys)>=0) ?
new BigDecimal(bigsum, INFLATED, scale1, 0)
: valueOf(bigsum, scale1, 0);
}
}
}
// Adds a compact value to a BigInteger value, aligning whichever operand
// has the smaller scale before summing. Result scale is the larger scale.
private static BigDecimal add(final long xs, int scale1, BigInteger snd, int scale2) {
int rscale = scale1;
long sdiff = (long)rscale - scale2;
boolean sameSigns = (Long.signum(xs) == snd.signum);
BigInteger sum;
if (sdiff < 0) {
// xs has the smaller scale: raise it to scale2.
int raise = checkScale(xs,-sdiff);
rscale = scale2;
long scaledX = longMultiplyPowerTen(xs, raise);
if (scaledX == INFLATED) {
sum = snd.add(bigMultiplyPowerTen(xs,raise));
} else {
sum = snd.add(scaledX);
}
} else { //if (sdiff > 0) {
// Also covers sdiff == 0 (raise is then 0 and bigMultiplyPowerTen
// returns snd unchanged).
int raise = checkScale(snd,sdiff);
snd = bigMultiplyPowerTen(snd,raise);
sum = snd.add(xs);
}
// NOTE(review): mixed-sign sums route through valueOf, same-sign sums use
// the raw constructor — presumably to re-detect compactness after
// cancellation; confirm against valueOf(BigInteger,int,int).
return (sameSigns) ?
new BigDecimal(sum, INFLATED, rscale, 0) :
valueOf(sum, rscale, 0);
}
// Adds two BigInteger values, aligning the smaller-scale operand first.
// The result carries the larger of the two scales.
private static BigDecimal add(BigInteger fst, int scale1, BigInteger snd, int scale2) {
int rscale = scale1;
long sdiff = (long)rscale - scale2;
if (sdiff != 0) {
if (sdiff < 0) {
int raise = checkScale(fst,-sdiff);
rscale = scale2;
fst = bigMultiplyPowerTen(fst,raise);
} else {
int raise = checkScale(snd,sdiff);
snd = bigMultiplyPowerTen(snd,raise);
}
}
BigInteger sum = fst.add(snd);
// Same-sign operands cannot cancel, so the raw constructor is used;
// mixed signs go through valueOf.
return (fst.signum == snd.signum) ?
new BigDecimal(sum, INFLATED, rscale, 0) :
valueOf(sum, rscale, 0);
}
// Returns value * 10^n as a BigInteger; non-positive n yields value itself.
private static BigInteger bigMultiplyPowerTen(long value, int n) {
    return (n <= 0)
            ? BigInteger.valueOf(value)
            : bigTenToThe(n).multiply(value);
}
// Returns value * 10^n; uses the long power table when the exponent fits,
// a full BigInteger power otherwise. Non-positive n returns value unchanged.
private static BigInteger bigMultiplyPowerTen(BigInteger value, int n) {
    if (n <= 0) {
        return value;
    }
    return (n < LONG_TEN_POWERS_TABLE.length)
            ? value.multiply(LONG_TEN_POWERS_TABLE[n])
            : value.multiply(bigTenToThe(n));
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (xs /
 * ys)}, with rounding according to the context settings.
 *
 * Fast path - used only when (xscale <= yscale && yscale < 18
 * && mc.precision < 18)
 */
private static BigDecimal divideSmallFastPath(final long xs, int xscale,
final long ys, int yscale,
long preferredScale, MathContext mc) {
int mcp = mc.precision;
int roundingMode = mc.roundingMode.oldMode;
assert (xscale <= yscale) && (yscale < 18) && (mcp < 18);
int xraise = yscale - xscale; // xraise >=0
long scaledX = (xraise==0) ? xs :
longMultiplyPowerTen(xs, xraise); // can't overflow here!
BigDecimal quotient;
int cmp = longCompareMagnitude(scaledX, ys);
if(cmp > 0) { // satisfy constraint (b)
yscale -= 1; // [that is, divisor *= 10]
int scl = checkScaleNonZero(preferredScale + yscale - xscale + mcp);
if (checkScaleNonZero((long) mcp + yscale - xscale) > 0) {
// assert newScale >= xscale
int raise = checkScaleNonZero((long) mcp + yscale - xscale);
long scaledXs;
if ((scaledXs = longMultiplyPowerTen(xs, raise)) == INFLATED) {
// Raised dividend overflows a long: try the 128-bit product
// route first, then fall back to BigInteger division.
quotient = null;
if((mcp-1) >=0 && (mcp-1)<LONG_TEN_POWERS_TABLE.length) {
quotient = multiplyDivideAndRound(LONG_TEN_POWERS_TABLE[mcp-1], scaledX, ys, scl, roundingMode, checkScaleNonZero(preferredScale));
}
if(quotient==null) {
BigInteger rb = bigMultiplyPowerTen(scaledX,mcp-1);
quotient = divideAndRound(rb, ys,
scl, roundingMode, checkScaleNonZero(preferredScale));
}
} else {
quotient = divideAndRound(scaledXs, ys, scl, roundingMode, checkScaleNonZero(preferredScale));
}
} else {
int newScale = checkScaleNonZero((long) xscale - mcp);
// assert newScale >= yscale
if (newScale == yscale) { // easy case
quotient = divideAndRound(xs, ys, scl, roundingMode,checkScaleNonZero(preferredScale));
} else {
// Raise the divisor instead of the dividend.
int raise = checkScaleNonZero((long) newScale - yscale);
long scaledYs;
if ((scaledYs = longMultiplyPowerTen(ys, raise)) == INFLATED) {
BigInteger rb = bigMultiplyPowerTen(ys,raise);
quotient = divideAndRound(BigInteger.valueOf(xs),
rb, scl, roundingMode,checkScaleNonZero(preferredScale));
} else {
quotient = divideAndRound(xs, scaledYs, scl, roundingMode,checkScaleNonZero(preferredScale));
}
}
}
} else {
// abs(scaledX) <= abs(ys)
// result is "scaledX * 10^msp / ys"
int scl = checkScaleNonZero(preferredScale + yscale - xscale + mcp);
if(cmp==0) {
// abs(scaleX)== abs(ys) => result will be scaled 10^mcp + correct sign
quotient = roundedTenPower(((scaledX < 0) == (ys < 0)) ? 1 : -1, mcp, scl, checkScaleNonZero(preferredScale));
} else {
// abs(scaledX) < abs(ys)
long scaledXs;
if ((scaledXs = longMultiplyPowerTen(scaledX, mcp)) == INFLATED) {
quotient = null;
if(mcp<LONG_TEN_POWERS_TABLE.length) {
quotient = multiplyDivideAndRound(LONG_TEN_POWERS_TABLE[mcp], scaledX, ys, scl, roundingMode, checkScaleNonZero(preferredScale));
}
if(quotient==null) {
BigInteger rb = bigMultiplyPowerTen(scaledX,mcp);
quotient = divideAndRound(rb, ys,
scl, roundingMode, checkScaleNonZero(preferredScale));
}
} else {
quotient = divideAndRound(scaledXs, ys, scl, roundingMode, checkScaleNonZero(preferredScale));
}
}
}
// doRound, here, only affects 1000000000 case.
return doRound(quotient,mc);
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (xs /
 * ys)}, with rounding according to the context settings.
 */
private static BigDecimal divide(final long xs, int xscale, final long ys, int yscale, long preferredScale, MathContext mc) {
int mcp = mc.precision;
// Small operands and precision: delegate to the overflow-light fast path.
if(xscale <= yscale && yscale < 18 && mcp<18) {
return divideSmallFastPath(xs, xscale, ys, yscale, preferredScale, mc);
}
if (compareMagnitudeNormalized(xs, xscale, ys, yscale) > 0) {// satisfy constraint (b)
yscale -= 1; // [that is, divisor *= 10]
}
int roundingMode = mc.roundingMode.oldMode;
// In order to find out whether the divide generates the exact result,
// we avoid calling the above divide method. 'quotient' holds the
// return BigDecimal object whose scale will be set to 'scl'.
int scl = checkScaleNonZero(preferredScale + yscale - xscale + mcp);
BigDecimal quotient;
if (checkScaleNonZero((long) mcp + yscale - xscale) > 0) {
// Raise the dividend so the quotient carries mcp digits.
int raise = checkScaleNonZero((long) mcp + yscale - xscale);
long scaledXs;
if ((scaledXs = longMultiplyPowerTen(xs, raise)) == INFLATED) {
BigInteger rb = bigMultiplyPowerTen(xs,raise);
quotient = divideAndRound(rb, ys, scl, roundingMode, checkScaleNonZero(preferredScale));
} else {
quotient = divideAndRound(scaledXs, ys, scl, roundingMode, checkScaleNonZero(preferredScale));
}
} else {
int newScale = checkScaleNonZero((long) xscale - mcp);
// assert newScale >= yscale
if (newScale == yscale) { // easy case
quotient = divideAndRound(xs, ys, scl, roundingMode,checkScaleNonZero(preferredScale));
} else {
// Raise the divisor instead.
int raise = checkScaleNonZero((long) newScale - yscale);
long scaledYs;
if ((scaledYs = longMultiplyPowerTen(ys, raise)) == INFLATED) {
BigInteger rb = bigMultiplyPowerTen(ys,raise);
quotient = divideAndRound(BigInteger.valueOf(xs),
rb, scl, roundingMode,checkScaleNonZero(preferredScale));
} else {
quotient = divideAndRound(xs, scaledYs, scl, roundingMode,checkScaleNonZero(preferredScale));
}
}
}
// doRound, here, only affects 1000000000 case.
return doRound(quotient,mc);
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (xs /
 * ys)}, with rounding according to the context settings.
 * (BigInteger dividend, long divisor variant.)
 */
private static BigDecimal divide(BigInteger xs, int xscale, long ys, int yscale, long preferredScale, MathContext mc) {
// Normalize dividend & divisor so that both fall into [0.1, 0.999...]
if ((-compareMagnitudeNormalized(ys, yscale, xs, xscale)) > 0) {// satisfy constraint (b)
yscale -= 1; // [that is, divisor *= 10]
}
int mcp = mc.precision;
int roundingMode = mc.roundingMode.oldMode;
// In order to find out whether the divide generates the exact result,
// we avoid calling the above divide method. 'quotient' holds the
// return BigDecimal object whose scale will be set to 'scl'.
BigDecimal quotient;
int scl = checkScaleNonZero(preferredScale + yscale - xscale + mcp);
if (checkScaleNonZero((long) mcp + yscale - xscale) > 0) {
// Raise the BigInteger dividend so the quotient has mcp digits.
int raise = checkScaleNonZero((long) mcp + yscale - xscale);
BigInteger rb = bigMultiplyPowerTen(xs,raise);
quotient = divideAndRound(rb, ys, scl, roundingMode, checkScaleNonZero(preferredScale));
} else {
int newScale = checkScaleNonZero((long) xscale - mcp);
// assert newScale >= yscale
if (newScale == yscale) { // easy case
quotient = divideAndRound(xs, ys, scl, roundingMode,checkScaleNonZero(preferredScale));
} else {
// Raise the divisor instead.
int raise = checkScaleNonZero((long) newScale - yscale);
long scaledYs;
if ((scaledYs = longMultiplyPowerTen(ys, raise)) == INFLATED) {
BigInteger rb = bigMultiplyPowerTen(ys,raise);
quotient = divideAndRound(xs, rb, scl, roundingMode,checkScaleNonZero(preferredScale));
} else {
quotient = divideAndRound(xs, scaledYs, scl, roundingMode,checkScaleNonZero(preferredScale));
}
}
}
// doRound, here, only affects 1000000000 case.
return doRound(quotient, mc);
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (xs /
 * ys)}, with rounding according to the context settings.
 * (long dividend, BigInteger divisor variant.)
 */
private static BigDecimal divide(long xs, int xscale, BigInteger ys, int yscale, long preferredScale, MathContext mc) {
// Normalize dividend & divisor so that both fall into [0.1, 0.999...]
if (compareMagnitudeNormalized(xs, xscale, ys, yscale) > 0) {// satisfy constraint (b)
yscale -= 1; // [that is, divisor *= 10]
}
int mcp = mc.precision;
int roundingMode = mc.roundingMode.oldMode;
// In order to find out whether the divide generates the exact result,
// we avoid calling the above divide method. 'quotient' holds the
// return BigDecimal object whose scale will be set to 'scl'.
BigDecimal quotient;
int scl = checkScaleNonZero(preferredScale + yscale - xscale + mcp);
if (checkScaleNonZero((long) mcp + yscale - xscale) > 0) {
int raise = checkScaleNonZero((long) mcp + yscale - xscale);
BigInteger rb = bigMultiplyPowerTen(xs,raise);
quotient = divideAndRound(rb, ys, scl, roundingMode, checkScaleNonZero(preferredScale));
} else {
// Divisor is raised instead; both sides end up as BigIntegers.
int newScale = checkScaleNonZero((long) xscale - mcp);
int raise = checkScaleNonZero((long) newScale - yscale);
BigInteger rb = bigMultiplyPowerTen(ys,raise);
quotient = divideAndRound(BigInteger.valueOf(xs), rb, scl, roundingMode,checkScaleNonZero(preferredScale));
}
// doRound, here, only affects 1000000000 case.
return doRound(quotient, mc);
}
/**
 * Returns a {@code BigDecimal} whose value is {@code (xs /
 * ys)}, with rounding according to the context settings.
 * (BigInteger dividend and divisor variant.)
 */
private static BigDecimal divide(BigInteger xs, int xscale, BigInteger ys, int yscale, long preferredScale, MathContext mc) {
// Normalize dividend & divisor so that both fall into [0.1, 0.999...]
if (compareMagnitudeNormalized(xs, xscale, ys, yscale) > 0) {// satisfy constraint (b)
yscale -= 1; // [that is, divisor *= 10]
}
int mcp = mc.precision;
int roundingMode = mc.roundingMode.oldMode;
// In order to find out whether the divide generates the exact result,
// we avoid calling the above divide method. 'quotient' holds the
// return BigDecimal object whose scale will be set to 'scl'.
BigDecimal quotient;
int scl = checkScaleNonZero(preferredScale + yscale - xscale + mcp);
if (checkScaleNonZero((long) mcp + yscale - xscale) > 0) {
// Raise the dividend so the quotient carries mcp digits.
int raise = checkScaleNonZero((long) mcp + yscale - xscale);
BigInteger rb = bigMultiplyPowerTen(xs,raise);
quotient = divideAndRound(rb, ys, scl, roundingMode, checkScaleNonZero(preferredScale));
} else {
// Raise the divisor instead.
int newScale = checkScaleNonZero((long) xscale - mcp);
int raise = checkScaleNonZero((long) newScale - yscale);
BigInteger rb = bigMultiplyPowerTen(ys,raise);
quotient = divideAndRound(xs, rb, scl, roundingMode,checkScaleNonZero(preferredScale));
}
// doRound, here, only affects 1000000000 case.
return doRound(quotient, mc);
}
/*
 * performs divideAndRound for (dividend0*dividend1, divisor)
 * returns null if quotient can't fit into long value;
 *
 * The 128-bit product dividend0*dividend1 is formed with schoolbook
 * 32x32->64 partial products, then handed to divideAndRound128.
 */
private static BigDecimal multiplyDivideAndRound(long dividend0, long dividend1, long divisor, int scale, int roundingMode,
int preferredScale) {
// Work with magnitudes; the combined sign is applied by the 128-bit divide.
int qsign = Long.signum(dividend0)*Long.signum(dividend1)*Long.signum(divisor);
dividend0 = Math.abs(dividend0);
dividend1 = Math.abs(dividend1);
divisor = Math.abs(divisor);
// multiply dividend0 * dividend1
// Split each operand into 32-bit halves and accumulate the four
// partial products into 32-bit limbs d0..d3 (low to high).
long d0_hi = dividend0 >>> 32;
long d0_lo = dividend0 & LONG_MASK;
long d1_hi = dividend1 >>> 32;
long d1_lo = dividend1 & LONG_MASK;
long product = d0_lo * d1_lo;
long d0 = product & LONG_MASK;
long d1 = product >>> 32;
product = d0_hi * d1_lo + d1;
d1 = product & LONG_MASK;
long d2 = product >>> 32;
product = d0_lo * d1_hi + d1;
d1 = product & LONG_MASK;
d2 += product >>> 32;
long d3 = d2>>>32;
d2 &= LONG_MASK;
product = d0_hi*d1_hi + d2;
d2 = product & LONG_MASK;
d3 = ((product>>>32) + d3) & LONG_MASK;
final long dividendHi = make64(d3,d2);
final long dividendLo = make64(d1,d0);
// divide
return divideAndRound128(dividendHi, dividendLo, divisor, qsign, scale, roundingMode, preferredScale);
}
private static final long DIV_NUM_BASE = (1L<<32); // Number base (32 bits).
/*
 * divideAndRound 128-bit value by long divisor.
 * returns null if quotient can't fit into long value;
 * Specialized version of Knuth's division
 *
 * The dividend is the unsigned 128-bit value (dividendHi, dividendLo);
 * sign carries the quotient sign to apply at the end.
 */
private static BigDecimal divideAndRound128(final long dividendHi, final long dividendLo, long divisor, int sign,
int scale, int roundingMode, int preferredScale) {
// If the high word is >= divisor the quotient needs more than 64 bits.
if (dividendHi >= divisor) {
return null;
}
// Normalize: shift divisor so its top bit is set; shift dividend to match.
final int shift = Long.numberOfLeadingZeros(divisor);
divisor <<= shift;
final long v1 = divisor >>> 32;
final long v0 = divisor & LONG_MASK;
long tmp = dividendLo << shift;
long u1 = tmp >>> 32;
long u0 = tmp & LONG_MASK;
tmp = (dividendHi << shift) | (dividendLo >>> 64 - shift);
long u2 = tmp & LONG_MASK;
// Estimate the high quotient digit q1 = (u3,u2)/v1 and its remainder.
long q1, r_tmp;
if (v1 == 1) {
q1 = tmp;
r_tmp = 0;
} else if (tmp >= 0) {
q1 = tmp / v1;
r_tmp = tmp - q1 * v1;
} else {
// tmp has its sign bit set: use the unsigned-division helper.
long[] rq = divRemNegativeLong(tmp, v1);
q1 = rq[1];
r_tmp = rq[0];
}
// Correct q1 downward while it is provably too large (Knuth step D3).
while(q1 >= DIV_NUM_BASE || unsignedLongCompare(q1*v0, make64(r_tmp, u1))) {
q1--;
r_tmp += v1;
if (r_tmp >= DIV_NUM_BASE)
break;
}
// Multiply-and-subtract to obtain the partial remainder, then estimate
// the low quotient digit q0 the same way.
tmp = mulsub(u2,u1,v1,v0,q1);
u1 = tmp & LONG_MASK;
long q0;
if (v1 == 1) {
q0 = tmp;
r_tmp = 0;
} else if (tmp >= 0) {
q0 = tmp / v1;
r_tmp = tmp - q0 * v1;
} else {
long[] rq = divRemNegativeLong(tmp, v1);
q0 = rq[1];
r_tmp = rq[0];
}
while(q0 >= DIV_NUM_BASE || unsignedLongCompare(q0*v0,make64(r_tmp,u0))) {
q0--;
r_tmp += v1;
if (r_tmp >= DIV_NUM_BASE)
break;
}
if((int)q1 < 0) {
// result (which is positive and unsigned here)
// can't fit into long due to sign bit is used for value
MutableBigInteger mq = new MutableBigInteger(new int[]{(int)q1, (int)q0});
if (roundingMode == ROUND_DOWN && scale == preferredScale) {
return mq.toBigDecimal(sign, scale);
}
// Final remainder (denormalized back by the shift).
long r = mulsub(u1, u0, v1, v0, q0) >>> shift;
if (r != 0) {
if(needIncrement(divisor >>> shift, roundingMode, sign, mq, r)){
mq.add(MutableBigInteger.ONE);
}
return mq.toBigDecimal(sign, scale);
} else {
if (preferredScale != scale) {
BigInteger intVal = mq.toBigInteger(sign);
return createAndStripZerosToMatchScale(intVal,scale, preferredScale);
} else {
return mq.toBigDecimal(sign, scale);
}
}
}
// Quotient fits in a long: assemble it and apply the sign.
long q = make64(q1,q0);
q*=sign;
if (roundingMode == ROUND_DOWN && scale == preferredScale)
return valueOf(q, scale);
long r = mulsub(u1, u0, v1, v0, q0) >>> shift;
if (r != 0) {
boolean increment = needIncrement(divisor >>> shift, roundingMode, sign, q, r);
return valueOf((increment ? q + sign : q), scale);
} else {
if (preferredScale != scale) {
return createAndStripZerosToMatchScale(q, scale, preferredScale);
} else {
return valueOf(q, scale);
}
}
}
/*
 * calculate divideAndRound for ldividend*10^raise / divisor
 * when abs(dividend)==abs(divisor); the result is sign * 10^raise,
 * with the scale pulled toward preferredScale where possible.
 */
private static BigDecimal roundedTenPower(int qsign, int raise, int scale, int preferredScale) {
    if (scale <= preferredScale) {
        return scaledTenPow(raise, qsign, scale);
    }
    int diff = scale - preferredScale;
    if (diff < raise) {
        // Absorb part of the power of ten into the preferred scale.
        return scaledTenPow(raise - diff, qsign, preferredScale);
    }
    // The whole power of ten cancels against the scale.
    return valueOf(qsign, scale - raise);
}
// Builds sign * 10^n at the given scale, staying compact when 10^n fits a long.
static BigDecimal scaledTenPow(int n, int sign, int scale) {
    if (n < LONG_TEN_POWERS_TABLE.length) {
        return valueOf(sign * LONG_TEN_POWERS_TABLE[n], scale);
    }
    BigInteger unscaled = bigTenToThe(n);
    if (sign == -1) {
        unscaled = unscaled.negate();
    }
    // 10^n has exactly n+1 decimal digits.
    return new BigDecimal(unscaled, INFLATED, scale, n + 1);
}
/**
 * Calculate the quotient and remainder of dividing a negative long by
 * another long, treating the numerator as an unsigned 64-bit value.
 *
 * @param n the numerator; must be negative (sign bit set)
 * @param d the denominator; must not be unity
 * @return a two-element {@code long} array with the remainder and quotient in
 *         the initial and final elements, respectively
 */
private static long[] divRemNegativeLong(long n, long d) {
    assert n < 0 : "Non-negative numerator " + n;
    assert d != 1 : "Unity denominator";
    // First guess: halve both operands so the division stays non-negative.
    long quotient = (n >>> 1) / (d >>> 1);
    long remainder = n - quotient * d;
    // Nudge the guess until 0 <= remainder < d.
    for (; remainder < 0; remainder += d) {
        quotient--;
    }
    for (; remainder >= d; remainder -= d) {
        quotient++;
    }
    // n - quotient*d == remainder && 0 <= remainder < d, hence we're done.
    return new long[] {remainder, quotient};
}
// Assembles a 64-bit value from two 32-bit halves (lo is assumed to fit
// in 32 bits; no masking is applied).
private static long make64(long hi, long lo) {
    long upper = hi << 32;
    return upper | lo;
}
// Multiply-and-subtract step of the 128/64 division: computes the partial
// remainder (u1,u0) - q0*(v1,v0) in 32-bit limb arithmetic.
private static long mulsub(long u1, long u0, final long v1, final long v0, long q0) {
    long lowDiff = u0 - q0 * v0;
    long hi = u1 + (lowDiff >>> 32) - q0 * v1;
    return make64(hi, lowDiff & LONG_MASK);
}
// Unsigned strict comparison: true when one > two with both values
// interpreted as unsigned 64-bit integers (sign bit flipped via bias).
private static boolean unsignedLongCompare(long one, long two) {
    long biasedOne = one + Long.MIN_VALUE;
    long biasedTwo = two + Long.MIN_VALUE;
    return biasedOne > biasedTwo;
}
// Unsigned non-strict comparison: true when one >= two with both values
// interpreted as unsigned 64-bit integers.
private static boolean unsignedLongCompareEq(long one, long two) {
    long biasedOne = one + Long.MIN_VALUE;
    long biasedTwo = two + Long.MIN_VALUE;
    return biasedOne >= biasedTwo;
}
// Compare Normalize dividend & divisor so that both fall into [0.1, 0.999...]
// Compares |xs*10^-xscale| vs |ys*10^-yscale| by raising the smaller-scale
// operand. If a raise overflows a long (INFLATED), that operand is the
// larger one by magnitude.
private static int compareMagnitudeNormalized(long xs, int xscale, long ys, int yscale) {
// assert xs!=0 && ys!=0
int sdiff = xscale - yscale;
if (sdiff != 0) {
if (sdiff < 0) {
xs = longMultiplyPowerTen(xs, -sdiff);
} else { // sdiff > 0
ys = longMultiplyPowerTen(ys, sdiff);
}
}
if (xs != INFLATED)
// ys INFLATED means ys outgrew a long => |ys| > |xs| => -1.
return (ys != INFLATED) ? longCompareMagnitude(xs, ys) : -1;
else
// xs outgrew a long => |xs| > |ys| (ys is still a raw long here).
return 1;
}
// Compare Normalize dividend & divisor so that both fall into [0.1, 0.999...]
// Mixed long/BigInteger form. Relies on the caller's guarantee that ys does
// not fit in a long, so xs can only match or exceed ys after a scale raise
// that itself overflows a long.
private static int compareMagnitudeNormalized(long xs, int xscale, BigInteger ys, int yscale) {
// assert "ys can't be represented as long"
if (xs == 0)
return -1;
int sdiff = xscale - yscale;
if (sdiff < 0) {
// Only a raised xs that overflows a long can rival ys in magnitude;
// in that case fall back to a full BigInteger comparison.
if (longMultiplyPowerTen(xs, -sdiff) == INFLATED ) {
return bigMultiplyPowerTen(xs, -sdiff).compareMagnitude(ys);
}
}
// Otherwise xs still fits in a long while ys does not: |xs| < |ys|.
return -1;
}
// Compare Normalize dividend & divisor so that both fall into [0.1, 0.999...]
// Full BigInteger form: raise whichever operand has the smaller scale, then
// compare magnitudes.
private static int compareMagnitudeNormalized(BigInteger xs, int xscale, BigInteger ys, int yscale) {
    int sdiff = xscale - yscale;
    return (sdiff < 0)
            ? bigMultiplyPowerTen(xs, -sdiff).compareMagnitude(ys)
            : xs.compareMagnitude(bigMultiplyPowerTen(ys, sdiff)); // sdiff >= 0
}
private static long multiply(long x, long y){
long product = x * y;
long ax = Math.abs(x);
long ay = Math.abs(y);
if (((ax | ay) >>> 31 == 0) || (y == 0) || (product / y == x)){
return product;
}
return INFLATED;
}
private static BigDecimal multiply(long x, long y, int scale) {
long product = multiply(x, y);
if(product!=INFLATED) {
return valueOf(product,scale);
}
return new BigDecimal(BigInteger.valueOf(x).multiply(y),INFLATED,scale,0);
}
private static BigDecimal multiply(long x, BigInteger y, int scale) {
if(x==0) {
return zeroValueOf(scale);
}
return new BigDecimal(y.multiply(x),INFLATED,scale,0);
}
private static BigDecimal multiply(BigInteger x, BigInteger y, int scale) {
return new BigDecimal(x.multiply(y),INFLATED,scale,0);
}
/**
* Multiplies two long values and rounds according {@code MathContext}
*/
private static BigDecimal multiplyAndRound(long x, long y, int scale, MathContext mc) {
long product = multiply(x, y);
if(product!=INFLATED) {
return doRound(product, scale, mc);
}
// attempt to do it in 128 bits
int rsign = 1;
if(x < 0) {
x = -x;
rsign = -1;
}
if(y < 0) {
y = -y;
rsign *= -1;
}
// multiply dividend0 * dividend1
long m0_hi = x >>> 32;
long m0_lo = x & LONG_MASK;
long m1_hi = y >>> 32;
long m1_lo = y & LONG_MASK;
product = m0_lo * m1_lo;
long m0 = product & LONG_MASK;
long m1 = product >>> 32;
product = m0_hi * m1_lo + m1;
m1 = product & LONG_MASK;
long m2 = product >>> 32;
product = m0_lo * m1_hi + m1;
m1 = product & LONG_MASK;
m2 += product >>> 32;
long m3 = m2>>>32;
m2 &= LONG_MASK;
product = m0_hi*m1_hi + m2;
m2 = product & LONG_MASK;
m3 = ((product>>>32) + m3) & LONG_MASK;
final long mHi = make64(m3,m2);
final long mLo = make64(m1,m0);
BigDecimal res = doRound128(mHi, mLo, rsign, scale, mc);
if(res!=null) {
return res;
}
res = new BigDecimal(BigInteger.valueOf(x).multiply(y*rsign), INFLATED, scale, 0);
return doRound(res,mc);
}
private static BigDecimal multiplyAndRound(long x, BigInteger y, int scale, MathContext mc) {
if(x==0) {
return zeroValueOf(scale);
}
return doRound(y.multiply(x), scale, mc);
}
private static BigDecimal multiplyAndRound(BigInteger x, BigInteger y, int scale, MathContext mc) {
return doRound(x.multiply(y), scale, mc);
}
/**
* rounds 128-bit value according {@code MathContext}
* returns null if result can't be repsented as compact BigDecimal.
*/
private static BigDecimal doRound128(long hi, long lo, int sign, int scale, MathContext mc) {
int mcp = mc.precision;
int drop;
BigDecimal res = null;
if(((drop = precision(hi, lo) - mcp) > 0)&&(drop<LONG_TEN_POWERS_TABLE.length)) {
scale = checkScaleNonZero((long)scale - drop);
res = divideAndRound128(hi, lo, LONG_TEN_POWERS_TABLE[drop], sign, scale, mc.roundingMode.oldMode, scale);
}
if(res!=null) {
return doRound(res,mc);
}
return null;
}
private static final long[][] LONGLONG_TEN_POWERS_TABLE = {
{ 0L, 0x8AC7230489E80000L }, //10^19
{ 0x5L, 0x6bc75e2d63100000L }, //10^20
{ 0x36L, 0x35c9adc5dea00000L }, //10^21
{ 0x21eL, 0x19e0c9bab2400000L }, //10^22
{ 0x152dL, 0x02c7e14af6800000L }, //10^23
{ 0xd3c2L, 0x1bcecceda1000000L }, //10^24
{ 0x84595L, 0x161401484a000000L }, //10^25
{ 0x52b7d2L, 0xdcc80cd2e4000000L }, //10^26
{ 0x33b2e3cL, 0x9fd0803ce8000000L }, //10^27
{ 0x204fce5eL, 0x3e25026110000000L }, //10^28
{ 0x1431e0faeL, 0x6d7217caa0000000L }, //10^29
{ 0xc9f2c9cd0L, 0x4674edea40000000L }, //10^30
{ 0x7e37be2022L, 0xc0914b2680000000L }, //10^31
{ 0x4ee2d6d415bL, 0x85acef8100000000L }, //10^32
{ 0x314dc6448d93L, 0x38c15b0a00000000L }, //10^33
{ 0x1ed09bead87c0L, 0x378d8e6400000000L }, //10^34
{ 0x13426172c74d82L, 0x2b878fe800000000L }, //10^35
{ 0xc097ce7bc90715L, 0xb34b9f1000000000L }, //10^36
{ 0x785ee10d5da46d9L, 0x00f436a000000000L }, //10^37
{ 0x4b3b4ca85a86c47aL, 0x098a224000000000L }, //10^38
};
/*
* returns precision of 128-bit value
*/
private static int precision(long hi, long lo){
if(hi==0) {
if(lo>=0) {
return longDigitLength(lo);
}
return (unsignedLongCompareEq(lo, LONGLONG_TEN_POWERS_TABLE[0][1])) ? 20 : 19;
// 0x8AC7230489E80000L = unsigned 2^19
}
int r = ((128 - Long.numberOfLeadingZeros(hi) + 1) * 1233) >>> 12;
int idx = r-19;
return (idx >= LONGLONG_TEN_POWERS_TABLE.length || longLongCompareMagnitude(hi, lo,
LONGLONG_TEN_POWERS_TABLE[idx][0], LONGLONG_TEN_POWERS_TABLE[idx][1])) ? r : r + 1;
}
/*
* returns true if 128 bit number <hi0,lo0> is less than <hi1,lo1>
* hi0 & hi1 should be non-negative
*/
private static boolean longLongCompareMagnitude(long hi0, long lo0, long hi1, long lo1) {
if(hi0!=hi1) {
return hi0<hi1;
}
return (lo0+Long.MIN_VALUE) <(lo1+Long.MIN_VALUE);
}
private static BigDecimal divide(long dividend, int dividendScale, long divisor, int divisorScale, int scale, int roundingMode) {
if (checkScale(dividend,(long)scale + divisorScale) > dividendScale) {
int newScale = scale + divisorScale;
int raise = newScale - dividendScale;
if(raise<LONG_TEN_POWERS_TABLE.length) {
long xs = dividend;
if ((xs = longMultiplyPowerTen(xs, raise)) != INFLATED) {
return divideAndRound(xs, divisor, scale, roundingMode, scale);
}
BigDecimal q = multiplyDivideAndRound(LONG_TEN_POWERS_TABLE[raise], dividend, divisor, scale, roundingMode, scale);
if(q!=null) {
return q;
}
}
BigInteger scaledDividend = bigMultiplyPowerTen(dividend, raise);
return divideAndRound(scaledDividend, divisor, scale, roundingMode, scale);
} else {
int newScale = checkScale(divisor,(long)dividendScale - scale);
int raise = newScale - divisorScale;
if(raise<LONG_TEN_POWERS_TABLE.length) {
long ys = divisor;
if ((ys = longMultiplyPowerTen(ys, raise)) != INFLATED) {
return divideAndRound(dividend, ys, scale, roundingMode, scale);
}
}
BigInteger scaledDivisor = bigMultiplyPowerTen(divisor, raise);
return divideAndRound(BigInteger.valueOf(dividend), scaledDivisor, scale, roundingMode, scale);
}
}
private static BigDecimal divide(BigInteger dividend, int dividendScale, long divisor, int divisorScale, int scale, int roundingMode) {
if (checkScale(dividend,(long)scale + divisorScale) > dividendScale) {
int newScale = scale + divisorScale;
int raise = newScale - dividendScale;
BigInteger scaledDividend = bigMultiplyPowerTen(dividend, raise);
return divideAndRound(scaledDividend, divisor, scale, roundingMode, scale);
} else {
int newScale = checkScale(divisor,(long)dividendScale - scale);
int raise = newScale - divisorScale;
if(raise<LONG_TEN_POWERS_TABLE.length) {
long ys = divisor;
if ((ys = longMultiplyPowerTen(ys, raise)) != INFLATED) {
return divideAndRound(dividend, ys, scale, roundingMode, scale);
}
}
BigInteger scaledDivisor = bigMultiplyPowerTen(divisor, raise);
return divideAndRound(dividend, scaledDivisor, scale, roundingMode, scale);
}
}
private static BigDecimal divide(long dividend, int dividendScale, BigInteger divisor, int divisorScale, int scale, int roundingMode) {
if (checkScale(dividend,(long)scale + divisorScale) > dividendScale) {
int newScale = scale + divisorScale;
int raise = newScale - dividendScale;
BigInteger scaledDividend = bigMultiplyPowerTen(dividend, raise);
return divideAndRound(scaledDividend, divisor, scale, roundingMode, scale);
} else {
int newScale = checkScale(divisor,(long)dividendScale - scale);
int raise = newScale - divisorScale;
BigInteger scaledDivisor = bigMultiplyPowerTen(divisor, raise);
return divideAndRound(BigInteger.valueOf(dividend), scaledDivisor, scale, roundingMode, scale);
}
}
private static BigDecimal divide(BigInteger dividend, int dividendScale, BigInteger divisor, int divisorScale, int scale, int roundingMode) {
if (checkScale(dividend,(long)scale + divisorScale) > dividendScale) {
int newScale = scale + divisorScale;
int raise = newScale - dividendScale;
BigInteger scaledDividend = bigMultiplyPowerTen(dividend, raise);
return divideAndRound(scaledDividend, divisor, scale, roundingMode, scale);
} else {
int newScale = checkScale(divisor,(long)dividendScale - scale);
int raise = newScale - divisorScale;
BigInteger scaledDivisor = bigMultiplyPowerTen(divisor, raise);
return divideAndRound(dividend, scaledDivisor, scale, roundingMode, scale);
}
}
}
| gpl-2.0 |
Litss/PlotSquared | src/main/java/com/intellectualcrafters/json/CookieList.java | 2265 | package com.intellectualcrafters.json;
import java.util.Iterator;
/**
* Convert a web browser cookie list string to a JSONObject and back.
*
* @author JSON.org
* @version 2014-05-03
*/
public class CookieList {
/**
* Convert a cookie list into a JSONObject. A cookie list is a sequence of name/value pairs. The names are separated
* from the values by '='. The pairs are separated by ';'. The names and the values will be unescaped, possibly
* converting '+' and '%' sequences.
*
* To add a cookie to a cooklist, cookielistJSONObject.put(cookieJSONObject.getString("name"),
* cookieJSONObject.getString("value"));
*
* @param string A cookie list string
*
* @return A JSONObject
*
* @throws JSONException
*/
public static JSONObject toJSONObject(final String string) throws JSONException {
final JSONObject jo = new JSONObject();
final JSONTokener x = new JSONTokener(string);
while (x.more()) {
final String name = Cookie.unescape(x.nextTo('='));
x.next('=');
jo.put(name, Cookie.unescape(x.nextTo(';')));
x.next();
}
return jo;
}
/**
* Convert a JSONObject into a cookie list. A cookie list is a sequence of name/value pairs. The names are separated
* from the values by '='. The pairs are separated by ';'. The characters '%', '+', '=', and ';' in the names and
* values are replaced by "%hh".
*
* @param jo A JSONObject
*
* @return A cookie list string
*
* @throws JSONException
*/
public static String toString(final JSONObject jo) throws JSONException {
boolean b = false;
final Iterator<String> keys = jo.keys();
String string;
final StringBuilder sb = new StringBuilder();
while (keys.hasNext()) {
string = keys.next();
if (!jo.isNull(string)) {
if (b) {
sb.append(';');
}
sb.append(Cookie.escape(string));
sb.append("=");
sb.append(Cookie.escape(jo.getString(string)));
b = true;
}
}
return sb.toString();
}
}
| gpl-3.0 |
s20121035/rk3288_android5.1_repo | external/liblzf/src/org/liblzf/CLZF.java | 10311 | /*
* Copyright (c) 2005 Oren J. Maurice <oymaurice@hazorea.org.il>
*
* Redistribution and use in source and binary forms, with or without modifica-
* tion, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MER-
* CHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPE-
* CIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTH-
* ERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License version 2 (the "GPL"), in which case the
* provisions of the GPL are applicable instead of the above. If you wish to
* allow the use of your version of this file only under the terms of the
* GPL and not to allow others to use your version of this file under the
* BSD license, indicate your decision by deleting the provisions above and
* replace them with the notice and other provisions required by the GPL. If
* you do not delete the provisions above, a recipient may use your version
* of this file under either the BSD or the GPL.
*/
// ported from C# to Java
package org.liblzf;
/// <summary>
/// Summary description for CLZF.
/// </summary>
public class CLZF
{
// CRC32 data & function
/*
static int []crc_32_tab = new int[]
{
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419,
0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4,
0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07,
0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856,
0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9,
0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4,
0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3,
0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a,
0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599,
0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190,
0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f,
0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e,
0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed,
0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950,
0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3,
0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a,
0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5,
0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010,
0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17,
0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6,
0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615,
0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344,
0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb,
0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a,
0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1,
0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c,
0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef,
0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe,
0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31,
0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c,
0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b,
0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,
0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1,
0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278,
0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7,
0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66,
0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8,
0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b,
0x2d02ef8d
};
public int crc32(int OldCRC,byte NewData)
{
return crc_32_tab[(OldCRC & 0xff) ^ NewData] ^ (OldCRC >> 8);
}
*/
/// <summary>
/// LZF Compressor
/// </summary>
static int HLOG=14;
static int HSIZE=(1<<14);
/*
* don't play with this unless you benchmark!
* decompression is not dependent on the hash function
* the hashing function might seem strange, just believe me
* it works ;)
*/
static int MAX_LIT=(1 << 5);
static int MAX_OFF=(1 << 13);
static int MAX_REF=((1 << 8) + (1 << 3));
static int FRST(byte[] Array,int ptr)
{
return (int)((((Array[ptr]) << 8) & 0xff00) | (Array[ptr+1] & 0xff));
}
static int NEXT(int v,byte[] Array,int ptr)
{
return ((v) << 8) | (Array[ptr+2] & 0xff);
}
static int IDX(int h)
{
return ((((h ^ (h << 5)) >> (int) (3*8 - HLOG)) - h*5) & (HSIZE - 1));
}
/*
* compressed format
*
* 000LLLLL <L+1> ; literal
* LLLOOOOO oooooooo ; backref L
* 111OOOOO LLLLLLLL oooooooo ; backref L+7
*
*/
public static int lzf_compress (byte[] in_data, int in_len,byte[] out_data, int out_len)
{
int c;
int []htab=new int[1<<14];
for (c=0;c<1<<14;c++)
{
htab[c]=0;
}
int hslot;
int iidx = 0;
int oidx = 0;
//byte *in_end = ip + in_len;
//byte *out_end = op + out_len;
int reference;
int hval = FRST (in_data,iidx);
int off;
int lit = 0;
for (;;)
{
if (iidx < in_len - 2)
{
hval = NEXT (hval, in_data,iidx);
hslot = IDX (hval);
reference = htab[hslot];
htab[hslot] = (int)iidx;
if ((off = iidx - reference - 1) < MAX_OFF
&& iidx + 4 < in_len
&& reference > 0
&& in_data[reference+0] == in_data[iidx+0]
&& in_data[reference+1] == in_data[iidx+1]
&& in_data[reference+2] == in_data[iidx+2]
)
{
/* match found at *reference++ */
int len = 2;
int maxlen = in_len - iidx - len;
maxlen = maxlen > MAX_REF ? MAX_REF : maxlen;
if (oidx + lit + 1 + 3 >= out_len)
return 0;
do
len++;
while (len < maxlen && in_data[reference+len] == in_data[iidx+len]);
if (lit!=0)
{
out_data[oidx++] = (byte)(lit - 1);
lit = -lit;
do
out_data[oidx++] = in_data[iidx+lit];
while ((++lit)!=0);
}
len -= 2;
iidx++;
if (len < 7)
{
out_data[oidx++] = (byte)((off >> 8) + (len << 5));
}
else
{
out_data[oidx++] = (byte)((off >> 8) + ( 7 << 5));
out_data[oidx++] = (byte)(len - 7);
}
out_data[oidx++] = (byte)off;
iidx += len-1;
hval = FRST (in_data,iidx);
hval = NEXT (hval,in_data, iidx);
htab[IDX (hval)] = iidx;
iidx++;
hval = NEXT (hval, in_data,iidx);
htab[IDX (hval)] = iidx;
iidx++;
continue;
}
}
else if (iidx == in_len)
break;
/* one more literal byte we must copy */
lit++;
iidx++;
if (lit == MAX_LIT)
{
if (oidx + 1 + MAX_LIT >= out_len)
return 0;
out_data[oidx++] = (byte)(MAX_LIT - 1);
lit = -lit;
do
out_data[oidx++] = in_data[iidx+lit];
while ((++lit)!=0);
}
}
if (lit!=0)
{
if (oidx + lit + 1 >= out_len)
return 0;
out_data[oidx++] = (byte)(lit - 1);
lit = -lit;
do
out_data[oidx++] = in_data[iidx+lit];
while ((++lit)!=0);
}
return (int)oidx;
}
/// <summary>
/// LZF Decompressor
/// </summary>
public static int lzf_decompress ( byte[] in_data, int in_len, byte[] out_data, int out_len)
{
int iidx=0;
int oidx=0;
do
{
int ctrl = in_data[iidx++] & 0xff;
if (ctrl < (1 << 5)) /* literal run */
{
ctrl++;
if (oidx + ctrl > out_len)
{
//SET_ERRNO (E2BIG);
return 0;
}
do
out_data[oidx++] = in_data[iidx++];
while ((--ctrl)!=0);
}
else /* back reference */
{
int len = ctrl >> 5;
int reference = (int)(oidx - ((ctrl & 0x1f) << 8) - 1);
if (len == 7)
len += in_data[iidx++] & 0xff;
reference -= in_data[iidx++] & 0xff;
if (oidx + len + 2 > out_len)
{
//SET_ERRNO (E2BIG);
return 0;
}
if (reference < 0)
{
//SET_ERRNO (EINVAL);
return 0;
}
out_data[oidx++]=out_data[reference++];
out_data[oidx++]=out_data[reference++];
do
out_data[oidx++]=out_data[reference++];
while ((--len)!=0);
}
}
while (iidx < in_len);
return (int)oidx;
}
public CLZF()
{
//
// TODO: Add ructor logic here
//
}
}
| gpl-3.0 |
lausuper/Excitement-Open-Platform | biutee/src/main/java/eu/excitementproject/eop/biutee/operations/updater/UpdaterForChangePredicateTruth.java | 1309 | package eu.excitementproject.eop.biutee.operations.updater;
import java.util.Map;
import eu.excitementproject.eop.biutee.rteflow.macro.FeatureUpdate;
import eu.excitementproject.eop.common.representation.parse.tree.TreeAndParentMap;
import eu.excitementproject.eop.transformations.operations.operations.GenerationOperation;
import eu.excitementproject.eop.transformations.operations.specifications.SubstituteNodeSpecification;
import eu.excitementproject.eop.transformations.representation.ExtendedInfo;
import eu.excitementproject.eop.transformations.representation.ExtendedNode;
import eu.excitementproject.eop.transformations.utilities.TeEngineMlException;
/**
*
* @author Asher Stern
* @since Jan 25, 2012
*
*/
public class UpdaterForChangePredicateTruth extends FeatureVectorUpdater<SubstituteNodeSpecification>
{
@Override
public Map<Integer, Double> updateFeatureVector(
Map<Integer, Double> originalFeatureVector,
FeatureUpdate featureUpdate,
TreeAndParentMap<ExtendedInfo, ExtendedNode> textTree,
TreeAndParentMap<ExtendedInfo, ExtendedNode> hypothesisTree,
GenerationOperation<ExtendedInfo, ExtendedNode> operation,
SubstituteNodeSpecification specification)
throws TeEngineMlException
{
return featureUpdate.forChangePredicateTruth(originalFeatureVector);
}
}
| gpl-3.0 |
farkam135/GoIV | app/src/main/java/com/kamron/pogoiv/activities/RecalibrateFragment.java | 6517 | package com.kamron.pogoiv.activities;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Point;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.widget.NestedScrollView;
import android.view.Display;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.kamron.pogoiv.BuildConfig;
import com.kamron.pogoiv.GoIVSettings;
import com.kamron.pogoiv.R;
import java.util.Locale;
import butterknife.BindView;
import butterknife.ButterKnife;
public class RecalibrateFragment extends Fragment {
private static final String URL_YOUTUBE_TUTORIAL = "https://www.youtube.com/embed/w7dNEW1FLjQ?rel=0";
@BindView(R.id.mainScrollView)
NestedScrollView mainScrollView;
@BindView(R.id.optimizationWarningLayout)
LinearLayout optimizationWarningLayout;
@BindView(R.id.shouldRunOptimizationAgainWarning)
TextView shouldRunOptimizationAgainWarning;
@BindView(R.id.neverRunOptimizationWarning)
TextView neverRunOptimizationWarning;
@BindView(R.id.nonStandardScreenWarning)
TextView nonStandardScreenWarning;
@BindView(R.id.recalibrationHelpButton)
Button recalibrationHelpButton;
@BindView(R.id.optimizationVideoTutorialLayout)
LinearLayout optimizationVideoTutorialLayout;
@BindView(R.id.optimizationVideoTutorial)
WebView optimizationVideoTutorial;
public RecalibrateFragment() {
super();
}
@Override
@Nullable
public View onCreateView(@NonNull LayoutInflater inflater,
@Nullable ViewGroup container,
@Nullable Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_recalibrate, container, false);
ButterKnife.bind(this, view);
return view;
}
@Override public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
initiateOptimizationWarning();
setupTutorialButton();
// Hide
if (BuildConfig.FLAVOR.toLowerCase().contains("offline")) {
optimizationVideoTutorial.setVisibility(View.GONE);
}
}
/**
* Show the optimization-warning and its components depending on if the user hasn't a manual screen calibration
* saved, if the calibration isn't updated and if the device has weird screen ratio.
*/
private void initiateOptimizationWarning() {
GoIVSettings settings = GoIVSettings.getInstance(getContext());
if (settings.hasUpToDateManualScanCalibration()) {
optimizationWarningLayout.setVisibility(View.GONE); // Ensure the layout isn't visible
} else {
optimizationWarningLayout.setVisibility(View.VISIBLE);
if (settings.hasManualScanCalibration()) {
// Has outdated calibration
shouldRunOptimizationAgainWarning.setVisibility(View.VISIBLE);
} else {
// Has never calibrated
neverRunOptimizationWarning.setVisibility(View.VISIBLE);
Activity activity = getActivity();
if (activity == null) {
return;
}
// If the screen ratio isn't standard the user must run calibration
Display display = activity.getWindowManager().getDefaultDisplay();
Point size = new Point();
display.getRealSize(size);
float ratio = (float) size.x / size.y;
float standardRatio = 9 / 16f;
float tolerance = 1 / 400f;
if (ratio < (standardRatio - tolerance) || ratio > (standardRatio + tolerance)) {
nonStandardScreenWarning.setVisibility(View.VISIBLE);
}
}
}
}
/**
* Makes the help-buttons load and navigate to the tutorial youtube webview, or open the browser if using offline
* build.
*/
private void setupTutorialButton() {
View.OnClickListener tutorialListener = new View.OnClickListener() {
@Override public void onClick(View v) {
if (BuildConfig.FLAVOR.toLowerCase().contains("online")) {
if (optimizationVideoTutorialLayout.getVisibility() == View.GONE) {
optimizationVideoTutorialLayout.setVisibility(View.VISIBLE);
String frameVideo = "<html><iframe width=\"310\" height=\"480\" src=\""
+ URL_YOUTUBE_TUTORIAL
+ "\" frameborder=\"0\" gesture=\"media\" allow=\"encrypted-media\" "
+ "allowfullscreen></iframe></html>";
optimizationVideoTutorial.setWebViewClient(new WebViewClient() {
@Override
public boolean shouldOverrideUrlLoading(WebView view, String url) {
return false;
}
});
optimizationVideoTutorial.getSettings().setJavaScriptEnabled(true);
optimizationVideoTutorial.loadData(frameVideo, "text/html", "utf-8");
mainScrollView.post(new Runnable() {
@Override
public void run() {
mainScrollView.smoothScrollTo(0, optimizationVideoTutorial.getTop());
}
});
} else {
optimizationVideoTutorial.stopLoading();
optimizationVideoTutorialLayout.setVisibility(View.GONE);
}
} else {
// Running offline version, we cant load the webpage inserted into the app, we need to open browser.
Intent i = new Intent(Intent.ACTION_VIEW);
i.setData(Uri.parse(URL_YOUTUBE_TUTORIAL));
startActivity(i);
}
}
};
recalibrationHelpButton.setOnClickListener(tutorialListener);
}
}
| gpl-3.0 |
tkpb/tasks | src/main/java/org/tasks/ui/NavigationDrawerFragment.java | 9904 | package org.tasks.ui;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Rect;
import android.os.Bundle;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ListView;
import com.todoroo.astrid.actfm.TagSettingsActivity;
import com.todoroo.astrid.activity.TaskListActivity;
import com.todoroo.astrid.activity.TaskListFragment;
import com.todoroo.astrid.adapter.FilterAdapter;
import com.todoroo.astrid.api.AstridApiConstants;
import com.todoroo.astrid.api.Filter;
import com.todoroo.astrid.api.FilterListItem;
import com.todoroo.astrid.core.OldTaskPreferences;
import com.todoroo.astrid.reminders.ReminderPreferences;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tasks.R;
import org.tasks.filters.FilterCounter;
import org.tasks.filters.FilterProvider;
import org.tasks.filters.NavigationDrawerAction;
import org.tasks.injection.InjectingFragment;
import org.tasks.location.GeofenceService;
import org.tasks.preferences.AppearancePreferences;
import org.tasks.preferences.Preferences;
import javax.inject.Inject;
import static com.todoroo.andlib.utility.AndroidUtilities.atLeastLollipop;
public class NavigationDrawerFragment extends InjectingFragment {
private static final Logger log = LoggerFactory.getLogger(NavigationDrawerFragment.class);
public static final int FRAGMENT_NAVIGATION_DRAWER = R.id.navigation_drawer;
public static final String TOKEN_LAST_SELECTED = "lastSelected"; //$NON-NLS-1$
public static final int REQUEST_NEW_LIST = 4;
public FilterAdapter adapter = null;
private final RefreshReceiver refreshReceiver = new RefreshReceiver();
/**
* A pointer to the current callbacks instance (the Activity).
*/
private OnFilterItemClickedListener mCallbacks;
private DrawerLayout mDrawerLayout;
private ListView mDrawerListView;
private View mFragmentContainerView;
private int mCurrentSelectedPosition = 0;
@Inject FilterCounter filterCounter;
@Inject FilterProvider filterProvider;
@Inject GeofenceService geofenceService;
@Inject Preferences preferences;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (savedInstanceState != null) {
mCurrentSelectedPosition = savedInstanceState.getInt(TOKEN_LAST_SELECTED);
}
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
getActivity().setDefaultKeyMode(Activity.DEFAULT_KEYS_SEARCH_LOCAL);
setUpList();
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == FilterAdapter.REQUEST_SETTINGS && resultCode == Activity.RESULT_OK && data != null) {
if (data.getBooleanExtra(ReminderPreferences.TOGGLE_GEOFENCES, false)) {
if (preferences.geofencesEnabled()) {
geofenceService.setupGeofences();
} else {
geofenceService.cancelGeofences();
}
} else if (data.getBooleanExtra(ReminderPreferences.RESET_GEOFENCES, false)) {
geofenceService.setupGeofences();
}
if (data.getBooleanExtra(AppearancePreferences.FILTERS_CHANGED, false)) {
refresh();
}
if (data.getBooleanExtra(AppearancePreferences.FORCE_REFRESH, false) ||
data.getBooleanExtra(OldTaskPreferences.TOGGLE_DELETED, false)) {
getActivity().finish();
getActivity().startActivity(getActivity().getIntent());
}
} else if ((requestCode == NavigationDrawerFragment.REQUEST_NEW_LIST ||
requestCode == TaskListFragment.ACTIVITY_REQUEST_NEW_FILTER) &&
resultCode == Activity.RESULT_OK) {
if(data == null) {
return;
}
Filter newList = data.getParcelableExtra(TagSettingsActivity.TOKEN_NEW_FILTER);
if (newList != null) {
getActivity().getIntent().putExtra(TaskListActivity.TOKEN_SWITCH_TO_FILTER, newList);
clear();
}
} else {
super.onActivityResult(requestCode, resultCode, data);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View layout = inflater.inflate(R.layout.fragment_navigation_drawer, container, false);
if (atLeastLollipop()) {
((ScrimInsetsFrameLayout) layout.findViewById(R.id.scrim_layout)).setOnInsetsCallback(new ScrimInsetsFrameLayout.OnInsetsCallback() {
@Override
public void onInsetsChanged(Rect insets) {
mDrawerListView.setPadding(0, insets.top, 0, 0);
}
});
}
mDrawerListView = (ListView) layout.findViewById(android.R.id.list);
mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
selectItem(position);
}
});
mDrawerListView.setItemChecked(mCurrentSelectedPosition, true);
return layout;
}
private void setUpList() {
adapter = new FilterAdapter(filterProvider, filterCounter, getActivity(), mDrawerListView, true);
mDrawerListView.setAdapter(adapter);
registerForContextMenu(mDrawerListView);
}
public boolean isDrawerOpen() {
return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView);
}
/**
* Users of this fragment must call this method to set up the navigation drawer interactions.
*
* @param drawerLayout The DrawerLayout containing this fragment's UI.
*/
public void setUp(DrawerLayout drawerLayout) {
mFragmentContainerView = getActivity().findViewById(FRAGMENT_NAVIGATION_DRAWER);
mDrawerLayout = drawerLayout;
// set a custom shadow that overlays the main content when the drawer opens
mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
}
/**
 * Lifecycle pause: detaches the adapter's internal receiver and this
 * fragment's refresh receiver so no broadcasts arrive while hidden.
 */
@Override
public void onPause() {
super.onPause();
if(adapter != null) {
adapter.unregisterRecevier();
}
try {
// unregisterReceiver throws IllegalArgumentException when the receiver
// was never registered (e.g. onResume never ran for this instance).
getActivity().unregisterReceiver(refreshReceiver);
} catch (IllegalArgumentException e) {
// Might not have fully initialized
log.error(e.getMessage(), e);
}
}
/**
 * Handles a tap on a drawer row. Filter rows become the checked selection
 * and are reported to the host activity; action rows launch their intent,
 * optionally expecting a result.
 */
private void selectItem(int position) {
    closeMenu();
    final FilterListItem item = adapter.getItem(position);
    if (item instanceof NavigationDrawerAction) {
        final NavigationDrawerAction action = (NavigationDrawerAction) item;
        if (action.requestCode > 0) {
            startActivityForResult(action.intent, action.requestCode);
        } else {
            startActivity(action.intent);
        }
        return;
    }
    if (!(item instanceof Filter)) {
        return;
    }
    mCurrentSelectedPosition = position;
    if (mDrawerListView != null) {
        mDrawerListView.setItemChecked(position, true);
    }
    if (mCallbacks != null) {
        mCallbacks.onFilterItemClicked(item);
    }
}
/**
 * Captures the host activity as the selection callback. The host MUST
 * implement {@link OnFilterItemClickedListener}; fail fast with a
 * descriptive message instead of an anonymous ClassCastException from the
 * blind cast the original code performed.
 */
@Override
public void onAttach(Activity activity) {
    super.onAttach(activity);
    if (!(activity instanceof OnFilterItemClickedListener)) {
        throw new ClassCastException(activity.getClass().getName()
                + " must implement OnFilterItemClickedListener");
    }
    mCallbacks = (OnFilterItemClickedListener) activity;
}
/**
 * Lifecycle detach: drops the callback reference so the detached activity
 * cannot be leaked or invoked after it is gone.
 */
@Override
public void onDetach() {
super.onDetach();
mCallbacks = null;
}
/**
 * Persists the currently selected drawer row index so the selection can be
 * restored after process death or configuration change.
 */
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putInt(TOKEN_LAST_SELECTED, mCurrentSelectedPosition);
}
/** Closes this fragment's drawer; no-op if setUp() has not run yet. */
public void closeMenu() {
    if (mDrawerLayout == null) {
        return;
    }
    mDrawerLayout.closeDrawer(mFragmentContainerView);
}
/** Opens this fragment's drawer; no-op if setUp() has not run yet. */
public void openDrawer() {
    if (mDrawerLayout == null) {
        return;
    }
    mDrawerLayout.openDrawer(mFragmentContainerView);
}
/**
 * Refreshes the per-filter task counts shown in the drawer.
 * Guards against the adapter not having been created yet, consistent with
 * the null checks in onPause()/onResume().
 */
public void refreshFilterCount() {
    if (adapter != null) {
        adapter.refreshFilterCount();
    }
}
/**
 * Callback the host activity implements to be told which drawer item the
 * user selected.
 */
public interface OnFilterItemClickedListener {
// NOTE(review): return-value semantics are not visible in this file —
// presumably whether the click was handled; confirm against implementers.
boolean onFilterItemClicked(FilterListItem item);
}
/**
 * Clears all rows from the drawer adapter.
 * Guards against a null adapter: clear() is reachable from
 * onActivityResult() before setUpList() has necessarily run.
 */
public void clear() {
    if (adapter != null) {
        adapter.clear();
    }
}
/**
 * Repopulates the drawer list from the filter provider.
 * Guards against a null adapter: refresh() is invoked asynchronously from
 * the broadcast receiver and may race fragment initialization.
 */
public void refresh() {
    if (adapter != null) {
        adapter.populateList();
    }
}
/**
 * Lifecycle resume: re-attaches the adapter's internal receiver and
 * registers for refresh broadcasts so the list stays in sync.
 */
@Override
public void onResume() {
super.onResume();
if(adapter != null) {
adapter.registerRecevier();
}
// also load sync actions
// Matching unregister happens in onPause().
getActivity().registerReceiver(refreshReceiver,
new IntentFilter(AstridApiConstants.BROADCAST_EVENT_REFRESH));
}
/**
 * Receiver which reacts to refresh broadcasts by repopulating the drawer
 * list on the UI thread.
 *
 * @author Tim Su <tim@todoroo.com>
 *
 */
protected class RefreshReceiver extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
        // Only react to the refresh action; ignore anything else.
        if (intent == null || !AstridApiConstants.BROADCAST_EVENT_REFRESH.equals(intent.getAction())) {
            return;
        }
        final Activity activity = getActivity();
        if (activity == null) {
            // Fragment detached; nothing to update.
            return;
        }
        activity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                refresh();
            }
        });
    }
}
}
| gpl-3.0 |
Zerrens/InterstellarOres | src/main/java/appeng/api/config/SecurityPermissions.java | 1111 | package appeng.api.config;
/**
 * Represent the security systems basic permissions, these are not for anti-griefing, they are part of the mod as a
 * gameplay feature.
 */
public enum SecurityPermissions
{
	/**
	 * required to insert items into the network via terminal ( also used for machines based on the owner of the
	 * network, which is determined by its Security Block. )
	 */
	INJECT,

	/**
	 * required to extract items from the network via terminal ( also used for machines based on the owner of the
	 * network, which is determined by its Security Block. )
	 */
	EXTRACT,

	/**
	 * required to request crafting from the network via terminal.
	 */
	CRAFT,

	/**
	 * required to modify automation, and make modifications to the networks physical layout.
	 */
	BUILD,

	/**
	 * required to modify the security blocks settings.
	 */
	SECURITY;

	/**
	 * Base localization key, e.g. "gui.appliedenergistics2.security.inject".
	 * Locale.ROOT keeps the key stable regardless of the JVM default locale —
	 * a bare toLowerCase() would yield a dotless 'ı' under Turkish locales
	 * and break the resource lookup.
	 */
	private final String unlocalizedName = "gui.appliedenergistics2.security." + name().toLowerCase( java.util.Locale.ROOT );

	/**
	 * @return the localization key of this permission's display name.
	 */
	public String getUnlocalizedName()
	{
		return this.unlocalizedName + ".name";
	}

	/**
	 * @return the localization key of this permission's tooltip.
	 */
	public String getUnlocalizedTip()
	{
		return this.unlocalizedName + ".tip";
	}
}
| gpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/DomainObjects/src/ims/nursing/assessment/domain/objects/EatingManualDexterity.java | 11935 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
/*
* This code was generated
* Copyright (C) 1995-2004 IMS MAXIMS plc. All rights reserved.
* IMS Development Environment (version 1.80 build 5007.25751)
* WARNING: DO NOT MODIFY the content of this file
* Generated: 16/04/2014, 12:34
*
*/
package ims.nursing.assessment.domain.objects;
/**
*
* @author Sinead McDermott
* Generated.
*/
// Generated IMS domain object for the "Eating - Manual Dexterity" nursing
// assessment item. Holds two lookup values and the framework's generated
// audit/XML import-export plumbing. Marked DO NOT MODIFY by the generator,
// so only documentation is added here.
public class EatingManualDexterity extends ims.domain.DomainObject implements java.io.Serializable {
// Framework-wide identifier for this business-object class.
public static final int CLASSID = 1012100023;
private static final long serialVersionUID = 1012100023L;
// Placeholder substituted by the generator; compared during XML import.
public static final String CLASSVERSION = "${ClassVersion}";
@Override
public boolean shouldCapQuery()
{
return true;
}
// Lookup values captured by the assessment form.
private ims.domain.lookups.LookupInstance manualDexterity;
private ims.domain.lookups.LookupInstance status;
public EatingManualDexterity (Integer id, int ver)
{
super(id, ver);
}
public EatingManualDexterity ()
{
super();
}
public EatingManualDexterity (Integer id, int ver, Boolean includeRecord)
{
super(id, ver, includeRecord);
}
public Class getRealDomainClass()
{
return ims.nursing.assessment.domain.objects.EatingManualDexterity.class;
}
public ims.domain.lookups.LookupInstance getManualDexterity() {
return manualDexterity;
}
public void setManualDexterity(ims.domain.lookups.LookupInstance manualDexterity) {
this.manualDexterity = manualDexterity;
}
public ims.domain.lookups.LookupInstance getStatus() {
return status;
}
public void setStatus(ims.domain.lookups.LookupInstance status) {
this.status = status;
}
/**
 * isConfigurationObject
 * Taken from the Usage property of the business object, this method will return
 * a boolean indicating whether this is a configuration object or not
 * Configuration = true, Instantiation = false
 */
public static boolean isConfigurationObject()
{
// Generated from the BO's Usage property ("Instantiation"); the constant
// comparison below is therefore always false for this class.
if ( "Instantiation".equals("Configuration") )
return true;
else
return false;
}
public int getClassId() {
return CLASSID;
}
public String getClassVersion()
{
return CLASSVERSION;
}
// Human-readable dump of the two lookup fields for the audit trail.
public String toAuditString()
{
StringBuffer auditStr = new StringBuffer();
auditStr.append("\r\n*manualDexterity* :");
if (manualDexterity != null)
auditStr.append(manualDexterity.getText());
auditStr.append("; ");
auditStr.append("\r\n*status* :");
if (status != null)
auditStr.append(status.getText());
auditStr.append("; ");
return auditStr.toString();
}
public String toXMLString()
{
return toXMLString(new java.util.HashMap());
}
// Serializes this object as a <class> element. domMap doubles as a
// visited-set (cycle guard) and as the registry of exported instances.
public String toXMLString(java.util.HashMap domMap)
{
StringBuffer sb = new StringBuffer();
sb.append("<class type=\"" + this.getClass().getName() + "\" ");
sb.append(" id=\"" + this.getId() + "\"");
sb.append(" source=\"" + ims.configuration.EnvironmentConfig.getImportExportSourceName() + "\" ");
sb.append(" classVersion=\"" + this.getClassVersion() + "\" ");
sb.append(" component=\"" + this.getIsComponentClass() + "\" >");
if (domMap.get(this) == null)
{
// First visit: emit fields; subsequent visits emit an empty element.
domMap.put(this, this);
sb.append(this.fieldsToXMLString(domMap));
}
sb.append("</class>");
String keyClassName = "EatingManualDexterity";
String externalSource = ims.configuration.EnvironmentConfig.getImportExportSourceName();
ims.configuration.ImportedObject impObj = (ims.configuration.ImportedObject)domMap.get(keyClassName + "_" + externalSource + "_" + this.getId());
if (impObj == null)
{
impObj = new ims.configuration.ImportedObject();
impObj.setExternalId(this.getId());
impObj.setExternalSource(externalSource);
impObj.setDomainObject(this);
impObj.setLocalId(this.getId());
impObj.setClassName(keyClassName);
domMap.put(keyClassName + "_" + externalSource + "_" + this.getId(), impObj);
}
return sb.toString();
}
// Emits child elements for each non-null field.
public String fieldsToXMLString(java.util.HashMap domMap)
{
StringBuffer sb = new StringBuffer();
if (this.getManualDexterity() != null)
{
sb.append("<manualDexterity>");
sb.append(this.getManualDexterity().toXMLString());
sb.append("</manualDexterity>");
}
if (this.getStatus() != null)
{
sb.append("<status>");
sb.append(this.getStatus().toXMLString());
sb.append("</status>");
}
return sb.toString();
}
public static java.util.List fromListXMLString(org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.List list, java.util.HashMap domMap) throws Exception
{
if (list == null)
list = new java.util.ArrayList();
fillListFromXMLString(list, el, factory, domMap);
return list;
}
public static java.util.Set fromSetXMLString(org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.Set set, java.util.HashMap domMap) throws Exception
{
if (set == null)
set = new java.util.HashSet();
fillSetFromXMLString(set, el, factory, domMap);
return set;
}
// Synchronizes 'set' with the <class> children of 'el': imports each child,
// adds missing members, then removes members no longer present in the XML.
private static void fillSetFromXMLString(java.util.Set set, org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.HashMap domMap) throws Exception
{
if (el == null)
return;
java.util.List cl = el.elements("class");
int size = cl.size();
java.util.Set newSet = new java.util.HashSet();
for(int i=0; i<size; i++)
{
org.dom4j.Element itemEl = (org.dom4j.Element)cl.get(i);
EatingManualDexterity domainObject = getEatingManualDexterityfromXML(itemEl, factory, domMap);
if (domainObject == null)
{
continue;
}
//Trying to avoid the hibernate collection being marked as dirty via its public interface methods. (like add)
if (!set.contains(domainObject))
set.add(domainObject);
newSet.add(domainObject);
}
java.util.Set removedSet = new java.util.HashSet();
java.util.Iterator iter = set.iterator();
//Find out which objects need to be removed
while (iter.hasNext())
{
ims.domain.DomainObject o = (ims.domain.DomainObject)iter.next();
if ((o == null || o.getIsRIE() == null || !o.getIsRIE().booleanValue()) && !newSet.contains(o))
{
removedSet.add(o);
}
}
iter = removedSet.iterator();
//Remove the unwanted objects
while (iter.hasNext())
{
set.remove(iter.next());
}
}
// Synchronizes 'list' with the XML children, preserving XML order by
// swapping misplaced entries, then truncating any leftovers.
private static void fillListFromXMLString(java.util.List list, org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.HashMap domMap) throws Exception
{
if (el == null)
return;
java.util.List cl = el.elements("class");
int size = cl.size();
for(int i=0; i<size; i++)
{
org.dom4j.Element itemEl = (org.dom4j.Element)cl.get(i);
EatingManualDexterity domainObject = getEatingManualDexterityfromXML(itemEl, factory, domMap);
if (domainObject == null)
{
continue;
}
int domIdx = list.indexOf(domainObject);
if (domIdx == -1)
{
list.add(i, domainObject);
}
else if (i != domIdx && i < list.size())
{
Object tmp = list.get(i);
list.set(i, list.get(domIdx));
list.set(domIdx, tmp);
}
}
//Remove all ones in domList where index > voCollection.size() as these should
//now represent the ones removed from the VO collection. No longer referenced.
int i1=list.size();
while (i1 > size)
{
list.remove(i1-1);
i1=list.size();
}
}
public static EatingManualDexterity getEatingManualDexterityfromXML(String xml, ims.domain.DomainFactory factory, java.util.HashMap domMap) throws Exception
{
org.dom4j.Document doc = new org.dom4j.io.SAXReader().read(new org.xml.sax.InputSource(xml));
return getEatingManualDexterityfromXML(doc.getRootElement(), factory, domMap);
}
// Imports one instance from a <class> element. Dispatches reflectively to
// the subclass importer when the element's type attribute names a subclass.
public static EatingManualDexterity getEatingManualDexterityfromXML(org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.HashMap domMap) throws Exception
{
if (el == null)
return null;
String className = el.attributeValue("type");
if (!EatingManualDexterity.class.getName().equals(className))
{
Class clz = Class.forName(className);
if (!EatingManualDexterity.class.isAssignableFrom(clz))
throw new Exception("Element of type = " + className + " cannot be imported using the EatingManualDexterity class");
String shortClassName = className.substring(className.lastIndexOf(".")+1);
String methodName = "get" + shortClassName + "fromXML";
java.lang.reflect.Method m = clz.getMethod(methodName, new Class[]{org.dom4j.Element.class, ims.domain.DomainFactory.class, java.util.HashMap.class});
return (EatingManualDexterity)m.invoke(null, new Object[]{el, factory, domMap});
}
// Reject imports generated from an incompatible class structure.
String impVersion = el.attributeValue("classVersion");
if(!impVersion.equals(EatingManualDexterity.CLASSVERSION))
{
throw new Exception("Incompatible class structure found. Cannot import instance.");
}
EatingManualDexterity ret = null;
int extId = Integer.parseInt(el.attributeValue("id"));
String externalSource = el.attributeValue("source");
ret = (EatingManualDexterity)factory.getImportedDomainObject(EatingManualDexterity.class, externalSource, extId);
if (ret == null)
{
ret = new EatingManualDexterity();
}
String keyClassName = "EatingManualDexterity";
ims.configuration.ImportedObject impObj = (ims.configuration.ImportedObject)domMap.get(keyClassName + "_" + externalSource + "_" + extId);
if (impObj != null)
{
// Already imported during this pass; reuse the mapped instance.
return (EatingManualDexterity)impObj.getDomainObject();
}
else
{
impObj = new ims.configuration.ImportedObject();
impObj.setExternalId(extId);
impObj.setExternalSource(externalSource);
impObj.setDomainObject(ret);
domMap.put(keyClassName + "_" + externalSource + "_" + extId, impObj);
}
fillFieldsfromXML(el, factory, ret, domMap);
return ret;
}
// Populates the two lookup fields from their child elements, if present.
public static void fillFieldsfromXML(org.dom4j.Element el, ims.domain.DomainFactory factory, EatingManualDexterity obj, java.util.HashMap domMap) throws Exception
{
org.dom4j.Element fldEl;
fldEl = el.element("manualDexterity");
if(fldEl != null)
{
fldEl = fldEl.element("lki");
obj.setManualDexterity(ims.domain.lookups.LookupInstance.fromXMLString(fldEl, factory));
}
fldEl = el.element("status");
if(fldEl != null)
{
fldEl = fldEl.element("lki");
obj.setStatus(ims.domain.lookups.LookupInstance.fromXMLString(fldEl, factory));
}
}
public static String[] getCollectionFields()
{
return new String[]{
};
}
// Field-name constants used by framework queries.
public static class FieldNames
{
public static final String ID = "id";
public static final String ManualDexterity = "manualDexterity";
public static final String Status = "status";
}
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/Core/src/ims/core/forms/charttypedetails/BaseAccessLogic.java | 3799 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.core.forms.charttypedetails;
import java.io.Serializable;
import ims.framework.Context;
import ims.framework.FormName;
import ims.framework.FormAccessLogic;
// Generated form access-logic base for the "chart type details" form.
// Exposes the form/engine context plus the neighbouring form names this
// form may navigate to. Marked DO NOT MODIFY by the generator.
public class BaseAccessLogic extends FormAccessLogic implements Serializable
{
private static final long serialVersionUID = 1L;
// Called by the framework to wire this logic to its form and engine.
public final void setContext(Context context, FormName formName)
{
form = new CurrentForm(new GlobalContext(context), new CurrentForms());
engine = new CurrentEngine(formName);
}
// This form is always accessible and never read-only.
public boolean isAccessible()
{
return true;
}
public boolean isReadOnly()
{
return false;
}
public CurrentEngine engine;
public CurrentForm form;
// Immutable holder pairing the global context with the known form names.
public final static class CurrentForm implements Serializable
{
private static final long serialVersionUID = 1L;
CurrentForm(GlobalContext globalcontext, CurrentForms forms)
{
this.globalcontext = globalcontext;
this.forms = forms;
}
public final GlobalContext getGlobalContext()
{
return globalcontext;
}
public final CurrentForms getForms()
{
return forms;
}
private GlobalContext globalcontext;
private CurrentForms forms;
}
// Immutable holder for the current form's name.
public final static class CurrentEngine implements Serializable
{
private static final long serialVersionUID = 1L;
CurrentEngine(FormName formName)
{
this.formName = formName;
}
public final FormName getFormName()
{
return formName;
}
private FormName formName;
}
// Registry of the form identifiers reachable from this form.
public static final class CurrentForms implements Serializable
{
private static final long serialVersionUID = 1L;
// FormName's constructor is protected; this subclass exposes it locally.
protected final class LocalFormName extends FormName
{
private static final long serialVersionUID = 1L;
protected LocalFormName(int value)
{
super(value);
}
}
private CurrentForms()
{
Core = new CoreForms();
}
public final class CoreForms implements Serializable
{
private static final long serialVersionUID = 1L;
private CoreForms()
{
// Generated form ids for the Core module.
ChartTypeList = new LocalFormName(102233);
DataTypeSearch = new LocalFormName(102235);
}
public final FormName ChartTypeList;
public final FormName DataTypeSearch;
}
public CoreForms Core;
}
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/ValueObjects/src/ims/admin/vo/lookups/ContextVariableValType.java | 6353 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.admin.vo.lookups;
import ims.framework.cn.data.TreeNode;
import java.util.ArrayList;
import ims.framework.utils.Image;
import ims.framework.utils.Color;
// Generated lookup type for context-variable value kinds. Instances form a
// tree (TreeNode) and the four "negative instances" below are the fixed,
// framework-defined members of this lookup. Marked DO NOT MODIFY.
public class ContextVariableValType extends ims.vo.LookupInstVo implements TreeNode
{
private static final long serialVersionUID = 1L;
public ContextVariableValType()
{
super();
}
public ContextVariableValType(int id)
{
super(id, "", true);
}
public ContextVariableValType(int id, String text, boolean active)
{
super(id, text, active, null, null, null);
}
public ContextVariableValType(int id, String text, boolean active, ContextVariableValType parent, Image image)
{
super(id, text, active, parent, image);
}
public ContextVariableValType(int id, String text, boolean active, ContextVariableValType parent, Image image, Color color)
{
super(id, text, active, parent, image, color);
}
public ContextVariableValType(int id, String text, boolean active, ContextVariableValType parent, Image image, Color color, int order)
{
super(id, text, active, parent, image, color, order);
}
// Builds an instance from its serialized bean form.
public static ContextVariableValType buildLookup(ims.vo.LookupInstanceBean bean)
{
return new ContextVariableValType(bean.getId(), bean.getText(), bean.isActive());
}
public String toString()
{
if(getText() != null)
return getText();
return "";
}
// --- TreeNode implementation: typed wrappers over the base class tree ---
public TreeNode getParentNode()
{
return (ContextVariableValType)super.getParentInstance();
}
public ContextVariableValType getParent()
{
return (ContextVariableValType)super.getParentInstance();
}
public void setParent(ContextVariableValType parent)
{
super.setParentInstance(parent);
}
public TreeNode[] getChildren()
{
ArrayList children = super.getChildInstances();
ContextVariableValType[] typedChildren = new ContextVariableValType[children.size()];
for (int i = 0; i < children.size(); i++)
{
typedChildren[i] = (ContextVariableValType)children.get(i);
}
return typedChildren;
}
// Adds the child only when it is of this lookup type; returns child count.
public int addChild(TreeNode child)
{
if (child instanceof ContextVariableValType)
{
super.addChild((ContextVariableValType)child);
}
return super.getChildInstances().size();
}
public int removeChild(TreeNode child)
{
if (child instanceof ContextVariableValType)
{
super.removeChild((ContextVariableValType)child);
}
return super.getChildInstances().size();
}
public Image getExpandedImage()
{
return super.getImage();
}
public Image getCollapsedImage()
{
return super.getImage();
}
// --- Fixed negative instances of this lookup ---
public static ims.framework.IItemCollection getNegativeInstancesAsIItemCollection()
{
ContextVariableValTypeCollection result = new ContextVariableValTypeCollection();
result.add(BUSINESS_OBJECT);
result.add(LOOKUP_INSTANCE);
result.add(LOOKUP_TYPE);
result.add(SIMPLE);
return result;
}
public static ContextVariableValType[] getNegativeInstances()
{
ContextVariableValType[] instances = new ContextVariableValType[4];
instances[0] = BUSINESS_OBJECT;
instances[1] = LOOKUP_INSTANCE;
instances[2] = LOOKUP_TYPE;
instances[3] = SIMPLE;
return instances;
}
// Names are kept index-aligned with getNegativeInstances().
public static String[] getNegativeInstanceNames()
{
String[] negativeInstances = new String[4];
negativeInstances[0] = "BUSINESS_OBJECT";
negativeInstances[1] = "LOOKUP_INSTANCE";
negativeInstances[2] = "LOOKUP_TYPE";
negativeInstances[3] = "SIMPLE";
return negativeInstances;
}
// Looks up a negative instance by its symbolic name; null when not found.
public static ContextVariableValType getNegativeInstance(String name)
{
if(name == null)
return null;
String[] negativeInstances = getNegativeInstanceNames();
for (int i = 0; i < negativeInstances.length; i++)
{
if(negativeInstances[i].equals(name))
return getNegativeInstances()[i];
}
return null;
}
// Looks up a negative instance by its id; null when not found.
public static ContextVariableValType getNegativeInstance(Integer id)
{
if(id == null)
return null;
ContextVariableValType[] negativeInstances = getNegativeInstances();
for (int i = 0; i < negativeInstances.length; i++)
{
if(negativeInstances[i].getID() == id)
return negativeInstances[i];
}
return null;
}
public int getTypeId()
{
return TYPE_ID;
}
public static final int TYPE_ID = 1031011;
public static final ContextVariableValType BUSINESS_OBJECT = new ContextVariableValType(-1180, "Business Object", true, null, null, Color.Default);
public static final ContextVariableValType LOOKUP_INSTANCE = new ContextVariableValType(-1181, "Lookup Instance", true, null, null, Color.Default);
public static final ContextVariableValType LOOKUP_TYPE = new ContextVariableValType(-1182, "Lookup Type", true, null, null, Color.Default);
public static final ContextVariableValType SIMPLE = new ContextVariableValType(-1183, "Simple", true, null, null, Color.Default);
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/ValueObjects/src/ims/core/vo/PendingElectiveAdmissionAdmitVoCollection.java | 8722 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.core.vo;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import ims.framework.enumerations.SortOrder;
/**
* Linked to core.admin.pas.PendingElectiveAdmission business object (ID: 1014100012).
*/
// Generated value-object collection wrapper around an ArrayList of
// PendingElectiveAdmissionAdmitVo. Enforces uniqueness on add, supports
// validation, sorting, ref-VO conversion and bean round-tripping.
// Marked DO NOT MODIFY by the generator.
public class PendingElectiveAdmissionAdmitVoCollection extends ims.vo.ValueObjectCollection implements ims.vo.ImsCloneable, Iterable<PendingElectiveAdmissionAdmitVo>
{
private static final long serialVersionUID = 1L;
private ArrayList<PendingElectiveAdmissionAdmitVo> col = new ArrayList<PendingElectiveAdmissionAdmitVo>();
public String getBoClassName()
{
return "ims.core.admin.pas.domain.objects.PendingElectiveAdmission";
}
// Appends the value unless it is null or already present.
public boolean add(PendingElectiveAdmissionAdmitVo value)
{
if(value == null)
return false;
if(this.col.indexOf(value) < 0)
{
return this.col.add(value);
}
return false;
}
// Inserts at the given index unless the value is null or already present.
public boolean add(int index, PendingElectiveAdmissionAdmitVo value)
{
if(value == null)
return false;
if(this.col.indexOf(value) < 0)
{
this.col.add(index, value);
return true;
}
return false;
}
public void clear()
{
this.col.clear();
}
public void remove(int index)
{
this.col.remove(index);
}
public int size()
{
return this.col.size();
}
public int indexOf(PendingElectiveAdmissionAdmitVo instance)
{
return col.indexOf(instance);
}
public PendingElectiveAdmissionAdmitVo get(int index)
{
return this.col.get(index);
}
public boolean set(int index, PendingElectiveAdmissionAdmitVo value)
{
if(value == null)
return false;
this.col.set(index, value);
return true;
}
public void remove(PendingElectiveAdmissionAdmitVo instance)
{
if(instance != null)
{
int index = indexOf(instance);
if(index >= 0)
remove(index);
}
}
public boolean contains(PendingElectiveAdmissionAdmitVo instance)
{
return indexOf(instance) >= 0;
}
// Deep copy: clones every element (preserving nulls).
public Object clone()
{
PendingElectiveAdmissionAdmitVoCollection clone = new PendingElectiveAdmissionAdmitVoCollection();
for(int x = 0; x < this.col.size(); x++)
{
if(this.col.get(x) != null)
clone.col.add((PendingElectiveAdmissionAdmitVo)this.col.get(x).clone());
else
clone.col.add(null);
}
return clone;
}
// True only when every element validates.
public boolean isValidated()
{
for(int x = 0; x < col.size(); x++)
if(!this.col.get(x).isValidated())
return false;
return true;
}
public String[] validate()
{
return validate(null);
}
// Collects element validation errors, appended after any existing errors;
// returns null when there is nothing to report.
public String[] validate(String[] existingErrors)
{
if(col.size() == 0)
return null;
java.util.ArrayList<String> listOfErrors = new java.util.ArrayList<String>();
if(existingErrors != null)
{
for(int x = 0; x < existingErrors.length; x++)
{
listOfErrors.add(existingErrors[x]);
}
}
for(int x = 0; x < col.size(); x++)
{
String[] listOfOtherErrors = this.col.get(x).validate();
if(listOfOtherErrors != null)
{
for(int y = 0; y < listOfOtherErrors.length; y++)
{
listOfErrors.add(listOfOtherErrors[y]);
}
}
}
int errorCount = listOfErrors.size();
if(errorCount == 0)
return null;
String[] result = new String[errorCount];
for(int x = 0; x < errorCount; x++)
result[x] = (String)listOfErrors.get(x);
return result;
}
// --- In-place sorting; all overloads return this collection ---
public PendingElectiveAdmissionAdmitVoCollection sort()
{
return sort(SortOrder.ASCENDING);
}
public PendingElectiveAdmissionAdmitVoCollection sort(boolean caseInsensitive)
{
return sort(SortOrder.ASCENDING, caseInsensitive);
}
public PendingElectiveAdmissionAdmitVoCollection sort(SortOrder order)
{
return sort(new PendingElectiveAdmissionAdmitVoComparator(order));
}
public PendingElectiveAdmissionAdmitVoCollection sort(SortOrder order, boolean caseInsensitive)
{
return sort(new PendingElectiveAdmissionAdmitVoComparator(order, caseInsensitive));
}
@SuppressWarnings("unchecked")
public PendingElectiveAdmissionAdmitVoCollection sort(Comparator comparator)
{
Collections.sort(col, comparator);
return this;
}
// Shallow conversion to the reference-VO collection type.
public ims.core.admin.pas.vo.PendingElectiveAdmissionRefVoCollection toRefVoCollection()
{
ims.core.admin.pas.vo.PendingElectiveAdmissionRefVoCollection result = new ims.core.admin.pas.vo.PendingElectiveAdmissionRefVoCollection();
for(int x = 0; x < this.col.size(); x++)
{
result.add(this.col.get(x));
}
return result;
}
public PendingElectiveAdmissionAdmitVo[] toArray()
{
PendingElectiveAdmissionAdmitVo[] arr = new PendingElectiveAdmissionAdmitVo[col.size()];
col.toArray(arr);
return arr;
}
public Iterator<PendingElectiveAdmissionAdmitVo> iterator()
{
return col.iterator();
}
@Override
protected ArrayList getTypedCollection()
{
return col;
}
// Delegates element comparison to the VO's compareTo; direction flips for
// descending order.
private class PendingElectiveAdmissionAdmitVoComparator implements Comparator
{
private int direction = 1;
private boolean caseInsensitive = true;
public PendingElectiveAdmissionAdmitVoComparator()
{
this(SortOrder.ASCENDING);
}
public PendingElectiveAdmissionAdmitVoComparator(SortOrder order)
{
if (order == SortOrder.DESCENDING)
{
direction = -1;
}
}
public PendingElectiveAdmissionAdmitVoComparator(SortOrder order, boolean caseInsensitive)
{
if (order == SortOrder.DESCENDING)
{
direction = -1;
}
this.caseInsensitive = caseInsensitive;
}
public int compare(Object obj1, Object obj2)
{
PendingElectiveAdmissionAdmitVo voObj1 = (PendingElectiveAdmissionAdmitVo)obj1;
PendingElectiveAdmissionAdmitVo voObj2 = (PendingElectiveAdmissionAdmitVo)obj2;
return direction*(voObj1.compareTo(voObj2, this.caseInsensitive));
}
public boolean equals(Object obj)
{
return false;
}
}
// --- Bean round-tripping for serialization across tiers ---
public ims.core.vo.beans.PendingElectiveAdmissionAdmitVoBean[] getBeanCollection()
{
return getBeanCollectionArray();
}
public ims.core.vo.beans.PendingElectiveAdmissionAdmitVoBean[] getBeanCollectionArray()
{
ims.core.vo.beans.PendingElectiveAdmissionAdmitVoBean[] result = new ims.core.vo.beans.PendingElectiveAdmissionAdmitVoBean[col.size()];
for(int i = 0; i < col.size(); i++)
{
PendingElectiveAdmissionAdmitVo vo = ((PendingElectiveAdmissionAdmitVo)col.get(i));
result[i] = (ims.core.vo.beans.PendingElectiveAdmissionAdmitVoBean)vo.getBean();
}
return result;
}
public static PendingElectiveAdmissionAdmitVoCollection buildFromBeanCollection(java.util.Collection beans)
{
PendingElectiveAdmissionAdmitVoCollection coll = new PendingElectiveAdmissionAdmitVoCollection();
if(beans == null)
return coll;
java.util.Iterator iter = beans.iterator();
while (iter.hasNext())
{
coll.add(((ims.core.vo.beans.PendingElectiveAdmissionAdmitVoBean)iter.next()).buildVo());
}
return coll;
}
public static PendingElectiveAdmissionAdmitVoCollection buildFromBeanCollection(ims.core.vo.beans.PendingElectiveAdmissionAdmitVoBean[] beans)
{
PendingElectiveAdmissionAdmitVoCollection coll = new PendingElectiveAdmissionAdmitVoCollection();
if(beans == null)
return coll;
for(int x = 0; x < beans.length; x++)
{
coll.add(beans[x].buildVo());
}
return coll;
}
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/Core/src/ims/core/forms/uploaddocumentdialog/Handlers.java | 5380 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.core.forms.uploaddocumentdialog;
import ims.framework.delegates.*;
/**
 * Generated event-wiring base class for the Upload Document dialog form logic.
 * <p>
 * {@link #setContext} registers anonymous listener instances on the generated form
 * ({@code GenForm}) so that UI events are routed to the abstract {@code on...}
 * callbacks, which the concrete (hand-written) form-logic subclass implements.
 * The anonymous listeners declare {@code serialVersionUID} because the delegate
 * types are serializable; do not replace them with lambdas.
 * <p>
 * NOTE: file header states this class is tool-generated ("DO NOT MODIFY").
 */
abstract public class Handlers implements ims.framework.UILogic, IFormUILogicCode
{
	// Lookup binding/defaulting hooks supplied by the concrete form logic.
	abstract protected void bindcmbSourceLookup();
	abstract protected void defaultcmbSourceLookupValue();
	abstract protected void bindcmbTypeLookup();
	abstract protected void defaultcmbTypeLookupValue();
	// UI event callbacks supplied by the concrete form logic.
	abstract protected void onCustomEvent(ims.framework.CustomEvent event) throws ims.framework.exceptions.PresentationLogicException;
	abstract protected void onMessageBoxClosed(int messageBoxId, ims.framework.enumerations.DialogResult result) throws ims.framework.exceptions.PresentationLogicException;
	abstract protected void onFormOpen(Object[] args) throws ims.framework.exceptions.PresentationLogicException;
	abstract protected void onBtnCancelClick() throws ims.framework.exceptions.PresentationLogicException;
	abstract protected void onBtnSaveClick() throws ims.framework.exceptions.PresentationLogicException;
	abstract protected void oncmbSourceValueSet(Object value);
	abstract protected void oncmbTypeValueSet(Object value);
	abstract protected void onCcUploadValueChanged() throws ims.framework.exceptions.PresentationLogicException;
	/**
	 * Stores the engine/form references and wires every form event to its
	 * corresponding abstract callback. Called once by the framework when the
	 * form logic is attached to the generated form.
	 *
	 * @param engine the UI engine hosting this form
	 * @param form   the generated form whose events are to be routed
	 */
	public final void setContext(ims.framework.UIEngine engine, GenForm form)
	{
		this.engine = engine;
		this.form = form;
		this.form.setCustomEventEvent(new CustomEvent()
		{
			private static final long serialVersionUID = 1L;
			public void handle(ims.framework.CustomEvent event) throws ims.framework.exceptions.PresentationLogicException
			{
				onCustomEvent(event);
			}
		});
		this.form.setMessageBoxClosedEvent(new MessageBoxClosed()
		{
			private static final long serialVersionUID = 1L;
			public void handle(int messageBoxId, ims.framework.enumerations.DialogResult result) throws ims.framework.exceptions.PresentationLogicException
			{
				onMessageBoxClosed(messageBoxId, result);
			}
		});
		this.form.setFormOpenEvent(new FormOpen()
		{
			private static final long serialVersionUID = 1L;
			public void handle(Object[] args) throws ims.framework.exceptions.PresentationLogicException
			{
				// Lookups are bound before the subclass's open handler runs so the
				// combo boxes are populated by the time onFormOpen executes.
				bindLookups();
				onFormOpen(args);
			}
		});
		this.form.btnCancel().setClickEvent(new Click()
		{
			private static final long serialVersionUID = 1L;
			public void handle() throws ims.framework.exceptions.PresentationLogicException
			{
				onBtnCancelClick();
			}
		});
		this.form.btnSave().setClickEvent(new Click()
		{
			private static final long serialVersionUID = 1L;
			public void handle() throws ims.framework.exceptions.PresentationLogicException
			{
				onBtnSaveClick();
			}
		});
		this.form.cmbSource().setValueSetEvent(new ComboBoxValueSet()
		{
			private static final long serialVersionUID = 1L;
			public void handle(Object value)
			{
				oncmbSourceValueSet(value);
			}
		});
		this.form.cmbType().setValueSetEvent(new ComboBoxValueSet()
		{
			private static final long serialVersionUID = 1L;
			public void handle(Object value)
			{
				oncmbTypeValueSet(value);
			}
		});
		this.form.setccUploadValueChangedEvent(new ValueChanged()
		{
			private static final long serialVersionUID = 1L;
			public void handle() throws ims.framework.exceptions.PresentationLogicException
			{
				onCcUploadValueChanged();
			}
		});
	}
	/** Binds both lookup combo boxes; invoked automatically on form open. */
	protected void bindLookups()
	{
		bindcmbSourceLookup();
		bindcmbTypeLookup();
	}
	/** Re-binds both lookup combo boxes (identical to bindLookups; kept for the generated API). */
	protected void rebindAllLookups()
	{
		bindcmbSourceLookup();
		bindcmbTypeLookup();
	}
	/** Applies the default value to each lookup combo box. */
	protected void defaultAllLookupValues()
	{
		defaultcmbSourceLookupValue();
		defaultcmbTypeLookupValue();
	}
	/** Releases the engine and form references so the form can be garbage collected. */
	public void free()
	{
		this.engine = null;
		this.form = null;
	}
	// Framework context, populated by setContext and cleared by free.
	protected ims.framework.UIEngine engine;
	protected GenForm form;
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/RefMan/src/ims/RefMan/forms/inpatientclinicalcodingworklist/IFormUILogicCode.java | 342 | // This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.RefMan.forms.inpatientclinicalcodingworklist;
/**
 * Generated marker interface for the Inpatient Clinical Coding Worklist form's
 * UI logic. Currently empty; the code generator adds methods here when the form
 * declares them.
 */
public interface IFormUILogicCode
{
	// No methods yet.
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/ValueObjects/src/ims/admin/vo/beans/ElectiveListConfigSearchCriteriaVoBean.java | 5561 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.admin.vo.beans;
/**
 * Serializable bean counterpart of {@code ims.admin.vo.ElectiveListConfigSearchCriteriaVo}
 * (search criteria for elective list configuration). Converts between the bean and
 * VO representations; nested VOs are converted via their own beans.
 */
public class ElectiveListConfigSearchCriteriaVoBean extends ims.vo.ValueObjectBean
{
	/** Creates an empty bean. */
	public ElectiveListConfigSearchCriteriaVoBean()
	{
	}
	/**
	 * Copies all fields from the given VO without an identity map
	 * (nested VOs are converted directly).
	 *
	 * @param vo source value object; must not be null
	 */
	public ElectiveListConfigSearchCriteriaVoBean(ims.admin.vo.ElectiveListConfigSearchCriteriaVo vo)
	{
		this.waitinglistname = vo.getWaitingListName();
		this.service = vo.getService() == null ? null : (ims.admin.vo.beans.ServiceForElectiveListConfigVoBean)vo.getService().getBean();
		this.consultant = vo.getConsultant() == null ? null : (ims.core.vo.beans.HcpLiteVoBean)vo.getConsultant().getBean();
		this.hospital = vo.getHospital() == null ? null : (ims.core.vo.beans.LocationLiteVoBean)vo.getHospital().getBean();
		this.active = vo.getActive();
		this.fromdate = vo.getFromDate() == null ? null : (ims.framework.utils.beans.DateBean)vo.getFromDate().getBean();
		this.todate = vo.getToDate() == null ? null : (ims.framework.utils.beans.DateBean)vo.getToDate().getBean();
	}
	/**
	 * Copies all fields from the given VO, routing nested VO conversion through the
	 * shared bean map so object identity is preserved across the graph.
	 *
	 * @param map shared bean/VO identity map
	 * @param vo  source value object; must not be null
	 */
	public void populate(ims.vo.ValueObjectBeanMap map, ims.admin.vo.ElectiveListConfigSearchCriteriaVo vo)
	{
		this.waitinglistname = vo.getWaitingListName();
		this.service = vo.getService() == null ? null : (ims.admin.vo.beans.ServiceForElectiveListConfigVoBean)vo.getService().getBean(map);
		this.consultant = vo.getConsultant() == null ? null : (ims.core.vo.beans.HcpLiteVoBean)vo.getConsultant().getBean(map);
		this.hospital = vo.getHospital() == null ? null : (ims.core.vo.beans.LocationLiteVoBean)vo.getHospital().getBean(map);
		this.active = vo.getActive();
		this.fromdate = vo.getFromDate() == null ? null : (ims.framework.utils.beans.DateBean)vo.getFromDate().getBean();
		this.todate = vo.getToDate() == null ? null : (ims.framework.utils.beans.DateBean)vo.getToDate().getBean();
	}
	/** Builds a VO from this bean using a fresh identity map. */
	public ims.admin.vo.ElectiveListConfigSearchCriteriaVo buildVo()
	{
		return this.buildVo(new ims.vo.ValueObjectBeanMap());
	}
	/**
	 * Builds (or retrieves from the map) the VO corresponding to this bean.
	 * <p>
	 * Fix: the original code null-checked {@code map} before the lookup but then
	 * dereferenced it unconditionally in {@code map.addValueObject(...)}, throwing
	 * NullPointerException for a null map. A null map now falls back to a fresh one,
	 * matching the intent of the original null check; behavior for non-null maps is
	 * unchanged.
	 *
	 * @param map shared bean/VO identity map; a fresh map is used when null
	 * @return the VO for this bean, never null
	 */
	public ims.admin.vo.ElectiveListConfigSearchCriteriaVo buildVo(ims.vo.ValueObjectBeanMap map)
	{
		if(map == null)
			map = new ims.vo.ValueObjectBeanMap();
		ims.admin.vo.ElectiveListConfigSearchCriteriaVo vo = (ims.admin.vo.ElectiveListConfigSearchCriteriaVo)map.getValueObject(this);
		if(vo == null)
		{
			vo = new ims.admin.vo.ElectiveListConfigSearchCriteriaVo();
			// Register before populate so cyclic references resolve to this instance.
			map.addValueObject(this, vo);
			vo.populate(map, this);
		}
		return vo;
	}
	// Plain accessors; all fields are nullable search criteria.
	public String getWaitingListName()
	{
		return this.waitinglistname;
	}
	public void setWaitingListName(String value)
	{
		this.waitinglistname = value;
	}
	public ims.admin.vo.beans.ServiceForElectiveListConfigVoBean getService()
	{
		return this.service;
	}
	public void setService(ims.admin.vo.beans.ServiceForElectiveListConfigVoBean value)
	{
		this.service = value;
	}
	public ims.core.vo.beans.HcpLiteVoBean getConsultant()
	{
		return this.consultant;
	}
	public void setConsultant(ims.core.vo.beans.HcpLiteVoBean value)
	{
		this.consultant = value;
	}
	public ims.core.vo.beans.LocationLiteVoBean getHospital()
	{
		return this.hospital;
	}
	public void setHospital(ims.core.vo.beans.LocationLiteVoBean value)
	{
		this.hospital = value;
	}
	public Boolean getActive()
	{
		return this.active;
	}
	public void setActive(Boolean value)
	{
		this.active = value;
	}
	public ims.framework.utils.beans.DateBean getFromDate()
	{
		return this.fromdate;
	}
	public void setFromDate(ims.framework.utils.beans.DateBean value)
	{
		this.fromdate = value;
	}
	public ims.framework.utils.beans.DateBean getToDate()
	{
		return this.todate;
	}
	public void setToDate(ims.framework.utils.beans.DateBean value)
	{
		this.todate = value;
	}
	// Search criteria fields (all optional).
	private String waitinglistname;
	private ims.admin.vo.beans.ServiceForElectiveListConfigVoBean service;
	private ims.core.vo.beans.HcpLiteVoBean consultant;
	private ims.core.vo.beans.LocationLiteVoBean hospital;
	private Boolean active;
	private ims.framework.utils.beans.DateBean fromdate;
	private ims.framework.utils.beans.DateBean todate;
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/ValueObjects/src/ims/core/vo/beans/AssessmentQuestionShortVoBean.java | 5144 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.core.vo.beans;
/**
 * Serializable bean counterpart of {@code ims.core.vo.AssessmentQuestionShortVo}.
 * Converts between the bean and VO representations; carries the business-object
 * id/version so the VO can be re-attached to its persistent entity.
 */
public class AssessmentQuestionShortVoBean extends ims.vo.ValueObjectBean
{
	/** Creates an empty bean. */
	public AssessmentQuestionShortVoBean()
	{
	}
	/**
	 * Copies all fields from the given VO without an identity map.
	 *
	 * @param vo source value object; must not be null
	 */
	public AssessmentQuestionShortVoBean(ims.core.vo.AssessmentQuestionShortVo vo)
	{
		this.id = vo.getBoId();
		this.version = vo.getBoVersion();
		this.ismandatory = vo.getIsMandatory();
		this.activestatus = vo.getActiveStatus() == null ? null : (ims.vo.LookupInstanceBean)vo.getActiveStatus().getBean();
		this.sequence = vo.getSequence();
		this.allowsmultipleanswers = vo.getAllowsMultipleAnswers();
		this.isnonstandard = vo.getIsNonStandard();
		this.url = vo.getURL();
		this.protocol = vo.getProtocol();
		this.legendtext = vo.getLegendText();
	}
	/**
	 * Copies all fields from the given VO. The map parameter is accepted for
	 * generator symmetry; this bean has no nested VO beans that use it.
	 *
	 * @param map shared bean/VO identity map (unused by this bean's fields)
	 * @param vo  source value object; must not be null
	 */
	public void populate(ims.vo.ValueObjectBeanMap map, ims.core.vo.AssessmentQuestionShortVo vo)
	{
		this.id = vo.getBoId();
		this.version = vo.getBoVersion();
		this.ismandatory = vo.getIsMandatory();
		this.activestatus = vo.getActiveStatus() == null ? null : (ims.vo.LookupInstanceBean)vo.getActiveStatus().getBean();
		this.sequence = vo.getSequence();
		this.allowsmultipleanswers = vo.getAllowsMultipleAnswers();
		this.isnonstandard = vo.getIsNonStandard();
		this.url = vo.getURL();
		this.protocol = vo.getProtocol();
		this.legendtext = vo.getLegendText();
	}
	/** Builds a VO from this bean using a fresh identity map. */
	public ims.core.vo.AssessmentQuestionShortVo buildVo()
	{
		return this.buildVo(new ims.vo.ValueObjectBeanMap());
	}
	/**
	 * Builds (or retrieves from the map) the VO corresponding to this bean.
	 * <p>
	 * Fix: the original code null-checked {@code map} before the lookup but then
	 * dereferenced it unconditionally in {@code map.addValueObject(...)}, throwing
	 * NullPointerException for a null map. A null map now falls back to a fresh one,
	 * matching the intent of the original null check; behavior for non-null maps is
	 * unchanged.
	 *
	 * @param map shared bean/VO identity map; a fresh map is used when null
	 * @return the VO for this bean, never null
	 */
	public ims.core.vo.AssessmentQuestionShortVo buildVo(ims.vo.ValueObjectBeanMap map)
	{
		if(map == null)
			map = new ims.vo.ValueObjectBeanMap();
		ims.core.vo.AssessmentQuestionShortVo vo = (ims.core.vo.AssessmentQuestionShortVo)map.getValueObject(this);
		if(vo == null)
		{
			vo = new ims.core.vo.AssessmentQuestionShortVo();
			// Register before populate so cyclic references resolve to this instance.
			map.addValueObject(this, vo);
			vo.populate(map, this);
		}
		return vo;
	}
	// Plain accessors.
	public Integer getId()
	{
		return this.id;
	}
	public void setId(Integer value)
	{
		this.id = value;
	}
	public int getVersion()
	{
		return this.version;
	}
	public void setVersion(int value)
	{
		this.version = value;
	}
	public Boolean getIsMandatory()
	{
		return this.ismandatory;
	}
	public void setIsMandatory(Boolean value)
	{
		this.ismandatory = value;
	}
	public ims.vo.LookupInstanceBean getActiveStatus()
	{
		return this.activestatus;
	}
	public void setActiveStatus(ims.vo.LookupInstanceBean value)
	{
		this.activestatus = value;
	}
	public Integer getSequence()
	{
		return this.sequence;
	}
	public void setSequence(Integer value)
	{
		this.sequence = value;
	}
	public Boolean getAllowsMultipleAnswers()
	{
		return this.allowsmultipleanswers;
	}
	public void setAllowsMultipleAnswers(Boolean value)
	{
		this.allowsmultipleanswers = value;
	}
	public Boolean getIsNonStandard()
	{
		return this.isnonstandard;
	}
	public void setIsNonStandard(Boolean value)
	{
		this.isnonstandard = value;
	}
	public String getURL()
	{
		return this.url;
	}
	public void setURL(String value)
	{
		this.url = value;
	}
	public String getProtocol()
	{
		return this.protocol;
	}
	public void setProtocol(String value)
	{
		this.protocol = value;
	}
	public String getLegendText()
	{
		return this.legendtext;
	}
	public void setLegendText(String value)
	{
		this.legendtext = value;
	}
	// Business-object identity plus question attributes.
	private Integer id;
	private int version;
	private Boolean ismandatory;
	private ims.vo.LookupInstanceBean activestatus;
	private Integer sequence;
	private Boolean allowsmultipleanswers;
	private Boolean isnonstandard;
	private String url;
	private String protocol;
	private String legendtext;
}
| agpl-3.0 |
zwobit/exist | extensions/svn/src/org/exist/versioning/svn/internal/wc/admin/ISVNCleanupHandler.java | 794 | /*
* ====================================================================
* Copyright (c) 2004-2010 TMate Software Ltd. All rights reserved.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at http://svnkit.com/license.html.
* If newer versions of this license are posted there, you may use a
* newer version instead, at your option.
* ====================================================================
*/
package org.exist.versioning.svn.internal.wc.admin;
import org.tmatesoft.svn.core.SVNException;
/**
* @version 1.3
* @author TMate Software Ltd.
*/
public interface ISVNCleanupHandler {

    /**
     * Performs cleanup on the given working-copy administrative area.
     *
     * @param area the administrative area to clean up
     * @throws SVNException if the cleanup operation fails
     */
    public void cleanup(SVNAdminArea area) throws SVNException;
}
| lgpl-2.1 |
dizzzz/exist | exist-core/src/main/java/org/exist/xquery/value/SequenceIterator.java | 2257 | /*
* eXist-db Open Source Native XML Database
* Copyright (C) 2001 The eXist-db Authors
*
* info@exist-db.org
* http://www.exist-db.org
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.exist.xquery.value;
//TODO replace with extends Iterator<Item>
/**
 * Iterator over the items of an XQuery sequence, with optional support for
 * skipping items without materialising them.
 */
public interface SequenceIterator {

    /** Shared, reusable iterator over the empty sequence. */
    SequenceIterator EMPTY_ITERATOR = new EmptySequenceIterator();

    /**
     * Determines if there is a next item in the sequence
     *
     * @return true if there is another item available, false otherwise.
     */
    boolean hasNext();

    /**
     * Retrieves the next item from the Sequence.
     *
     * If you do not care about the actual value and
     * are just trying to advance the iterator, you
     * should consider calling {@link #skip(long)} instead.
     *
     * @return The item, or null if there are no more items
     */
    Item nextItem();

    /**
     * Returns the number of the items in the sequence
     * that may be skipped over from the current position.
     *
     * @return The number of items that may be skipped with {@link #skip(long)},
     *     or -1 if no items may be skipped.
     */
    default long skippable() {
        return -1;
    }

    /**
     * Skip forward over {@code n} items from the current position.
     *
     * The default implementation does not support skipping and
     * always returns -1; implementations that can skip cheaply
     * should override both this method and {@link #skippable()}.
     *
     * @param n number of items to skip
     * @return the number of items actually skipped over, zero
     *     if no items could be skipped, or -1 if this sequence
     *     does not support skipping.
     */
    default long skip(final long n) {
        return -1;
    }
}
| lgpl-2.1 |
flydream2046/azure-sdk-for-java | resource-management/azure-mgmt-traffic-manager/src/main/java/com/microsoft/azure/management/trafficmanager/models/ProfileProperties.java | 4164 | /**
*
* Copyright (c) Microsoft and contributors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
package com.microsoft.azure.management.trafficmanager.models;
import java.util.ArrayList;
/**
* Class containing the properties of a Traffic Manager profile.
*/
/**
 * Properties of an Azure Traffic Manager profile: DNS settings, endpoint list,
 * monitoring configuration, profile status and traffic-routing method.
 * All properties are optional; unset properties are null.
 */
public class ProfileProperties {

    /** DNS settings of the Traffic Manager profile. */
    private DnsConfig dnsConfig;

    /** Endpoints contained in the Traffic Manager profile. */
    private ArrayList<Endpoint> endpoints;

    /** Endpoint monitoring settings of the Traffic Manager profile. */
    private MonitorConfig monitorConfig;

    /** Profile status; possible values are 'Enabled' and 'Disabled'. */
    private String profileStatus;

    /** Routing method; possible values are 'Performance', 'Weighted', or 'Priority'. */
    private String trafficRoutingMethod;

    /** Returns the DNS settings, or null if unset. */
    public DnsConfig getDnsConfig() {
        return dnsConfig;
    }

    /** Sets the DNS settings of the profile. */
    public void setDnsConfig(final DnsConfig dnsConfigValue) {
        dnsConfig = dnsConfigValue;
    }

    /** Returns the endpoint list, or null if unset. */
    public ArrayList<Endpoint> getEndpoints() {
        return endpoints;
    }

    /** Sets the endpoint list of the profile. */
    public void setEndpoints(final ArrayList<Endpoint> endpointsValue) {
        endpoints = endpointsValue;
    }

    /** Returns the monitoring settings, or null if unset. */
    public MonitorConfig getMonitorConfig() {
        return monitorConfig;
    }

    /** Sets the monitoring settings of the profile. */
    public void setMonitorConfig(final MonitorConfig monitorConfigValue) {
        monitorConfig = monitorConfigValue;
    }

    /** Returns the profile status ('Enabled' / 'Disabled'), or null if unset. */
    public String getProfileStatus() {
        return profileStatus;
    }

    /** Sets the profile status ('Enabled' / 'Disabled'). */
    public void setProfileStatus(final String profileStatusValue) {
        profileStatus = profileStatusValue;
    }

    /** Returns the routing method ('Performance' / 'Weighted' / 'Priority'), or null if unset. */
    public String getTrafficRoutingMethod() {
        return trafficRoutingMethod;
    }

    /** Sets the routing method ('Performance' / 'Weighted' / 'Priority'). */
    public void setTrafficRoutingMethod(final String trafficRoutingMethodValue) {
        trafficRoutingMethod = trafficRoutingMethodValue;
    }

    /** Creates an instance with all properties unset. */
    public ProfileProperties() {
    }
}
| apache-2.0 |
genericDataCompany/hsandbox | common/mahout-distribution-0.7-hadoop1/math/target/generated-sources/org/apache/mahout/math/map/OpenByteLongHashMap.java | 19985 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
Copyright � 1999 CERN - European Organization for Nuclear Research.
Permission to use, copy, modify, distribute and sell this software and its documentation for any purpose
is hereby granted without fee, provided that the above copyright notice appear in all copies and
that both that copyright notice and this permission notice appear in supporting documentation.
CERN makes no representations about the suitability of this software for any purpose.
It is provided "as is" without expressed or implied warranty.
*/
package org.apache.mahout.math.map;
import java.util.Arrays;
import org.apache.mahout.math.function.ByteLongProcedure;
import org.apache.mahout.math.function.ByteProcedure;
import org.apache.mahout.math.list.ByteArrayList;
import org.apache.mahout.math.list.LongArrayList;
/**
* Open hash map from byte keys to long values.
**/
public class OpenByteLongHashMap extends AbstractByteLongMap {
  /** Slot state: the slot has never held an entry. */
  protected static final byte FREE = 0;
  /** Slot state: the slot currently holds a key/value pair. */
  protected static final byte FULL = 1;
  /** Slot state: the slot's entry was removed; probe chains must still pass through it. */
  protected static final byte REMOVED = 2;
  /** Value reported for absent keys. */
  protected static final byte NO_KEY_VALUE = 0;
  /** The hash table keys. */
  protected byte[] table;
  /** The hash table values; values[i] belongs to table[i] when state[i] == FULL. */
  protected long[] values;
  /** The state of each hash table entry (FREE, FULL, REMOVED). */
  protected byte[] state;
  /** The number of table entries in state==FREE. */
  protected int freeEntries;
  /** Constructs an empty map with default capacity and default load factors. */
  public OpenByteLongHashMap() {
    this(defaultCapacity);
  }

  /**
   * Constructs an empty map with the specified initial capacity and default load factors.
   *
   * @param initialCapacity the initial capacity of the map.
   * @throws IllegalArgumentException if the initial capacity is less than zero.
   */
  public OpenByteLongHashMap(int initialCapacity) {
    this(initialCapacity, defaultMinLoadFactor, defaultMaxLoadFactor);
  }

  /**
   * Constructs an empty map with the specified initial capacity and the specified minimum and maximum load factor.
   * All three constructors funnel into the inherited {@code setUp} which allocates the tables.
   *
   * @param initialCapacity the initial capacity.
   * @param minLoadFactor   the minimum load factor.
   * @param maxLoadFactor   the maximum load factor.
   * @throws IllegalArgumentException if <tt>initialCapacity < 0 || (minLoadFactor < 0.0 || minLoadFactor >= 1.0) ||
   *                                  (maxLoadFactor <= 0.0 || maxLoadFactor >= 1.0) || (minLoadFactor >=
   *                                  maxLoadFactor)</tt>.
   */
  public OpenByteLongHashMap(int initialCapacity, double minLoadFactor, double maxLoadFactor) {
    setUp(initialCapacity, minLoadFactor, maxLoadFactor);
  }
/** Removes all (key,value) associations from the receiver. Implicitly calls <tt>trimToSize()</tt>. */
@Override
public void clear() {
Arrays.fill(this.state, FREE);
distinct = 0;
freeEntries = table.length; // delta
trimToSize();
}
/**
* Returns a deep copy of the receiver.
*
* @return a deep copy of the receiver.
*/
@Override
public Object clone() {
OpenByteLongHashMap copy = (OpenByteLongHashMap) super.clone();
copy.table = copy.table.clone();
copy.values = copy.values.clone();
copy.state = copy.state.clone();
return copy;
}
/**
* Returns <tt>true</tt> if the receiver contains the specified key.
*
* @return <tt>true</tt> if the receiver contains the specified key.
*/
@Override
public boolean containsKey(byte key) {
return indexOfKey(key) >= 0;
}
/**
* Returns <tt>true</tt> if the receiver contains the specified value.
*
* @return <tt>true</tt> if the receiver contains the specified value.
*/
@Override
public boolean containsValue(long value) {
return indexOfValue(value) >= 0;
}
/**
* Ensures that the receiver can hold at least the specified number of associations without needing to allocate new
* internal memory. If necessary, allocates new internal memory and increases the capacity of the receiver. <p> This
* method never need be called; it is for performance tuning only. Calling this method before <tt>put()</tt>ing a
* large number of associations boosts performance, because the receiver will grow only once instead of potentially
* many times and hash collisions get less probable.
*
* @param minCapacity the desired minimum capacity.
*/
@Override
public void ensureCapacity(int minCapacity) {
if (table.length < minCapacity) {
int newCapacity = nextPrime(minCapacity);
rehash(newCapacity);
}
}
/**
* Applies a procedure to each key of the receiver, if any. Note: Iterates over the keys in no particular order.
* Subclasses can define a particular order, for example, "sorted by key". All methods which <i>can</i> be expressed
* in terms of this method (most methods can) <i>must guarantee</i> to use the <i>same</i> order defined by this
* method, even if it is no particular order. This is necessary so that, for example, methods <tt>keys</tt> and
* <tt>values</tt> will yield association pairs, not two uncorrelated lists.
*
* @param procedure the procedure to be applied. Stops iteration if the procedure returns <tt>false</tt>, otherwise
* continues.
* @return <tt>false</tt> if the procedure stopped before all keys where iterated over, <tt>true</tt> otherwise.
*/
@Override
public boolean forEachKey(ByteProcedure procedure) {
for (int i = table.length; i-- > 0;) {
if (state[i] == FULL) {
if (!procedure.apply(table[i])) {
return false;
}
}
}
return true;
}
/**
* Applies a procedure to each (key,value) pair of the receiver, if any. Iteration order is guaranteed to be
* <i>identical</i> to the order used by method {@link #forEachKey(ByteProcedure)}.
*
* @param procedure the procedure to be applied. Stops iteration if the procedure returns <tt>false</tt>, otherwise
* continues.
* @return <tt>false</tt> if the procedure stopped before all keys where iterated over, <tt>true</tt> otherwise.
*/
@Override
public boolean forEachPair(ByteLongProcedure procedure) {
for (int i = table.length; i-- > 0;) {
if (state[i] == FULL) {
if (!procedure.apply(table[i], values[i])) {
return false;
}
}
}
return true;
}
/**
* Returns the value associated with the specified key. It is often a good idea to first check with
* containsKey(byte) whether the given key has a value associated or not, i.e. whether there exists an association
* for the given key or not.
*
* @param key the key to be searched for.
* @return the value associated with the specified key; <tt>0</tt> if no such key is present.
*/
@Override
public long get(byte key) {
final int i = indexOfKey(key);
if (i < 0) {
return 0;
} //not contained
return values[i];
}
  /**
   * Finds the slot at which the given key should be inserted, using double hashing.
   * The probe step ("decrement") is derived from the hash so different keys follow
   * different probe chains; the first pass must NOT skip REMOVED slots (they are
   * candidates for reuse), while the second pass must skip them to verify the key
   * is not stored further down the chain.
   *
   * @param key the key to be added to the receiver.
   * @return the index where the key would need to be inserted, if it is not already contained. Returns -index-1 if the
   *         key is already contained at slot index. Therefore, if the returned index < 0, then it is already contained
   *         at slot -index-1. If the returned index >= 0, then it is NOT already contained and should be inserted at
   *         slot index.
   */
  protected int indexOfInsertion(byte key) {
    final int length = table.length;
    // Mask the sign bit so hash is non-negative before the modulo operations.
    final int hash = HashFunctions.hash(key) & 0x7FFFFFFF;
    int i = hash % length;
    int decrement = hash % (length - 2); // double hashing, see http://www.eece.unm.edu/faculty/heileman/hash/node4.html
    //int decrement = (hash / length) % length;
    if (decrement == 0) {
      decrement = 1; // step must be non-zero or the probe loop would not advance
    }

    // stop if we find a removed or free slot, or if we find the key itself
    // do NOT skip over removed slots (yes, open addressing is like that...)
    while (state[i] == FULL && table[i] != key) {
      i -= decrement;
      //hashCollisions++;
      if (i < 0) {
        i += length; // wrap around the table
      }
    }

    if (state[i] == REMOVED) {
      // stop if we find a free slot, or if we find the key itself.
      // do skip over removed slots (yes, open addressing is like that...)
      // assertion: there is at least one FREE slot.
      final int j = i; // remember the first REMOVED slot so it can be reused
      while (state[i] != FREE && (state[i] == REMOVED || table[i] != key)) {
        i -= decrement;
        //hashCollisions++;
        if (i < 0) {
          i += length; // wrap around the table
        }
      }
      if (state[i] == FREE) {
        // key not present further down the chain: reuse the earlier REMOVED slot
        i = j;
      }
    }

    if (state[i] == FULL) {
      // key already contained at slot i.
      // return a negative number identifying the slot.
      return -i - 1;
    }
    // not already contained, should be inserted at slot i.
    // return a number >= 0 identifying the slot.
    return i;
  }
  /**
   * Finds the slot holding the given key via double hashing. Unlike
   * {@code indexOfInsertion}, this lookup always skips REMOVED slots: the key may
   * still live further down a probe chain that passes through them.
   *
   * @param key the key to be searched in the receiver.
   * @return the index where the key is contained in the receiver, returns -1 if the key was not found.
   */
  protected int indexOfKey(byte key) {
    final int length = table.length;
    // Mask the sign bit so hash is non-negative before the modulo operations.
    final int hash = HashFunctions.hash(key) & 0x7FFFFFFF;
    int i = hash % length;
    int decrement = hash % (length - 2); // double hashing, see http://www.eece.unm.edu/faculty/heileman/hash/node4.html
    //int decrement = (hash / length) % length;
    if (decrement == 0) {
      decrement = 1; // step must be non-zero or the probe loop would not advance
    }

    // stop if we find a free slot, or if we find the key itself.
    // do skip over removed slots (yes, open addressing is like that...)
    while (state[i] != FREE && (state[i] == REMOVED || table[i] != key)) {
      i -= decrement;
      //hashCollisions++;
      if (i < 0) {
        i += length; // wrap around the table
      }
    }

    if (state[i] == FREE) {
      return -1;
    } // not found
    return i; //found, return index where key is contained
  }
/**
* @param value the value to be searched in the receiver.
* @return the index where the value is contained in the receiver, returns -1 if the value was not found.
*/
protected int indexOfValue(long value) {
long[] val = values;
byte[] stat = state;
for (int i = stat.length; --i >= 0;) {
if (stat[i] == FULL && val[i] == value) {
return i;
}
}
return -1; // not found
}
/**
* Fills all keys contained in the receiver into the specified list. Fills the list, starting at index 0. After this
* call returns the specified list has a new size that equals <tt>this.size()</tt>. Iteration order is guaranteed to
* be <i>identical</i> to the order used by method {@link #forEachKey(ByteProcedure)}.
* <p> This method can be used
* to iterate over the keys of the receiver.
*
* @param list the list to be filled, can have any size.
*/
@Override
public void keys(ByteArrayList list) {
list.setSize(distinct);
byte [] elements = list.elements();
int j = 0;
for (int i = table.length; i-- > 0;) {
if (state[i] == FULL) {
elements[j++] = table[i];
}
}
}
/**
 * Collects all (key, value) pairs accepted by <tt>condition</tt> into the two
 * lists, starting at index 0. Both lists are cleared first; on return their
 * sizes equal the number of matching pairs. Iteration order is guaranteed to
 * match {@link #forEachKey(ByteProcedure)}. <p> <b>Example:</b> <br>
 * <pre>
 * ByteLongProcedure condition = new ByteLongProcedure() { // match even values only
 * public boolean apply(byte key, long value) { return value%2==0; }
 * }
 * keys = (8,7,6), values = (1,2,2) --> keyList = (6,8), valueList = (2,1)</tt>
 * </pre>
 *
 * @param condition predicate receiving the current key and value.
 * @param keyList   destination for matching keys.
 * @param valueList destination for matching values.
 */
@Override
public void pairsMatching(ByteLongProcedure condition,
                          ByteArrayList keyList,
                          LongArrayList valueList) {
  keyList.clear();
  valueList.clear();
  for (int slot = table.length - 1; slot >= 0; slot--) {
    if (state[slot] != FULL) {
      continue;
    }
    byte key = table[slot];
    long value = values[slot];
    if (condition.apply(key, value)) {
      keyList.add(key);
      valueList.add(value);
    }
  }
}
/**
 * Associates the given key with the given value. Replaces any old <tt>(key,someOtherValue)</tt> association, if
 * existing.
 *
 * @param key the key the value shall be associated with.
 * @param value the value to be associated.
 * @return <tt>true</tt> if the receiver did not already contain such a key; <tt>false</tt> if the receiver did
 *         already contain such a key - the new value has now replaced the formerly associated value.
 */
@Override
public boolean put(byte key, long value) {
  // indexOfInsertion returns -(existingIndex + 1) when the key is already present.
  int i = indexOfInsertion(key);
  if (i < 0) { //already contained
    i = -i - 1;
    this.values[i] = value;
    return false;
  }
  // Grow before inserting once distinct exceeds the high water mark, then retry:
  // rehashing invalidates the insertion index computed above.
  if (this.distinct > this.highWaterMark) {
    int newCapacity = chooseGrowCapacity(this.distinct + 1, this.minLoadFactor, this.maxLoadFactor);
    rehash(newCapacity);
    return put(key, value);
  }
  this.table[i] = key;
  this.values[i] = value;
  // Only filling a FREE slot consumes a free entry; reusing a REMOVED slot does not.
  if (this.state[i] == FREE) {
    this.freeEntries--;
  }
  this.state[i] = FULL;
  this.distinct++;
  // Open addressing needs at least one FREE slot so probes can terminate.
  if (this.freeEntries < 1) { //delta
    int newCapacity = chooseGrowCapacity(this.distinct + 1, this.minLoadFactor, this.maxLoadFactor);
    rehash(newCapacity);
  }
  return true;
}
/**
 * If <tt>key</tt> is present, adds <tt>incrValue</tt> to its value and returns the
 * result; otherwise associates <tt>key</tt> with <tt>newValue</tt> and returns
 * <tt>newValue</tt>.
 */
@Override
public long adjustOrPutValue(byte key, long newValue, long incrValue) {
  int i = indexOfInsertion(key);
  if (i < 0) { //already contained
    i = -i - 1;
    // Key present: bump the stored value by incrValue (newValue is ignored).
    this.values[i] += incrValue;
    return this.values[i];
  } else {
    // Key absent: insert it with newValue (incrValue is ignored).
    put(key, newValue);
    return newValue;
  }
}
/**
 * Rehashes the contents of the receiver into a new table with a smaller or larger capacity. This method is called
 * automatically when the number of keys in the receiver exceeds the high water mark or falls below the low water
 * mark.
 */
protected void rehash(int newCapacity) {
  int oldCapacity = table.length;
  //if (oldCapacity == newCapacity) return;
  byte[] oldTable = table;
  long[] oldValues = values;
  byte[] oldState = state;
  // Fresh arrays start out all FREE; REMOVED tombstones are dropped by the rehash.
  this.table = new byte[newCapacity];
  this.values = new long[newCapacity];
  this.state = new byte[newCapacity];
  // Water marks depend on capacity, so recompute them before reinserting.
  this.lowWaterMark = chooseLowWaterMark(newCapacity, this.minLoadFactor);
  this.highWaterMark = chooseHighWaterMark(newCapacity, this.maxLoadFactor);
  this.freeEntries = newCapacity - this.distinct; // delta
  for (int i = oldCapacity; i-- > 0;) {
    if (oldState[i] == FULL) {
      byte element = oldTable[i];
      // The new table is all FREE, so indexOfInsertion always finds a slot here.
      int index = indexOfInsertion(element);
      this.table[index] = element;
      this.values[index] = oldValues[i];
      this.state[index] = FULL;
    }
  }
}
/**
 * Removes the given key (and its associated value) from the receiver, if present.
 *
 * @param key the key to be removed from the receiver.
 * @return <tt>true</tt> if the key was present, <tt>false</tt> otherwise.
 */
@Override
public boolean removeKey(byte key) {
  final int slot = indexOfKey(key);
  if (slot < 0) {
    // key not contained
    return false;
  }
  // Mark as a tombstone; the value slot is left in place (see commented delta).
  this.state[slot] = REMOVED;
  //this.values[slot]=0; // delta
  this.distinct--;
  if (this.distinct < this.lowWaterMark) {
    rehash(chooseShrinkCapacity(this.distinct, this.minLoadFactor, this.maxLoadFactor));
  }
  return true;
}
/**
 * Initializes the receiver.
 *
 * @param initialCapacity the initial capacity of the receiver.
 * @param minLoadFactor the minLoadFactor of the receiver.
 * @param maxLoadFactor the maxLoadFactor of the receiver.
 * @throws IllegalArgumentException if <tt>initialCapacity < 0 || (minLoadFactor < 0.0 || minLoadFactor >= 1.0) ||
 *                                  (maxLoadFactor <= 0.0 || maxLoadFactor >= 1.0) || (minLoadFactor >=
 *                                  maxLoadFactor)</tt>.
 */
@Override
protected void setUp(int initialCapacity, double minLoadFactor, double maxLoadFactor) {
  int capacity = initialCapacity;
  // Superclass validates the capacity/load-factor arguments (may throw).
  super.setUp(capacity, minLoadFactor, maxLoadFactor);
  // A prime capacity gives better probe distribution for open addressing.
  capacity = nextPrime(capacity);
  if (capacity == 0) {
    capacity = 1;
  } // open addressing needs at least one FREE slot at any time.
  this.table = new byte[capacity];
  this.values = new long[capacity];
  this.state = new byte[capacity];
  // (Refers to the largestPrime special case below.)
  // memory will be exhausted long before this pathological case happens, anyway.
  this.minLoadFactor = minLoadFactor;
  if (capacity == PrimeFinder.largestPrime) {
    this.maxLoadFactor = 1.0;
  } else {
    this.maxLoadFactor = maxLoadFactor;
  }
  this.distinct = 0;
  this.freeEntries = capacity; // delta
  // lowWaterMark will be established upon first expansion.
  // establishing it now (upon instance construction) would immediately make the table shrink upon first put(...).
  // After all the idea of an "initialCapacity" implies violating lowWaterMarks when an object is young.
  // See ensureCapacity(...)
  this.lowWaterMark = 0;
  this.highWaterMark = chooseHighWaterMark(capacity, this.maxLoadFactor);
}
/**
 * Trims the capacity of the receiver to (roughly) its current size, releasing
 * any superfluous internal memory. An application can use this operation to
 * minimize the storage of the receiver.
 */
@Override
public void trimToSize() {
  // Keep ~20% slack: open addressing's performance degrades exponentially as
  // the table approaches full, making even the trimming rehash itself slow.
  final int target = nextPrime((int) (1 + 1.2 * size()));
  if (table.length > target) {
    rehash(target);
  }
}
/**
 * Copies every value in the receiver into <tt>list</tt>, starting at index 0.
 * On return the list's size equals <tt>this.size()</tt>; iteration order is
 * guaranteed to match {@link #forEachKey(ByteProcedure)}.
 * <p> This method can be used to iterate over the values of the receiver.
 *
 * @param list the destination list; its prior size is irrelevant.
 */
@Override
public void values(LongArrayList list) {
  list.setSize(distinct);
  long[] dest = list.elements();
  int next = 0;
  for (int slot = state.length - 1; slot >= 0; slot--) {
    if (state[slot] == FULL) {
      dest[next++] = values[slot];
    }
  }
}
/**
 * Exposes internal sizing state for unit tests. Element 0 of each array
 * receives, respectively, the current capacity, the min load factor and the
 * max load factor.
 *
 * @param capacity      one-element array receiving the table capacity.
 * @param minLoadFactor one-element array receiving the min load factor.
 * @param maxLoadFactor one-element array receiving the max load factor.
 */
protected void getInternalFactors(int[] capacity, double[] minLoadFactor, double[] maxLoadFactor) {
  capacity[0] = table.length;
  minLoadFactor[0] = this.minLoadFactor;
  maxLoadFactor[0] = this.maxLoadFactor;
}
}
| apache-2.0 |
electrum/presto | plugin/trino-kinesis/src/main/java/io/trino/plugin/kinesis/KinesisInternalFieldDescription.java | 3914 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.kinesis;
import io.trino.spi.connector.ColumnMetadata;
import io.trino.spi.type.BigintType;
import io.trino.spi.type.BooleanType;
import io.trino.spi.type.TimestampType;
import io.trino.spi.type.Type;
import io.trino.spi.type.VarcharType;
import java.util.Map;
import java.util.Optional;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static java.util.Arrays.stream;
import static java.util.Objects.requireNonNull;
import static java.util.function.Function.identity;
/**
 * Internal (synthetic) columns that the Kinesis connector can expose for each
 * record in addition to the decoded message fields. Each constant carries the
 * SQL column name, its Trino type and a human-readable comment.
 */
public enum KinesisInternalFieldDescription
{
    SHARD_ID_FIELD("_shard_id", VarcharType.VARCHAR, "Shard Id"),
    SEGMENT_START_FIELD("_segment_start", VarcharType.VARCHAR, "Segment start sequence id"),
    // NOTE(review): the next two column names ("_shard_sequence_id" and
    // "_shard_sequence_id_field") look mismatched with their constant names;
    // kept as-is since existing queries may reference them -- confirm upstream.
    SEGMENT_END_FIELD("_shard_sequence_id", VarcharType.VARCHAR, "Segment end sequence id"),
    SHARD_SEQUENCE_ID_FIELD("_shard_sequence_id_field", BigintType.BIGINT, "Segment start offset"),
    SEGMENT_COUNT_FIELD("_segment_count", BigintType.BIGINT, "Running message count per segment"),
    MESSAGE_VALID_FIELD("_message_valid", BooleanType.BOOLEAN, "Message data is valid"),
    MESSAGE_FIELD("_message", VarcharType.VARCHAR, "Message text"),
    MESSAGE_TIMESTAMP("_message_timestamp", TimestampType.TIMESTAMP_MILLIS, "Approximate message arrival timestamp"),
    MESSAGE_LENGTH_FIELD("_message_length", BigintType.BIGINT, "Total number of message bytes"),
    PARTITION_KEY_FIELD("_partition_key", VarcharType.VARCHAR, "Key text");

    // Lookup index from SQL column name to constant, built once at class load.
    private static final Map<String, KinesisInternalFieldDescription> BY_COLUMN_NAME = stream(KinesisInternalFieldDescription.values())
            .collect(toImmutableMap(KinesisInternalFieldDescription::getColumnName, identity()));

    /**
     * Resolves an internal column by its SQL column name.
     *
     * @throws IllegalArgumentException if the name is not a known internal column
     */
    public static KinesisInternalFieldDescription forColumnName(String columnName)
    {
        KinesisInternalFieldDescription description = BY_COLUMN_NAME.get(columnName);
        checkArgument(description != null, "Unknown internal column name %s", columnName);
        return description;
    }

    private final String columnName;
    private final Type type;
    private final String comment;

    KinesisInternalFieldDescription(
            String columnName,
            Type type,
            String comment)
    {
        checkArgument(!isNullOrEmpty(columnName), "name is null or is empty");
        this.columnName = columnName;
        this.type = requireNonNull(type, "type is null");
        this.comment = requireNonNull(comment, "comment is null");
    }

    public String getColumnName()
    {
        return columnName;
    }

    public Type getType()
    {
        return type;
    }

    /** Builds a column handle for this internal column at the given ordinal position. */
    KinesisColumnHandle getColumnHandle(int index, boolean hidden)
    {
        return new KinesisColumnHandle(
                index,
                getColumnName(),
                getType(),
                null,
                null,
                null,
                false,
                hidden);
    }

    /** Builds column metadata for this internal column. */
    ColumnMetadata getColumnMetadata(boolean hidden)
    {
        return ColumnMetadata.builder()
                .setName(columnName)
                .setType(type)
                .setComment(Optional.ofNullable(comment))
                .setHidden(hidden)
                .build();
    }
}
| apache-2.0 |
electrum/presto | core/trino-parser/src/main/java/io/trino/sql/tree/WindowReference.java | 2333 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.sql.tree;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import static com.google.common.base.MoreObjects.toStringHelper;
import static java.util.Objects.requireNonNull;
/**
 * AST node that refers to a window by its name (e.g. {@code OVER w}),
 * as opposed to an inline window specification.
 */
public class WindowReference
        extends Node
        implements Window
{
    private final Identifier name;

    public WindowReference(Identifier name)
    {
        this(Optional.empty(), name);
    }

    public WindowReference(NodeLocation location, Identifier name)
    {
        this(Optional.of(location), name);
    }

    private WindowReference(Optional<NodeLocation> location, Identifier name)
    {
        super(location);
        this.name = requireNonNull(name, "name is null");
    }

    public Identifier getName()
    {
        return name;
    }

    @Override
    public <R, C> R accept(AstVisitor<R, C> visitor, C context)
    {
        return visitor.visitWindowReference(this, context);
    }

    @Override
    public List<Node> getChildren()
    {
        return ImmutableList.of(name);
    }

    @Override
    public boolean equals(Object obj)
    {
        if (obj == this) {
            return true;
        }
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        WindowReference that = (WindowReference) obj;
        return Objects.equals(name, that.name);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(name);
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("name", name)
                .toString();
    }

    @Override
    public boolean shallowEquals(Node other)
    {
        // Shallow comparison ignores children; class identity is sufficient here.
        return sameClass(this, other);
    }
}
| apache-2.0 |
jeorme/OG-Platform | projects/OG-Financial/src/main/java/com/opengamma/financial/analytics/model/volatility/surface/SABRFittingProperties.java | 1072 | /**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.analytics.model.volatility.surface;
/**
*
*/
/**
 * Property (constraint) names used when fitting SABR model parameters to a
 * volatility surface. Constants-only holder; not instantiable.
 */
public class SABRFittingProperties {
  /** Fix alpha during fitting */
  public static final String PROPERTY_USE_FIXED_ALPHA = "UseFixedAlpha";
  /** Fixed alpha value */
  public static final String PROPERTY_ALPHA = "Alpha";
  /** Fix beta during fitting */
  public static final String PROPERTY_USE_FIXED_BETA = "UseFixedBeta";
  /** Fixed beta value */
  public static final String PROPERTY_BETA = "Beta";
  /** Fix nu during fitting */
  public static final String PROPERTY_USE_FIXED_NU = "UseFixedNu";
  /** Fixed nu value */
  public static final String PROPERTY_NU = "Nu";
  /** Fix rho during fitting */
  public static final String PROPERTY_USE_FIXED_RHO = "UseFixedRho";
  /** Fixed rho value */
  public static final String PROPERTY_RHO = "Rho";
  /** The error in volatility quotes */
  public static final String PROPERTY_ERROR = "Error";

  /** Prevents instantiation of this constants-only utility class. */
  private SABRFittingProperties() {
  }
}
| apache-2.0 |
dcelasun/thrift | lib/java/src/org/apache/thrift/transport/AutoExpandingBufferWriteTransport.java | 2936 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.thrift.transport;
import org.apache.thrift.TConfiguration;
/**
* TTransport for writing to an AutoExpandingBuffer.
*/
/**
 * TTransport that writes into an {@link AutoExpandingBuffer}, optionally keeping
 * a fixed-size reserve at the front of the buffer (framed transport uses this
 * for the frame-length header).
 */
public final class AutoExpandingBufferWriteTransport extends TEndpointTransport {

  private final AutoExpandingBuffer buffer;
  private final int reserve;
  private int position;

  /**
   * Constructor.
   * @param initialCapacity the initial capacity of the buffer
   * @param frontReserve space, if any, to reserve at the beginning such
   *                     that the first write is after this reserve.
   *                     This allows framed transport to reserve space
   *                     for the frame buffer length.
   * @throws IllegalArgumentException if initialCapacity is less than one
   * @throws IllegalArgumentException if frontReserve is less than zero
   * @throws IllegalArgumentException if frontReserve is greater than initialCapacity
   */
  public AutoExpandingBufferWriteTransport(TConfiguration config, int initialCapacity, int frontReserve) throws TTransportException {
    super(config);
    if (initialCapacity < 1) {
      throw new IllegalArgumentException("initialCapacity");
    }
    if (frontReserve < 0 || initialCapacity < frontReserve) {
      throw new IllegalArgumentException("frontReserve");
    }
    this.buffer = new AutoExpandingBuffer(initialCapacity);
    this.reserve = frontReserve;
    this.position = frontReserve;
  }

  @Override
  public void close() {
    // Nothing to release; the buffer is plain heap memory.
  }

  @Override
  public boolean isOpen() {
    // An in-memory transport is always open.
    return true;
  }

  @Override
  public void open() throws TTransportException {
    // No-op: see isOpen().
  }

  @Override
  public int read(byte[] buf, int off, int len) throws TTransportException {
    // Write-only transport.
    throw new UnsupportedOperationException();
  }

  @Override
  public void write(byte[] toWrite, int off, int len) throws TTransportException {
    buffer.resizeIfNecessary(position + len);
    System.arraycopy(toWrite, off, buffer.array(), position, len);
    position += len;
  }

  public AutoExpandingBuffer getBuf() {
    return buffer;
  }

  /**
   * @return length of the buffer, including any front reserve
   */
  public int getLength() {
    return position;
  }

  /** Rewinds the write position to just after the front reserve. */
  public void reset() {
    position = reserve;
  }
}
| apache-2.0 |
variacode/rundeck | core/src/main/java/com/dtolabs/rundeck/plugins/scm/JobImportReference.java | 1183 | /*
* Copyright 2016 SimplifyOps, Inc. (http://simplifyops.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dtolabs.rundeck.plugins.scm;
import com.dtolabs.rundeck.core.jobs.JobRevReference;
import java.util.Map;
/**
* Job reference which has SCM import metadata
*/
/**
 * Job reference which carries SCM import metadata.
 */
public interface JobImportReference extends JobRevReference {
    /**
     * @return metadata about the tracked job
     */
    // NOTE(review): raw Map kept for source compatibility with existing
    // implementors; consider a parameterized type in a major release.
    Map getScmImportMetadata();

    /**
     * @return the version of the job associated with the import metadata
     */
    Long getImportVersion();

    /**
     * @return source Job ID if different from current ID
     */
    String getSourceId();
}
| apache-2.0 |
dhootha/hadoop-common | src/mapred/org/apache/hadoop/mapred/JobHistory.java | 66630 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileFilter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.StringUtils;
/**
* Provides methods for writing to and reading from job history.
* Job History works in an append mode, JobHistory and its inner classes provide methods
* to log job events.
*
* JobHistory is split into multiple files, format of each file is plain text where each line
* is of the format [type (key=value)*], where type identifies the type of the record.
* Type maps to UID of one of the inner classes of this class.
*
* Job history is maintained in a master index which contains star/stop times of all jobs with
* a few other job level properties. Apart from this each job's history is maintained in a seperate history
* file. name of job history files follows the format jobtrackerId_jobid
*
* For parsing the job history it supports a listener based interface where each line is parsed
* and passed to listener. The listener can create an object model of history or look for specific
* events and discard rest of the history.
*
* CHANGE LOG :
* Version 0 : The history has the following format :
* TAG KEY1="VALUE1" KEY2="VALUE2" and so on.
TAG can be Job, Task, MapAttempt or ReduceAttempt.
Note that a '"' is the line delimiter.
* Version 1 : Changes the line delimiter to '.'
Values are now escaped for unambiguous parsing.
Added the Meta tag to store version info.
*/
public class JobHistory {
// Current history file format version (see class CHANGE LOG above).
static final long VERSION = 1L;
public static final Log LOG = LogFactory.getLog(JobHistory.class);
// Token separator within a single record.
private static final String DELIMITER = " ";
// Logical record terminator for version 1+; version 0 files used '"'
// (see MetaInfoManager.getLineDelim()).
static final char LINE_DELIMITER_CHAR = '.';
// Characters that must be backslash-escaped inside quoted values.
static final char[] charsToEscape = new char[] {'"', '=',
                                               LINE_DELIMITER_CHAR};
static final String DIGITS = "[0-9]+";
static final String KEY = "(\\w+)";
// value is any character other than quote, but escaped quotes can be there
static final String VALUE = "[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*";
// Matches one KEY="VALUE" token; compiled once, reused by parseLine().
static final Pattern pattern = Pattern.compile(KEY + "=" + "\"" + VALUE + "\"");
public static final int JOB_NAME_TRIM_LENGTH = 50;
// Set by init(): "<hostname>_<startTime>_" prefix for history file names.
private static String JOBTRACKER_UNIQUE_STRING = null;
// Resolved history directory; null until init() succeeds.
private static String LOG_DIR = null;
// Writers for currently open history files; populated/drained elsewhere in this class.
private static Map<String, ArrayList<PrintWriter>> openJobs =
  new ConcurrentHashMap<String, ArrayList<PrintWriter>>();
private static boolean disableHistory = false;
private static final String SECONDARY_FILE_SUFFIX = ".recover";
private static long jobHistoryBlockSize = 0;
private static String jobtrackerHostname;
/**
 * Record types are identifiers for each line of log in history files.
 * A record type appears as the first token in a single line of log.
 */
public static enum RecordTypes {
  // Meta carries file-level info (e.g. VERSION); the others mirror job/task events.
  Jobtracker, Job, Task, MapAttempt, ReduceAttempt, Meta
}
/**
 * Job history files contain key="value" pairs, where keys belong to this enum.
 * It acts as a global namespace for all keys.  Values are always serialized as
 * (escaped) strings; the enum constant's name() is what appears in the file.
 */
public static enum Keys {
  JOBTRACKERID,
  START_TIME, FINISH_TIME, JOBID, JOBNAME, USER, JOBCONF, SUBMIT_TIME,
  LAUNCH_TIME, TOTAL_MAPS, TOTAL_REDUCES, FAILED_MAPS, FAILED_REDUCES,
  FINISHED_MAPS, FINISHED_REDUCES, JOB_STATUS, TASKID, HOSTNAME, TASK_TYPE,
  ERROR, TASK_ATTEMPT_ID, TASK_STATUS, COPY_PHASE, SORT_PHASE, REDUCE_PHASE,
  SHUFFLE_FINISHED, SORT_FINISHED, COUNTERS, SPLITS, JOB_PRIORITY, HTTP_PORT,
  TRACKER_NAME, STATE_STRING, VERSION, RESTART_COUNT
}
/**
 * This enum contains some of the values commonly used by history log events.
 * since values in history can only be strings - Values.name() is used in
 * most places in history file.
 */
public static enum Values {
  // Task/job statuses and task types share this one namespace.
  SUCCESS, FAILED, KILLED, MAP, REDUCE, CLEANUP, RUNNING, PREP, SETUP
}
/**
 * Initialize JobHistory files.  Resolves the history log directory (creating it
 * if needed), publishes the resolved location back into the conf, and records
 * the jobtracker identity used to build unique history file names.  Any
 * IOException disables history logging instead of failing the jobtracker.
 *
 * @param conf Jobconf of the job tracker.
 * @param hostname jobtracker's hostname
 * @param jobTrackerStartTime jobtracker's start time
 * @return true if intialized properly
 *         false otherwise
 */
public static boolean init(JobConf conf, String hostname,
                           long jobTrackerStartTime){
  try {
    // Default location: ${hadoop.log.dir}/history on the local file system.
    LOG_DIR = conf.get("hadoop.job.history.location" ,
      "file:///" + new File(
      System.getProperty("hadoop.log.dir")).getAbsolutePath()
      + File.separator + "history");
    // Prefix shared by all history file names written by this jobtracker run.
    JOBTRACKER_UNIQUE_STRING = hostname + "_" +
                                 String.valueOf(jobTrackerStartTime) + "_";
    jobtrackerHostname = hostname;
    Path logDir = new Path(LOG_DIR);
    FileSystem fs = logDir.getFileSystem(conf);
    if (!fs.exists(logDir)){
      if (!fs.mkdirs(logDir)){
        throw new IOException("Mkdirs failed to create " + logDir.toString());
      }
    }
    // Publish the resolved location so other components see the same directory.
    conf.set("hadoop.job.history.location", LOG_DIR);
    disableHistory = false;
    // set the job history block size (default is 3MB)
    jobHistoryBlockSize =
      conf.getLong("mapred.jobtracker.job.history.block.size",
                   3 * 1024 * 1024);
  } catch(IOException e) {
    LOG.error("Failed to initialize JobHistory log file", e);
    // History is best-effort: disable it rather than fail jobtracker startup.
    disableHistory = true;
  }
  return !(disableHistory);
}
/**
 * Manages job-history's meta information such as version etc.
 * Helps in logging version information to the job-history and recover
 * version information from the history.
 */
static class MetaInfoManager implements Listener {
  // Format version of the parsed file; 0 means a legacy (pre-versioned) file.
  private long version = 0L;
  private KeyValuePair pairs = new KeyValuePair();
  // Extract the version of the history that was used to write the history
  public MetaInfoManager(String line) throws IOException {
    if (null != line) {
      // Parse the line
      parseLine(line, this, false);
    }
  }
  // Get the line delimiter
  char getLineDelim() {
    if (version == 0) {
      // Version-0 files terminated records with a double quote.
      return '"';
    } else {
      return LINE_DELIMITER_CHAR;
    }
  }
  // Checks if the values are escaped or not
  boolean isValueEscaped() {
    // Note that the values are not escaped in version 0
    return version != 0;
  }
  // Listener callback: extracts VERSION from Meta records, ignores all others.
  public void handle(RecordTypes recType, Map<Keys, String> values)
      throws IOException {
    // Check if the record is of type META
    if (RecordTypes.Meta == recType) {
      pairs.handle(values);
      version = pairs.getLong(Keys.VERSION); // defaults to 0
    }
  }
  /**
   * Logs history meta-info to the history file. This needs to be called once
   * per history file.
   * @param writers writers for the currently open history file(s); may be null.
   */
  static void logMetaInfo(ArrayList<PrintWriter> writers){
    if (!disableHistory){
      if (null != writers){
        JobHistory.log(writers, RecordTypes.Meta,
                       new Keys[] {Keys.VERSION},
                       new String[] {String.valueOf(VERSION)});
      }
    }
  }
}
/** Escapes the string especially for {@link JobHistory}:
 * backslash-escapes '"', '=' and the record delimiter '.' (see charsToEscape).
 */
static String escapeString(String data) {
  return StringUtils.escapeString(data, StringUtils.ESCAPE_CHAR,
                                  charsToEscape);
}
/**
 * Parses history file and invokes Listener.handle() for
 * each line of history. It can be used for looking through history
 * files for specific items without having to keep whole history in memory.
 * @param path path to history file
 * @param l Listener for history events
 * @param fs FileSystem where history file is present
 * @throws IOException
 */
public static void parseHistoryFromFS(String path, Listener l, FileSystem fs)
    throws IOException{
  FSDataInputStream in = fs.open(new Path(path));
  BufferedReader reader = new BufferedReader(new InputStreamReader (in));
  try {
    String line = null;
    StringBuffer buf = new StringBuffer();
    // Read the meta-info line. Note that this might a jobinfo line for files
    // written with older format
    line = reader.readLine();
    // Check if the file is empty
    if (line == null) {
      return;
    }
    // Get the information required for further processing
    MetaInfoManager mgr = new MetaInfoManager(line);
    boolean isEscaped = mgr.isValueEscaped();
    String lineDelim = String.valueOf(mgr.getLineDelim());
    String escapedLineDelim =
      StringUtils.escapeString(lineDelim, StringUtils.ESCAPE_CHAR,
                               mgr.getLineDelim());
    do {
      buf.append(line);
      // A logical record may span physical lines: keep accumulating until a
      // line ends with an *unescaped* record delimiter.
      if (!line.trim().endsWith(lineDelim)
          || line.trim().endsWith(escapedLineDelim)) {
        buf.append("\n");
        continue;
      }
      parseLine(buf.toString(), l, isEscaped);
      buf = new StringBuffer();
    } while ((line = reader.readLine())!= null);
  } finally {
    // Closing the reader also closes the underlying stream; a failure on
    // close is deliberately ignored (best effort cleanup).
    try { reader.close(); } catch (IOException ex) {}
  }
}
/**
 * Parse a single line of history.  The first token is the record type; the
 * remainder is a sequence of KEY="VALUE" pairs which are collected into a map
 * and handed to the listener.
 * @param line one complete logical record
 * @param l listener receiving the parsed record
 * @param isEscaped whether values were escaped when written (version >= 1)
 * @throws IOException
 */
private static void parseLine(String line, Listener l, boolean isEscaped)
    throws IOException{
  // extract the record type
  int idx = line.indexOf(' ');
  String recType = line.substring(0, idx);
  String data = line.substring(idx+1, line.length());
  Matcher matcher = pattern.matcher(data);
  Map<Keys,String> parseBuffer = new HashMap<Keys, String>();
  while(matcher.find()){
    String tuple = matcher.group(0);
    // Split on the first unescaped '=' into key and quoted value.
    String []parts = StringUtils.split(tuple, StringUtils.ESCAPE_CHAR, '=');
    // Strip the surrounding quotes from the value.
    String value = parts[1].substring(1, parts[1].length() -1);
    if (isEscaped) {
      value = StringUtils.unEscapeString(value, StringUtils.ESCAPE_CHAR,
                                         charsToEscape);
    }
    parseBuffer.put(Keys.valueOf(parts[0]), value);
  }
  l.handle(RecordTypes.valueOf(recType), parseBuffer);
  parseBuffer.clear();
}
/**
 * Log a raw record type with a single key and value. This method is generally
 * not used directly.
 * @param out destination writer
 * @param recordType type of log event
 * @param key key
 * @param value value (escaped before being written)
 */
static void log(PrintWriter out, RecordTypes recordType, Keys key,
                String value){
  String escaped = escapeString(value);
  StringBuilder record = new StringBuilder();
  record.append(recordType.name()).append(DELIMITER);
  record.append(key).append("=\"").append(escaped).append("\"");
  record.append(DELIMITER).append(LINE_DELIMITER_CHAR);
  out.println(record.toString());
}
/**
 * Log a record with parallel arrays of keys and values; the arrays must have
 * the same length.  The record is built once and written to every writer.
 * @param writers destinations for the record
 * @param recordType type of log event
 * @param keys keys, one per value
 * @param values values, escaped before writing; the caller's array is NOT modified
 */
static void log(ArrayList<PrintWriter> writers, RecordTypes recordType,
                Keys[] keys, String[] values) {
  // StringBuilder instead of StringBuffer: no synchronization needed here.
  StringBuilder buf = new StringBuilder(recordType.name());
  buf.append(DELIMITER);
  for (int i = 0; i < keys.length; i++) {
    buf.append(keys[i]);
    buf.append("=\"");
    // Escape into a local, not back into the caller's array: the old code
    // wrote the escaped form into values[i] as a hidden side effect.
    buf.append(escapeString(values[i]));
    buf.append("\"");
    buf.append(DELIMITER);
  }
  buf.append(LINE_DELIMITER_CHAR);
  String record = buf.toString();
  for (PrintWriter out : writers) {
    out.println(record);
  }
}
/**
 * Returns history disable status. by default history is enabled so this
 * method returns false.
 * @return true if history logging is disabled, false otherwise.
 */
public static boolean isDisableHistory() {
  // NOTE(review): plain static boolean, no volatile/synchronization --
  // cross-thread visibility relies on callers; confirm intended usage.
  return disableHistory;
}
/**
 * Enable/disable history logging. Default value is false, so history
 * is enabled by default.
 * @param disableHistory true if history should be disabled, false otherwise.
 */
public static void setDisableHistory(boolean disableHistory) {
  // Parameter shadows the static field, hence the class-name qualifier.
  JobHistory.disableHistory = disableHistory;
}
/**
 * Base class containing utility methods for managing typed key/value pairs
 * keyed by the {@link Keys} enum.
 */
static class KeyValuePair {
  private Map<Keys, String> values = new HashMap<Keys, String>();

  /**
   * Get the 'String' value for the given key. Most of the places use Strings
   * as values so the default get method returns 'String'. Never returns null
   * (eases GUI code); if no value is found the empty string "" is returned.
   * @param k
   * @return if absent, the empty string ""
   */
  public String get(Keys k) {
    String s = values.get(k);
    if (s == null) {
      return "";
    }
    return s;
  }

  /**
   * Convert value from history to int and return; 0 if no value is found.
   * @param k key
   */
  public int getInt(Keys k) {
    String s = values.get(k);
    return s == null ? 0 : Integer.parseInt(s);
  }

  /**
   * Convert value from history to long and return; 0 if no value is found.
   * @param k
   */
  public long getLong(Keys k) {
    String s = values.get(k);
    return s == null ? 0 : Long.parseLong(s);
  }

  /**
   * Set value for the key.
   * @param k
   * @param s
   */
  public void set(Keys k, String s) {
    values.put(k, s);
  }

  /**
   * Adds all values in the Map argument to its own values.
   * @param m
   */
  public void set(Map<Keys, String> m) {
    values.putAll(m);
  }

  /**
   * Reads values back from the history; input is the same Map as passed to a
   * Listener by parseHistory().
   * @param values
   */
  public synchronized void handle(Map<Keys, String> values) {
    set(values);
  }

  /**
   * Returns the Map containing all key-values.
   */
  public Map<Keys, String> getValues() {
    return values;
  }
}
/**
* Helper class for logging or reading back events related to job start, finish or failure.
*/
public static class JobInfo extends KeyValuePair{
// Tasks of this job keyed by task id; TreeMap keeps them sorted by id.
// NOTE(review): populated elsewhere in this class -- confirm callers.
private Map<String, Task> allTasks = new TreeMap<String, Task>();
/** Create new JobInfo */
public JobInfo(String jobId){
  set(Keys.JOBID, jobId);
}
/**
 * Returns all map and reduce tasks <taskid-Task>.
 * Note: exposes the internal (mutable) map, not a copy.
 */
public Map<String, Task> getAllTasks() { return allTasks; }
/**
 * Get the path of the locally stored job file
 * @param jobId id of the job
 * @return the path of the job file on the local file system
 */
public static String getLocalJobFilePath(JobID jobId){
  StringBuilder path = new StringBuilder(System.getProperty("hadoop.log.dir"));
  path.append(File.separator).append(jobId).append("_conf.xml");
  return path.toString();
}
/**
 * Helper function to encode the URL of the path of the job-history
 * log file.  Only the file-name component is URL-encoded; the parent
 * directory is kept as-is.
 *
 * @param logFile path of the job-history file
 * @return URL encoded path
 * @throws IOException if UTF-8 encoding is unavailable (effectively never)
 */
public static String encodeJobHistoryFilePath(String logFile)
  throws IOException {
  Path rawPath = new Path(logFile);
  String encodedFileName = null;
  try {
    encodedFileName = URLEncoder.encode(rawPath.getName(), "UTF-8");
  } catch (UnsupportedEncodingException uee) {
    // Re-wrap as IOException to match the method's contract; cause and
    // original stack trace are attached to the new exception.
    IOException ioe = new IOException();
    ioe.initCause(uee);
    ioe.setStackTrace(uee.getStackTrace());
    throw ioe;
  }
  Path encodedPath = new Path(rawPath.getParent(), encodedFileName);
  return encodedPath.toString();
}
/**
 * Helper function to encode the URL of the filename of the job-history
 * log file.
 *
 * @param logFileName file name of the job-history file
 * @return URL encoded filename
 * @throws IOException if UTF-8 encoding is unavailable (effectively never)
 */
public static String encodeJobHistoryFileName(String logFileName)
  throws IOException {
  try {
    return URLEncoder.encode(logFileName, "UTF-8");
  } catch (UnsupportedEncodingException uee) {
    // Re-wrap as IOException, preserving cause and original stack trace.
    IOException ioe = new IOException();
    ioe.initCause(uee);
    ioe.setStackTrace(uee.getStackTrace());
    throw ioe;
  }
}
/**
* Helper function to decode the URL of the filename of the job-history
* log file.
*
* @param logFileName file name of the job-history file
* @return URL decoded filename
* @throws IOException
*/
public static String decodeJobHistoryFileName(String logFileName)
throws IOException {
String decodedFileName = null;
try {
decodedFileName = URLDecoder.decode(logFileName, "UTF-8");
} catch (UnsupportedEncodingException uee) {
IOException ioe = new IOException();
ioe.initCause(uee);
ioe.setStackTrace(uee.getStackTrace());
throw ioe;
}
return decodedFileName;
}
/**
* Get the job name from the job conf
*/
static String getJobName(JobConf jobConf) {
String jobName = jobConf.getJobName();
if (jobName == null || jobName.length() == 0) {
jobName = "NA";
}
return jobName;
}
/**
* Get the user name from the job conf
*/
public static String getUserName(JobConf jobConf) {
String user = jobConf.getUser();
if (user == null || user.length() == 0) {
user = "NA";
}
return user;
}
    /**
     * Get the job history file path given the history filename.
     * Returns null when history logging is unconfigured (LOG_DIR unset).
     */
    public static Path getJobHistoryLogLocation(String logFileName)
    {
      return LOG_DIR == null ? null : new Path(LOG_DIR, logFileName);
    }
    /**
     * Get the user job history file path.
     * The directory comes from "hadoop.job.history.user.location",
     * defaulting to the job's output path; the literal value "none"
     * disables the user-side copy (returns null). The file is placed
     * under {dir}/_logs/history/{logFileName}.
     */
    public static Path getJobHistoryLogLocationForUser(String logFileName,
                                                       JobConf jobConf) {
      // find user log directory
      Path userLogFile = null;
      Path outputPath = FileOutputFormat.getOutputPath(jobConf);
      String userLogDir = jobConf.get("hadoop.job.history.user.location",
                                      outputPath == null
                                      ? null
                                      : outputPath.toString());
      if ("none".equals(userLogDir)) {
        // "none" is the sentinel for "no user-side history"
        userLogDir = null;
      }
      if (userLogDir != null) {
        userLogDir = userLogDir + Path.SEPARATOR + "_logs" + Path.SEPARATOR
                     + "history";
        userLogFile = new Path(userLogDir, logFileName);
      }
      return userLogFile;
    }
    /**
     * Generates the job history filename for a new job:
     * {jobtracker-unique-string}{job-id}_{user}_{trimmed-job-name}.
     */
    private static String getNewJobHistoryFileName(JobConf jobConf, JobID id) {
      return JOBTRACKER_UNIQUE_STRING
             + id.toString() + "_" + getUserName(jobConf) + "_"
             + trimJobName(getJobName(jobConf));
    }
/**
* Trims the job-name if required
*/
private static String trimJobName(String jobName) {
if (jobName.length() > JOB_NAME_TRIM_LENGTH) {
jobName = jobName.substring(0, JOB_NAME_TRIM_LENGTH);
}
return jobName;
}
private static String escapeRegexChars( String string ) {
return "\\Q"+string.replaceAll("\\\\E", "\\\\E\\\\\\\\E\\\\Q")+"\\E";
}
/**
* Recover the job history filename from the history folder.
* Uses the following pattern
* $jt-hostname_[0-9]*_$job-id_$user-$job-name*
* @param jobConf the job conf
* @param id job id
*/
public static synchronized String getJobHistoryFileName(JobConf jobConf,
JobID id)
throws IOException {
String user = getUserName(jobConf);
String jobName = trimJobName(getJobName(jobConf));
FileSystem fs = new Path(LOG_DIR).getFileSystem(jobConf);
if (LOG_DIR == null) {
return null;
}
jobName = escapeRegexChars( jobName );
// Make the pattern matching the job's history file
final Pattern historyFilePattern =
Pattern.compile(jobtrackerHostname + "_" + DIGITS + "_"
+ id.toString() + "_" + user + "_" + jobName + "+");
// a path filter that matches 4 parts of the filenames namely
// - jt-hostname
// - job-id
// - username
// - jobname
PathFilter filter = new PathFilter() {
public boolean accept(Path path) {
String fileName = path.getName();
try {
fileName = decodeJobHistoryFileName(fileName);
} catch (IOException ioe) {
LOG.info("Error while decoding history file " + fileName + "."
+ " Ignoring file.", ioe);
return false;
}
return historyFilePattern.matcher(fileName).find();
}
};
FileStatus[] statuses = fs.listStatus(new Path(LOG_DIR), filter);
String filename;
if (statuses.length == 0) {
filename =
encodeJobHistoryFileName(getNewJobHistoryFileName(jobConf, id));
} else {
// return filename considering that fact the name can be a
// secondary filename like filename.recover
filename = decodeJobHistoryFileName(statuses[0].getPath().getName());
// Remove the '.recover' suffix if it exists
if (filename.endsWith(jobName + SECONDARY_FILE_SUFFIX)) {
int newLength = filename.length() - SECONDARY_FILE_SUFFIX.length();
filename = filename.substring(0, newLength);
}
filename = encodeJobHistoryFileName(filename);
}
return filename;
}
    /** Since there was a restart, there should be a master file and
     * a recovery file. Once the recovery is complete, the master should be
     * deleted as an indication that the recovery file should be treated as the
     * master upon completion or next restart.
     * Deletes the master history file (and its user-side copy, if any);
     * missing files are a no-op since delete() tolerates absent paths here.
     * @param fileName the history filename that needs checkpointing
     * @param conf Job conf
     * @throws IOException
     */
    static synchronized void checkpointRecovery(String fileName, JobConf conf)
    throws IOException {
      Path logPath = JobHistory.JobInfo.getJobHistoryLogLocation(fileName);
      if (logPath != null) {
        FileSystem fs = logPath.getFileSystem(conf);
        fs.delete(logPath, false);
      }
      // do the same for the user file too
      logPath = JobHistory.JobInfo.getJobHistoryLogLocationForUser(fileName,
                                                                   conf);
      if (logPath != null) {
        FileSystem fs = logPath.getFileSystem(conf);
        fs.delete(logPath, false);
      }
    }
static String getSecondaryJobHistoryFile(String filename)
throws IOException {
return encodeJobHistoryFileName(
decodeJobHistoryFileName(filename) + SECONDARY_FILE_SUFFIX);
}
    /** Selects one of the two files generated as a part of recovery.
     * The thumb rule is that always select the oldest file.
     * This call makes sure that only one file is left in the end.
     * Visible behavior: if the master file exists, any stale secondary is
     * deleted and the secondary path is returned (for fresh writing); if
     * only the secondary exists it is promoted to master via rename; if
     * neither exists the master path is used directly.
     * NOTE(review): in the rename branch the (now renamed-away) secondary
     * path is still returned — presumably the caller recreates the file at
     * the returned path; confirm against callers.
     * @param conf job conf
     * @param logFilePath Path of the log file
     * @throws IOException
     */
    public synchronized static Path recoverJobHistoryFile(JobConf conf,
                                                          Path logFilePath)
    throws IOException {
      FileSystem fs = logFilePath.getFileSystem(conf);
      String tmpFilename = getSecondaryJobHistoryFile(logFilePath.getName());
      Path logDir = logFilePath.getParent();
      Path tmpFilePath = new Path(logDir, tmpFilename);
      if (fs.exists(logFilePath)) {
        if (fs.exists(tmpFilePath)) {
          fs.delete(tmpFilePath, false);
        }
        return tmpFilePath;
      } else {
        if (fs.exists(tmpFilePath)) {
          fs.rename(tmpFilePath, logFilePath);
          return tmpFilePath;
        } else {
          return logFilePath;
        }
      }
    }
    /** Finalize the recovery and make one file in the end.
     * This invloves renaming the recover file to the master file.
     * Applied to both the jobtracker-side history file and, when
     * configured, the user-side copy.
     * @param id Job id
     * @param conf the job conf
     * @throws IOException
     */
    static synchronized void finalizeRecovery(JobID id, JobConf conf)
    throws IOException {
      String masterLogFileName =
        JobHistory.JobInfo.getJobHistoryFileName(conf, id);
      Path masterLogPath =
        JobHistory.JobInfo.getJobHistoryLogLocation(masterLogFileName);
      String tmpLogFileName = getSecondaryJobHistoryFile(masterLogFileName);
      Path tmpLogPath =
        JobHistory.JobInfo.getJobHistoryLogLocation(tmpLogFileName);
      if (masterLogPath != null) {
        FileSystem fs = masterLogPath.getFileSystem(conf);
        // rename the tmp file to the master file. Note that this should be
        // done only when the file is closed and handles are released.
        if(fs.exists(tmpLogPath)) {
          fs.rename(tmpLogPath, masterLogPath);
        }
      }
      // do the same for the user file too
      masterLogPath =
        JobHistory.JobInfo.getJobHistoryLogLocationForUser(masterLogFileName,
                                                           conf);
      tmpLogPath =
        JobHistory.JobInfo.getJobHistoryLogLocationForUser(tmpLogFileName,
                                                           conf);
      if (masterLogPath != null) {
        FileSystem fs = masterLogPath.getFileSystem(conf);
        if (fs.exists(tmpLogPath)) {
          fs.rename(tmpLogPath, masterLogPath);
        }
      }
    }
    /**
     * Log job submitted event to history. Creates a new file in history
     * for the job. if history file creation fails, it disables history
     * for all other events.
     * Also always writes the job conf XML to the local filesystem, and
     * (when configured) to the history log dir and the user log dir.
     * @param jobId job id assigned by job tracker.
     * @param jobConf job conf of the job
     * @param jobConfPath path to job conf xml file in HDFS.
     * @param submitTime time when job tracker received the job
     * @throws IOException
     */
    public static void logSubmitted(JobID jobId, JobConf jobConf,
                                    String jobConfPath, long submitTime)
    throws IOException {
      FileSystem fs = null;
      String userLogDir = null;
      String jobUniqueString = JOBTRACKER_UNIQUE_STRING + jobId;
      if (!disableHistory){
        // Get the username and job name to be used in the actual log filename;
        // sanity check them too
        String jobName = getJobName(jobConf);
        String user = getUserName(jobConf);
        // get the history filename
        String logFileName =
          getJobHistoryFileName(jobConf, jobId);
        // setup the history log file for this job
        Path logFile = getJobHistoryLogLocation(logFileName);
        // find user log directory
        Path userLogFile =
          getJobHistoryLogLocationForUser(logFileName, jobConf);
        try{
          // One writer per destination (jobtracker-side and/or user-side).
          ArrayList<PrintWriter> writers = new ArrayList<PrintWriter>();
          FSDataOutputStream out = null;
          PrintWriter writer = null;
          if (LOG_DIR != null) {
            // create output stream for logging in hadoop.job.history.location
            fs = new Path(LOG_DIR).getFileSystem(jobConf);
            logFile = recoverJobHistoryFile(jobConf, logFile);
            int defaultBufferSize =
              fs.getConf().getInt("io.file.buffer.size", 4096);
            out = fs.create(logFile, FsPermission.getDefault(), true,
                            defaultBufferSize,
                            fs.getDefaultReplication(),
                            jobHistoryBlockSize, null);
            writer = new PrintWriter(out);
            writers.add(writer);
          }
          if (userLogFile != null) {
            userLogDir = userLogFile.getParent().toString();
            // create output stream for logging
            // in hadoop.job.history.user.location
            fs = userLogFile.getFileSystem(jobConf);
            userLogFile = recoverJobHistoryFile(jobConf, userLogFile);
            out = fs.create(userLogFile, true, 4096);
            writer = new PrintWriter(out);
            writers.add(writer);
          }
          // Register the open writers so later log* calls can find them.
          openJobs.put(jobUniqueString, writers);
          // Log the history meta info
          JobHistory.MetaInfoManager.logMetaInfo(writers);
          //add to writer as well
          JobHistory.log(writers, RecordTypes.Job,
                         new Keys[]{Keys.JOBID, Keys.JOBNAME, Keys.USER, Keys.SUBMIT_TIME, Keys.JOBCONF },
                         new String[]{jobId.toString(), jobName, user,
                                      String.valueOf(submitTime) , jobConfPath}
                        );
        }catch(IOException e){
          // Any failure here disables history globally for this jobtracker.
          LOG.error("Failed creating job history log file, disabling history", e);
          disableHistory = true;
        }
      }
      // Always store job conf on local file system
      String localJobFilePath = JobInfo.getLocalJobFilePath(jobId);
      File localJobFile = new File(localJobFilePath);
      FileOutputStream jobOut = null;
      try {
        jobOut = new FileOutputStream(localJobFile);
        jobConf.writeXml(jobOut);
        if (LOG.isDebugEnabled()) {
          LOG.debug("Job conf for " + jobId + " stored at "
                    + localJobFile.getAbsolutePath());
        }
      } catch (IOException ioe) {
        LOG.error("Failed to store job conf on the local filesystem ", ioe);
      } finally {
        if (jobOut != null) {
          try {
            jobOut.close();
          } catch (IOException ie) {
            LOG.info("Failed to close the job configuration file "
                     + StringUtils.stringifyException(ie));
          }
        }
      }
      /* Storing the job conf on the log dir */
      Path jobFilePath = null;
      if (LOG_DIR != null) {
        jobFilePath = new Path(LOG_DIR + File.separator +
                               jobUniqueString + "_conf.xml");
      }
      Path userJobFilePath = null;
      if (userLogDir != null) {
        userJobFilePath = new Path(userLogDir + File.separator +
                                   jobUniqueString + "_conf.xml");
      }
      FSDataOutputStream jobFileOut = null;
      try {
        if (LOG_DIR != null) {
          fs = new Path(LOG_DIR).getFileSystem(jobConf);
          // Skip if some other component already wrote the conf file.
          if (!fs.exists(jobFilePath)) {
            jobFileOut = fs.create(jobFilePath);
            jobConf.writeXml(jobFileOut);
            jobFileOut.close();
          }
        }
        if (userLogDir != null) {
          fs = new Path(userLogDir).getFileSystem(jobConf);
          jobFileOut = fs.create(userJobFilePath);
          jobConf.writeXml(jobFileOut);
        }
        if (LOG.isDebugEnabled()) {
          // NOTE(review): message reads "...stored at Xand Y" — the "and"
          // literal is missing surrounding spaces.
          LOG.debug("Job conf for " + jobId + " stored at "
                    + jobFilePath + "and" + userJobFilePath );
        }
      } catch (IOException ioe) {
        LOG.error("Failed to store job conf on the local filesystem ", ioe);
      } finally {
        if (jobFileOut != null) {
          try {
            jobFileOut.close();
          } catch (IOException ie) {
            LOG.info("Failed to close the job configuration file "
                     + StringUtils.stringifyException(ie));
          }
        }
      }
    }
/**
* Logs launch time of job.
*
* @param jobId job id, assigned by jobtracker.
* @param startTime start time of job.
* @param totalMaps total maps assigned by jobtracker.
* @param totalReduces total reduces.
*/
public static void logInited(JobID jobId, long startTime,
int totalMaps, int totalReduces) {
if (!disableHistory){
String logFileKey = JOBTRACKER_UNIQUE_STRING + jobId;
ArrayList<PrintWriter> writer = openJobs.get(logFileKey);
if (null != writer){
JobHistory.log(writer, RecordTypes.Job,
new Keys[] {Keys.JOBID, Keys.LAUNCH_TIME, Keys.TOTAL_MAPS,
Keys.TOTAL_REDUCES, Keys.JOB_STATUS},
new String[] {jobId.toString(), String.valueOf(startTime),
String.valueOf(totalMaps),
String.valueOf(totalReduces),
Values.PREP.name()});
}
}
}
    /**
     * Logs the job as RUNNING.
     * Retained for compatibility; the extra parameters are ignored and the
     * call simply delegates to {@link #logStarted(JobID)}.
     *
     * @param jobId job id, assigned by jobtracker.
     * @param startTime start time of job.
     * @param totalMaps total maps assigned by jobtracker.
     * @param totalReduces total reduces.
     * @deprecated Use {@link #logInited(JobID, long, int, int)} and
     * {@link #logStarted(JobID)}
     */
    @Deprecated
    public static void logStarted(JobID jobId, long startTime,
                                  int totalMaps, int totalReduces) {
      logStarted(jobId);
    }
/**
* Logs job as running
* @param jobId job id, assigned by jobtracker.
*/
public static void logStarted(JobID jobId){
if (!disableHistory){
String logFileKey = JOBTRACKER_UNIQUE_STRING + jobId;
ArrayList<PrintWriter> writer = openJobs.get(logFileKey);
if (null != writer){
JobHistory.log(writer, RecordTypes.Job,
new Keys[] {Keys.JOBID, Keys.JOB_STATUS},
new String[] {jobId.toString(),
Values.RUNNING.name()});
}
}
}
    /**
     * Log job finished. closes the job file in history.
     * Also removes the job's writers from openJobs and starts a
     * background HistoryCleaner sweep of old history files.
     * @param jobId job id, assigned by jobtracker.
     * @param finishTime finish time of job in ms.
     * @param finishedMaps no of maps successfully finished.
     * @param finishedReduces no of reduces finished successfully.
     * @param failedMaps no of failed map tasks.
     * @param failedReduces no of failed reduce tasks.
     * @param counters the counters from the job
     */
    public static void logFinished(JobID jobId, long finishTime,
                                   int finishedMaps, int finishedReduces,
                                   int failedMaps, int failedReduces,
                                   Counters counters){
      if (!disableHistory){
        // close job file for this job
        String logFileKey = JOBTRACKER_UNIQUE_STRING + jobId;
        ArrayList<PrintWriter> writer = openJobs.get(logFileKey);
        if (null != writer){
          JobHistory.log(writer, RecordTypes.Job,
                         new Keys[] {Keys.JOBID, Keys.FINISH_TIME,
                                     Keys.JOB_STATUS, Keys.FINISHED_MAPS,
                                     Keys.FINISHED_REDUCES,
                                     Keys.FAILED_MAPS, Keys.FAILED_REDUCES,
                                     Keys.COUNTERS},
                         new String[] {jobId.toString(), Long.toString(finishTime),
                                       Values.SUCCESS.name(),
                                       String.valueOf(finishedMaps),
                                       String.valueOf(finishedReduces),
                                       String.valueOf(failedMaps),
                                       String.valueOf(failedReduces),
                                       counters.makeEscapedCompactString()});
          // Final record written: flush and release all file handles.
          for (PrintWriter out : writer) {
            out.close();
          }
          openJobs.remove(logFileKey);
        }
        // Kick off a background sweep of old history files.
        Thread historyCleaner = new Thread(new HistoryCleaner());
        historyCleaner.start();
      }
    }
/**
* Logs job failed event. Closes the job history log file.
* @param jobid job id
* @param timestamp time when job failure was detected in ms.
* @param finishedMaps no finished map tasks.
* @param finishedReduces no of finished reduce tasks.
*/
public static void logFailed(JobID jobid, long timestamp, int finishedMaps, int finishedReduces){
if (!disableHistory){
String logFileKey = JOBTRACKER_UNIQUE_STRING + jobid;
ArrayList<PrintWriter> writer = openJobs.get(logFileKey);
if (null != writer){
JobHistory.log(writer, RecordTypes.Job,
new Keys[] {Keys.JOBID, Keys.FINISH_TIME, Keys.JOB_STATUS, Keys.FINISHED_MAPS, Keys.FINISHED_REDUCES },
new String[] {jobid.toString(), String.valueOf(timestamp), Values.FAILED.name(), String.valueOf(finishedMaps),
String.valueOf(finishedReduces)});
for (PrintWriter out : writer) {
out.close();
}
openJobs.remove(logFileKey);
}
}
}
    /**
     * Logs job killed event. Closes the job history log file.
     *
     * @param jobid
     *          job id
     * @param timestamp
     *          time when job killed was issued in ms.
     * @param finishedMaps
     *          no finished map tasks.
     * @param finishedReduces
     *          no of finished reduce tasks.
     */
    public static void logKilled(JobID jobid, long timestamp, int finishedMaps,
        int finishedReduces) {
      if (!disableHistory) {
        String logFileKey = JOBTRACKER_UNIQUE_STRING + jobid;
        ArrayList<PrintWriter> writer = openJobs.get(logFileKey);
        if (null != writer) {
          JobHistory.log(writer, RecordTypes.Job, new Keys[] { Keys.JOBID,
              Keys.FINISH_TIME, Keys.JOB_STATUS, Keys.FINISHED_MAPS,
              Keys.FINISHED_REDUCES }, new String[] { jobid.toString(),
              String.valueOf(timestamp), Values.KILLED.name(),
              String.valueOf(finishedMaps), String.valueOf(finishedReduces) });
          // Terminal record written: flush and release all file handles.
          for (PrintWriter out : writer) {
            out.close();
          }
          openJobs.remove(logFileKey);
        }
      }
    }
/**
* Log job's priority.
* @param jobid job id
* @param priority Jobs priority
*/
public static void logJobPriority(JobID jobid, JobPriority priority){
if (!disableHistory){
String logFileKey = JOBTRACKER_UNIQUE_STRING + jobid;
ArrayList<PrintWriter> writer = openJobs.get(logFileKey);
if (null != writer){
JobHistory.log(writer, RecordTypes.Job,
new Keys[] {Keys.JOBID, Keys.JOB_PRIORITY},
new String[] {jobid.toString(), priority.toString()});
}
}
}
    /**
     * Log job's submit-time/launch-time
     * @param jobid job id
     * @param submitTime job's submit time
     * @param launchTime job's launch time
     * @param restartCount number of times the job got restarted
     */
    public static void logJobInfo(JobID jobid, long submitTime, long launchTime,
                                  int restartCount){
      if (!disableHistory){
        String logFileKey = JOBTRACKER_UNIQUE_STRING + jobid;
        // Only log if history files for the job are open.
        ArrayList<PrintWriter> writer = openJobs.get(logFileKey);
        if (null != writer){
          JobHistory.log(writer, RecordTypes.Job,
                         new Keys[] {Keys.JOBID, Keys.SUBMIT_TIME,
                                     Keys.LAUNCH_TIME, Keys.RESTART_COUNT},
                         new String[] {jobid.toString(),
                                       String.valueOf(submitTime),
                                       String.valueOf(launchTime),
                                       String.valueOf(restartCount)});
        }
      }
    }
}
/**
* Helper class for logging or reading back events related to Task's start, finish or failure.
* All events logged by this class are logged in a separate file per job in
* job tracker history. These events map to TIPs in jobtracker.
*/
public static class Task extends KeyValuePair{
private Map <String, TaskAttempt> taskAttempts = new TreeMap<String, TaskAttempt>();
    /**
     * Log start time of task (TIP).
     * @param taskId task id
     * @param taskType MAP or REDUCE
     * @param startTime startTime of tip.
     * @param splitLocations serialized split locations for the task
     */
    public static void logStarted(TaskID taskId, String taskType,
                                  long startTime, String splitLocations) {
      if (!disableHistory){
        // Writers for the enclosing job's open history files, if any.
        ArrayList<PrintWriter> writer = openJobs.get(JOBTRACKER_UNIQUE_STRING
                                                     + taskId.getJobID());
        if (null != writer){
          JobHistory.log(writer, RecordTypes.Task,
                         new Keys[]{Keys.TASKID, Keys.TASK_TYPE ,
                                    Keys.START_TIME, Keys.SPLITS},
                         new String[]{taskId.toString(), taskType,
                                      String.valueOf(startTime),
                                      splitLocations});
        }
      }
    }
/**
* Log finish time of task.
* @param taskId task id
* @param taskType MAP or REDUCE
* @param finishTime finish timeof task in ms
*/
public static void logFinished(TaskID taskId, String taskType,
long finishTime, Counters counters){
if (!disableHistory){
ArrayList<PrintWriter> writer = openJobs.get(JOBTRACKER_UNIQUE_STRING
+ taskId.getJobID());
if (null != writer){
JobHistory.log(writer, RecordTypes.Task,
new Keys[]{Keys.TASKID, Keys.TASK_TYPE,
Keys.TASK_STATUS, Keys.FINISH_TIME,
Keys.COUNTERS},
new String[]{ taskId.toString(), taskType, Values.SUCCESS.name(),
String.valueOf(finishTime),
counters.makeEscapedCompactString()});
}
}
}
/**
* Update the finish time of task.
* @param taskId task id
* @param finishTime finish time of task in ms
*/
public static void logUpdates(TaskID taskId, long finishTime){
if (!disableHistory){
ArrayList<PrintWriter> writer = openJobs.get(JOBTRACKER_UNIQUE_STRING
+ taskId.getJobID());
if (null != writer){
JobHistory.log(writer, RecordTypes.Task,
new Keys[]{Keys.TASKID, Keys.FINISH_TIME},
new String[]{ taskId.toString(),
String.valueOf(finishTime)});
}
}
}
    /**
     * Log job failed event.
     * Convenience overload that delegates to the five-argument form with
     * no failing attempt recorded.
     * @param taskId task id
     * @param taskType MAP or REDUCE.
     * @param time timestamp when job failed detected.
     * @param error error message for failure.
     */
    public static void logFailed(TaskID taskId, String taskType, long time, String error){
      logFailed(taskId, taskType, time, error, null);
    }
/**
* @param failedDueToAttempt The attempt that caused the failure, if any
*/
public static void logFailed(TaskID taskId, String taskType, long time,
String error,
TaskAttemptID failedDueToAttempt){
if (!disableHistory){
ArrayList<PrintWriter> writer = openJobs.get(JOBTRACKER_UNIQUE_STRING
+ taskId.getJobID());
if (null != writer){
String failedAttempt = failedDueToAttempt == null
? ""
: failedDueToAttempt.toString();
JobHistory.log(writer, RecordTypes.Task,
new Keys[]{Keys.TASKID, Keys.TASK_TYPE,
Keys.TASK_STATUS, Keys.FINISH_TIME,
Keys.ERROR, Keys.TASK_ATTEMPT_ID},
new String[]{ taskId.toString(), taskType,
Values.FAILED.name(),
String.valueOf(time) , error,
failedAttempt});
}
}
}
    /**
     * Returns all task attempts for this task. <task attempt id - TaskAttempt>
     * The live internal map is returned, not a copy.
     */
    public Map<String, TaskAttempt> getTaskAttempts(){
      return this.taskAttempts;
    }
}
  /**
   * Base class for Map and Reduce TaskAttempts.
   * Marker type only — it adds no members beyond {@link Task}.
   */
  public static class TaskAttempt extends Task{}
/**
* Helper class for logging or reading back events related to start, finish or failure of
* a Map Attempt on a node.
*/
public static class MapAttempt extends TaskAttempt{
    /**
     * Log start time of this map task attempt.
     * Delegates to the five-argument form with an unset (-1) http port
     * and task type MAP.
     * @param taskAttemptId task attempt id
     * @param startTime start time of task attempt as reported by task tracker.
     * @param hostName host name of the task attempt.
     * @deprecated Use
     *             {@link #logStarted(TaskAttemptID, long, String, int, String)}
     */
    @Deprecated
    public static void logStarted(TaskAttemptID taskAttemptId, long startTime, String hostName){
      logStarted(taskAttemptId, startTime, hostName, -1, Values.MAP.name());
    }
/**
* Log start time of this map task attempt.
*
* @param taskAttemptId task attempt id
* @param startTime start time of task attempt as reported by task tracker.
* @param trackerName name of the tracker executing the task attempt.
* @param httpPort http port of the task tracker executing the task attempt
* @param taskType Whether the attempt is cleanup or setup or map
*/
public static void logStarted(TaskAttemptID taskAttemptId, long startTime,
String trackerName, int httpPort,
String taskType) {
if (!disableHistory){
ArrayList<PrintWriter> writer = openJobs.get(JOBTRACKER_UNIQUE_STRING
+ taskAttemptId.getJobID());
if (null != writer){
JobHistory.log(writer, RecordTypes.MapAttempt,
new Keys[]{ Keys.TASK_TYPE, Keys.TASKID,
Keys.TASK_ATTEMPT_ID, Keys.START_TIME,
Keys.TRACKER_NAME, Keys.HTTP_PORT},
new String[]{taskType,
taskAttemptId.getTaskID().toString(),
taskAttemptId.toString(),
String.valueOf(startTime), trackerName,
httpPort == -1 ? "" :
String.valueOf(httpPort)});
}
}
}
    /**
     * Log finish time of map task attempt.
     * Delegates to the full form with task type MAP, an empty state string
     * and empty counters.
     * @param taskAttemptId task attempt id
     * @param finishTime finish time
     * @param hostName host name
     * @deprecated Use
     * {@link #logFinished(TaskAttemptID, long, String, String, String, Counters)}
     */
    @Deprecated
    public static void logFinished(TaskAttemptID taskAttemptId, long finishTime,
                                   String hostName){
      logFinished(taskAttemptId, finishTime, hostName, Values.MAP.name(), "",
                  new Counters());
    }
    /**
     * Log finish time of map task attempt.
     *
     * @param taskAttemptId task attempt id
     * @param finishTime finish time
     * @param hostName host name
     * @param taskType Whether the attempt is cleanup or setup or map
     * @param stateString state string of the task attempt
     * @param counter counters of the task attempt
     */
    public static void logFinished(TaskAttemptID taskAttemptId,
                                   long finishTime,
                                   String hostName,
                                   String taskType,
                                   String stateString,
                                   Counters counter) {
      if (!disableHistory){
        // Writers for the enclosing job's open history files, if any.
        ArrayList<PrintWriter> writer = openJobs.get(JOBTRACKER_UNIQUE_STRING
                                                     + taskAttemptId.getJobID());
        if (null != writer){
          JobHistory.log(writer, RecordTypes.MapAttempt,
                         new Keys[]{ Keys.TASK_TYPE, Keys.TASKID,
                                     Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS,
                                     Keys.FINISH_TIME, Keys.HOSTNAME,
                                     Keys.STATE_STRING, Keys.COUNTERS},
                         new String[]{taskType,
                                      taskAttemptId.getTaskID().toString(),
                                      taskAttemptId.toString(),
                                      Values.SUCCESS.name(),
                                      String.valueOf(finishTime), hostName,
                                      stateString,
                                      counter.makeEscapedCompactString()});
        }
      }
    }
    /**
     * Log task attempt failed event.
     * Delegates to the five-argument form with task type MAP.
     * @param taskAttemptId task attempt id
     * @param timestamp timestamp
     * @param hostName hostname of this task attempt.
     * @param error error message if any for this task attempt.
     * @deprecated Use
     * {@link #logFailed(TaskAttemptID, long, String, String, String)}
     */
    @Deprecated
    public static void logFailed(TaskAttemptID taskAttemptId,
                                 long timestamp, String hostName,
                                 String error) {
      logFailed(taskAttemptId, timestamp, hostName, error, Values.MAP.name());
    }
/**
* Log task attempt failed event.
*
* @param taskAttemptId task attempt id
* @param timestamp timestamp
* @param hostName hostname of this task attempt.
* @param error error message if any for this task attempt.
* @param taskType Whether the attempt is cleanup or setup or map
*/
public static void logFailed(TaskAttemptID taskAttemptId,
long timestamp, String hostName,
String error, String taskType) {
if (!disableHistory){
ArrayList<PrintWriter> writer = openJobs.get(JOBTRACKER_UNIQUE_STRING
+ taskAttemptId.getJobID());
if (null != writer){
JobHistory.log(writer, RecordTypes.MapAttempt,
new Keys[]{Keys.TASK_TYPE, Keys.TASKID,
Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS,
Keys.FINISH_TIME, Keys.HOSTNAME, Keys.ERROR},
new String[]{ taskType,
taskAttemptId.getTaskID().toString(),
taskAttemptId.toString(),
Values.FAILED.name(),
String.valueOf(timestamp),
hostName, error});
}
}
}
    /**
     * Log task attempt killed event.
     * Delegates to the five-argument form with task type MAP.
     * @param taskAttemptId task attempt id
     * @param timestamp timestamp
     * @param hostName hostname of this task attempt.
     * @param error error message if any for this task attempt.
     * @deprecated Use
     * {@link #logKilled(TaskAttemptID, long, String, String, String)}
     */
    @Deprecated
    public static void logKilled(TaskAttemptID taskAttemptId,
                                 long timestamp, String hostName, String error){
      logKilled(taskAttemptId, timestamp, hostName, error, Values.MAP.name());
    }
    /**
     * Log task attempt killed event.
     *
     * @param taskAttemptId task attempt id
     * @param timestamp timestamp
     * @param hostName hostname of this task attempt.
     * @param error error message if any for this task attempt.
     * @param taskType Whether the attempt is cleanup or setup or map
     */
    public static void logKilled(TaskAttemptID taskAttemptId,
                                 long timestamp, String hostName,
                                 String error, String taskType) {
      if (!disableHistory){
        // Writers for the enclosing job's open history files, if any.
        ArrayList<PrintWriter> writer = openJobs.get(JOBTRACKER_UNIQUE_STRING
                                                     + taskAttemptId.getJobID());
        if (null != writer){
          JobHistory.log(writer, RecordTypes.MapAttempt,
                         new Keys[]{Keys.TASK_TYPE, Keys.TASKID,
                                    Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS,
                                    Keys.FINISH_TIME, Keys.HOSTNAME,
                                    Keys.ERROR},
                         new String[]{ taskType,
                                       taskAttemptId.getTaskID().toString(),
                                       taskAttemptId.toString(),
                                       Values.KILLED.name(),
                                       String.valueOf(timestamp),
                                       hostName, error});
        }
      }
    }
}
/**
* Helper class for logging or reading back events related to start, finish or failure of
* a Map Attempt on a node.
*/
public static class ReduceAttempt extends TaskAttempt{
    /**
     * Log start time of Reduce task attempt.
     * Delegates to the five-argument form with an unset (-1) http port
     * and task type REDUCE.
     * @param taskAttemptId task attempt id
     * @param startTime start time
     * @param hostName host name
     * @deprecated Use
     * {@link #logStarted(TaskAttemptID, long, String, int, String)}
     */
    @Deprecated
    public static void logStarted(TaskAttemptID taskAttemptId,
                                  long startTime, String hostName){
      logStarted(taskAttemptId, startTime, hostName, -1, Values.REDUCE.name());
    }
    /**
     * Log start time of Reduce task attempt.
     *
     * @param taskAttemptId task attempt id
     * @param startTime start time
     * @param trackerName tracker name
     * @param httpPort the http port of the tracker executing the task attempt
     * @param taskType Whether the attempt is cleanup or setup or reduce
     */
    public static void logStarted(TaskAttemptID taskAttemptId,
                                  long startTime, String trackerName,
                                  int httpPort,
                                  String taskType) {
      if (!disableHistory){
        // Writers for the enclosing job's open history files, if any.
        ArrayList<PrintWriter> writer = openJobs.get(JOBTRACKER_UNIQUE_STRING
                                                     + taskAttemptId.getJobID());
        if (null != writer){
          JobHistory.log(writer, RecordTypes.ReduceAttempt,
                         new Keys[]{  Keys.TASK_TYPE, Keys.TASKID,
                                      Keys.TASK_ATTEMPT_ID, Keys.START_TIME,
                                      Keys.TRACKER_NAME, Keys.HTTP_PORT},
                         new String[]{taskType,
                                      taskAttemptId.getTaskID().toString(),
                                      taskAttemptId.toString(),
                                      String.valueOf(startTime), trackerName,
                                      httpPort == -1 ? "" :
                                        String.valueOf(httpPort)});
        }
      }
    }
    /**
     * Log finished event of this task.
     * Delegates to the full form with task type REDUCE, an empty state
     * string and empty counters.
     * @param taskAttemptId task attempt id
     * @param shuffleFinished shuffle finish time
     * @param sortFinished sort finish time
     * @param finishTime finish time of task
     * @param hostName host name where task attempt executed
     * @deprecated Use
     * {@link #logFinished(TaskAttemptID, long, long, long, String, String, String, Counters)}
     */
    @Deprecated
    public static void logFinished(TaskAttemptID taskAttemptId, long shuffleFinished,
                                   long sortFinished, long finishTime,
                                   String hostName){
      logFinished(taskAttemptId, shuffleFinished, sortFinished,
                  finishTime, hostName, Values.REDUCE.name(),
                  "", new Counters());
    }
/**
* Log finished event of this task.
*
* @param taskAttemptId task attempt id
* @param shuffleFinished shuffle finish time
* @param sortFinished sort finish time
* @param finishTime finish time of task
* @param hostName host name where task attempt executed
* @param taskType Whether the attempt is cleanup or setup or reduce
* @param stateString the state string of the attempt
* @param counter counters of the attempt
*/
public static void logFinished(TaskAttemptID taskAttemptId,
long shuffleFinished,
long sortFinished, long finishTime,
String hostName, String taskType,
String stateString, Counters counter) {
if (!disableHistory){
ArrayList<PrintWriter> writer = openJobs.get(JOBTRACKER_UNIQUE_STRING
+ taskAttemptId.getJobID());
if (null != writer){
JobHistory.log(writer, RecordTypes.ReduceAttempt,
new Keys[]{ Keys.TASK_TYPE, Keys.TASKID,
Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS,
Keys.SHUFFLE_FINISHED, Keys.SORT_FINISHED,
Keys.FINISH_TIME, Keys.HOSTNAME,
Keys.STATE_STRING, Keys.COUNTERS},
new String[]{taskType,
taskAttemptId.getTaskID().toString(),
taskAttemptId.toString(),
Values.SUCCESS.name(),
String.valueOf(shuffleFinished),
String.valueOf(sortFinished),
String.valueOf(finishTime), hostName,
stateString,
counter.makeEscapedCompactString()});
}
}
}
    /**
     * Log failed reduce task attempt.
     * Delegates to the five-argument form with task type REDUCE.
     * @param taskAttemptId task attempt id
     * @param timestamp time stamp when task failed
     * @param hostName host name of the task attempt.
     * @param error error message of the task.
     * @deprecated Use
     * {@link #logFailed(TaskAttemptID, long, String, String, String)}
     */
    @Deprecated
    public static void logFailed(TaskAttemptID taskAttemptId, long timestamp,
                                 String hostName, String error){
      logFailed(taskAttemptId, timestamp, hostName, error, Values.REDUCE.name());
    }
    /**
     * Log failed reduce task attempt.
     *
     * @param taskAttemptId task attempt id
     * @param timestamp time stamp when task failed
     * @param hostName host name of the task attempt.
     * @param error error message of the task.
     * @param taskType Whether the attempt is cleanup or setup or reduce
     */
    public static void logFailed(TaskAttemptID taskAttemptId, long timestamp,
                                 String hostName, String error,
                                 String taskType) {
      if (!disableHistory){
        // Writers for the enclosing job's open history files, if any.
        ArrayList<PrintWriter> writer = openJobs.get(JOBTRACKER_UNIQUE_STRING
                                                     + taskAttemptId.getJobID());
        if (null != writer){
          JobHistory.log(writer, RecordTypes.ReduceAttempt,
                         new Keys[]{  Keys.TASK_TYPE, Keys.TASKID,
                                      Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS,
                                      Keys.FINISH_TIME, Keys.HOSTNAME,
                                      Keys.ERROR },
                         new String[]{ taskType,
                                       taskAttemptId.getTaskID().toString(),
                                       taskAttemptId.toString(),
                                       Values.FAILED.name(),
                                       String.valueOf(timestamp), hostName, error });
        }
      }
    }
    /**
     * Log killed reduce task attempt.
     * Delegates to the five-argument form with task type REDUCE.
     * @param taskAttemptId task attempt id
     * @param timestamp time stamp when task failed
     * @param hostName host name of the task attempt.
     * @param error error message of the task.
     * @deprecated Use
     * {@link #logKilled(TaskAttemptID, long, String, String, String)}
     */
    @Deprecated
    public static void logKilled(TaskAttemptID taskAttemptId, long timestamp,
                                 String hostName, String error) {
      logKilled(taskAttemptId, timestamp, hostName, error, Values.REDUCE.name());
    }
/**
 * Log killed reduce task attempt.
 *
 * @param taskAttemptId task attempt id
 * @param timestamp time stamp when task failed
 * @param hostName host name of the task attempt.
 * @param error error message of the task.
 * @param taskType Whether the attempt is cleanup or setup or reduce
 */
public static void logKilled(TaskAttemptID taskAttemptId, long timestamp,
                             String hostName, String error,
                             String taskType) {
  if (disableHistory) {
    return;
  }
  // History writers are keyed by the jobtracker-unique job id string.
  ArrayList<PrintWriter> writers =
      openJobs.get(JOBTRACKER_UNIQUE_STRING + taskAttemptId.getJobID());
  if (writers == null) {
    return;
  }
  Keys[] keys = new Keys[]{ Keys.TASK_TYPE, Keys.TASKID,
                            Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS,
                            Keys.FINISH_TIME, Keys.HOSTNAME,
                            Keys.ERROR };
  String[] values = new String[]{ taskType,
                                  taskAttemptId.getTaskID().toString(),
                                  taskAttemptId.toString(),
                                  Values.KILLED.name(),
                                  String.valueOf(timestamp),
                                  hostName, error };
  JobHistory.log(writers, RecordTypes.ReduceAttempt, keys, values);
}
}
/**
 * Callback interface for reading back log events from JobHistory. This interface
 * should be implemented and passed to JobHistory.parseHistory()
 *
 */
public static interface Listener{
  /**
   * Callback method for history parser.
   * @param recType type of record, which is the first entry in the line.
   * @param values a map of key-value pairs as they appear in history.
   * @throws IOException if the record cannot be processed by the handler.
   */
  public void handle(RecordTypes recType, Map<Keys, String> values) throws IOException;
}
/**
 * Delete history files older than one month. Update master index and remove all
 * jobs older than one month. Also if a job tracker has no jobs in last one month
 * remove reference to the job tracker.
 *
 */
public static class HistoryCleaner implements Runnable{
  static final long ONE_DAY_IN_MS = 24 * 60 * 60 * 1000L;
  static final long THIRTY_DAYS_IN_MS = 30 * ONE_DAY_IN_MS;
  // Captured at the start of each run; read by the FileFilter below.
  private long now;
  private static boolean isRunning = false;
  private static long lastRan;

  /**
   * Cleans up history data: deletes history files older than 30 days,
   * running at most once per day.
   */
  public void run(){
    if (isRunning){
      return;
    }
    now = System.currentTimeMillis();
    // Clean history at most once a day. BUG FIX: the original condition
    // (lastRan == 0 || ...) returned when lastRan was 0, i.e. on the very
    // first invocation, so lastRan was never set and the cleaner never ran.
    // Run when we have never run before, or when a full day has elapsed.
    if (lastRan != 0 && (now - lastRan) < ONE_DAY_IN_MS){
      return;
    }
    lastRan = now;
    isRunning = true;
    try {
      File[] oldFiles = new File(LOG_DIR).listFiles(new FileFilter(){
        public boolean accept(File file){
          // delete if older than 30 days
          return now - file.lastModified() > THIRTY_DAYS_IN_MS;
        }
      });
      // listFiles() returns null when LOG_DIR does not exist or is not
      // readable; the original code would NPE in that case.
      if (oldFiles != null) {
        for(File f : oldFiles){
          f.delete();
          LOG.info("Deleting old history file : " + f.getName());
        }
      }
    } finally {
      // Always clear the flag so a failure here cannot permanently
      // disable future cleanup runs.
      isRunning = false;
    }
  }
}
/**
 * Return the TaskLogsUrl of a particular TaskAttempt
 *
 * @param attempt
 * @return the taskLogsUrl. null if http-port or tracker-name or
 *         task-attempt-id are unavailable.
 */
public static String getTaskLogsUrl(JobHistory.TaskAttempt attempt) {
  // Pull the three pieces of the URL out of the attempt record once.
  String httpPort = attempt.get(Keys.HTTP_PORT);
  String trackerName = attempt.get(Keys.TRACKER_NAME);
  String attemptId = attempt.get(Keys.TASK_ATTEMPT_ID);
  // All three must be present; an empty string means "unavailable".
  if (httpPort.equals("") || trackerName.equals("") || attemptId.equals("")) {
    return null;
  }
  String taskTrackerName =
      JobInProgress.convertTrackerNameToHostName(trackerName);
  return TaskLogServlet.getTaskLogUrl(taskTrackerName, httpPort, attemptId);
}
}
| apache-2.0 |
amyvmiwei/hbase | hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java | 11657 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.util;
// this is deliberately not in the o.a.h.h.regionserver package
// in order to make sure all required classes/method are available
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
import org.apache.hadoop.hbase.regionserver.StoreScanner;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
@Category({MiscTests.class, MediumTests.class})
@RunWith(Parameterized.class)
public class TestCoprocessorScanPolicy {
  final Log LOG = LogFactory.getLog(getClass());
  protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  // Column family, qualifier and row key shared by all tests in this class.
  private static final byte[] F = Bytes.toBytes("fam");
  private static final byte[] Q = Bytes.toBytes("qual");
  private static final byte[] R = Bytes.toBytes("row");

  /**
   * Starts a mini cluster with the {@link ScanObserver} coprocessor
   * registered on every region.
   */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    Configuration conf = TEST_UTIL.getConfiguration();
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
        ScanObserver.class.getName());
    TEST_UTIL.startMiniCluster();
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /** Runs every test twice: parallel seek enabled and disabled. */
  @Parameters
  public static Collection<Object[]> parameters() {
    return HBaseTestingUtility.BOOLEAN_PARAMETERIZED;
  }

  public TestCoprocessorScanPolicy(boolean parallelSeekEnable) {
    TEST_UTIL.getMiniHBaseCluster().getConf()
        .setBoolean(StoreScanner.STORESCANNER_PARALLEL_SEEK_ENABLE, parallelSeekEnable);
  }

  /**
   * Verifies that a max-versions override communicated to the coprocessor
   * (via the "versions" Put attribute) limits the number of visible versions
   * on gets, flushes and compactions.
   */
  @Test
  public void testBaseCases() throws Exception {
    TableName tableName =
        TableName.valueOf("baseCases");
    if (TEST_UTIL.getHBaseAdmin().tableExists(tableName)) {
      TEST_UTIL.deleteTable(tableName);
    }
    Table t = TEST_UTIL.createTable(tableName, F, 1);
    // set the version override to 2
    Put p = new Put(R);
    p.setAttribute("versions", new byte[]{});
    p.add(F, tableName.getName(), Bytes.toBytes(2));
    t.put(p);
    long now = EnvironmentEdgeManager.currentTime();
    // insert 2 versions
    p = new Put(R);
    p.add(F, Q, now, Q);
    t.put(p);
    p = new Put(R);
    p.add(F, Q, now+1, Q);
    t.put(p);
    Get g = new Get(R);
    g.setMaxVersions(10);
    Result r = t.get(g);
    assertEquals(2, r.size());
    TEST_UTIL.flush(tableName);
    TEST_UTIL.compact(tableName, true);
    // both version are still visible even after a flush/compaction
    g = new Get(R);
    g.setMaxVersions(10);
    r = t.get(g);
    assertEquals(2, r.size());
    // insert a 3rd version
    p = new Put(R);
    p.add(F, Q, now+2, Q);
    t.put(p);
    g = new Get(R);
    g.setMaxVersions(10);
    r = t.get(g);
    // still only two version visible
    assertEquals(2, r.size());
    t.close();
  }

  /**
   * Verifies that a TTL override communicated to the coprocessor (via the
   * "ttl" Put attribute) extends the lifetime of cells past the column
   * family's 1s TTL, and that they still expire once the overridden TTL
   * elapses. Uses a manual clock so time can be rolled forward.
   */
  @Test
  public void testTTL() throws Exception {
    TableName tableName =
        TableName.valueOf("testTTL");
    if (TEST_UTIL.getHBaseAdmin().tableExists(tableName)) {
      TEST_UTIL.deleteTable(tableName);
    }
    HTableDescriptor desc = new HTableDescriptor(tableName);
    HColumnDescriptor hcd = new HColumnDescriptor(F)
        .setMaxVersions(10)
        .setTimeToLive(1);
    desc.addFamily(hcd);
    TEST_UTIL.getHBaseAdmin().createTable(desc);
    Table t = TEST_UTIL.getConnection().getTable(tableName);
    long now = EnvironmentEdgeManager.currentTime();
    ManualEnvironmentEdge me = new ManualEnvironmentEdge();
    me.setValue(now);
    EnvironmentEdgeManagerTestHelper.injectEdge(me);
    // 2s in the past
    long ts = now - 2000;
    // Set the TTL override to 3s
    Put p = new Put(R);
    p.setAttribute("ttl", new byte[]{});
    p.add(F, tableName.getName(), Bytes.toBytes(3000L));
    t.put(p);
    p = new Put(R);
    p.add(F, Q, ts, Q);
    t.put(p);
    p = new Put(R);
    p.add(F, Q, ts+1, Q);
    t.put(p);
    // these two should be expired but for the override
    // (their ts was 2s in the past)
    Get g = new Get(R);
    g.setMaxVersions(10);
    Result r = t.get(g);
    // still there?
    assertEquals(2, r.size());
    TEST_UTIL.flush(tableName);
    TEST_UTIL.compact(tableName, true);
    g = new Get(R);
    g.setMaxVersions(10);
    r = t.get(g);
    // still there?
    assertEquals(2, r.size());
    // roll time forward 2s.
    me.setValue(now + 2000);
    // now verify that data eventually does expire
    g = new Get(R);
    g.setMaxVersions(10);
    r = t.get(g);
    // should be gone now
    assertEquals(0, r.size());
    t.close();
    EnvironmentEdgeManager.reset();
  }

  /**
   * Region observer that lets the tests override TTL and max-versions per
   * table. Overrides are delivered through marker attributes on a Put
   * (intercepted and bypassed in {@link #prePut}), and applied by replacing
   * the ScanInfo used for flush, compaction and user scans.
   */
  public static class ScanObserver extends BaseRegionObserver {
    // Per-table overrides captured from marker Puts.
    private Map<TableName, Long> ttls =
        new HashMap<TableName, Long>();
    private Map<TableName, Integer> versions =
        new HashMap<TableName, Integer>();

    // lame way to communicate with the coprocessor,
    // since it is loaded by a different class loader
    @Override
    public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c, final Put put,
        final WALEdit edit, final Durability durability) throws IOException {
      if (put.getAttribute("ttl") != null) {
        // The qualifier carries the table name, the value the new TTL.
        Cell cell = put.getFamilyCellMap().values().iterator().next().get(0);
        KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
        ttls.put(TableName.valueOf(kv.getQualifier()), Bytes.toLong(kv.getValue()));
        // Marker put is consumed here; it must not reach the region.
        c.bypass();
      } else if (put.getAttribute("versions") != null) {
        Cell cell = put.getFamilyCellMap().values().iterator().next().get(0);
        KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
        versions.put(TableName.valueOf(kv.getQualifier()), Bytes.toInt(kv.getValue()));
        c.bypass();
      }
    }

    @Override
    public InternalScanner preFlushScannerOpen(
        final ObserverContext<RegionCoprocessorEnvironment> c,
        Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
      Long newTtl = ttls.get(store.getTableName());
      if (newTtl != null) {
        System.out.println("PreFlush:" + newTtl);
      }
      Integer newVersions = versions.get(store.getTableName());
      ScanInfo oldSI = store.getScanInfo();
      HColumnDescriptor family = store.getFamily();
      // Rebuild ScanInfo with the overridden TTL/versions, keeping the
      // family's settings for everything not overridden.
      ScanInfo scanInfo = new ScanInfo(family.getName(), family.getMinVersions(),
          newVersions == null ? family.getMaxVersions() : newVersions,
          newTtl == null ? oldSI.getTtl() : newTtl, family.getKeepDeletedCells(),
          oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
      Scan scan = new Scan();
      scan.setMaxVersions(newVersions == null ? oldSI.getMaxVersions() : newVersions);
      // NOTE(review): uses COMPACT_RETAIN_DELETES for a flush scanner —
      // confirm this is the intended scan type here.
      return new StoreScanner(store, scanInfo, scan, Collections.singletonList(memstoreScanner),
          ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(),
          HConstants.OLDEST_TIMESTAMP);
    }

    @Override
    public InternalScanner preCompactScannerOpen(
        final ObserverContext<RegionCoprocessorEnvironment> c,
        Store store, List<? extends KeyValueScanner> scanners, ScanType scanType,
        long earliestPutTs, InternalScanner s) throws IOException {
      Long newTtl = ttls.get(store.getTableName());
      Integer newVersions = versions.get(store.getTableName());
      ScanInfo oldSI = store.getScanInfo();
      HColumnDescriptor family = store.getFamily();
      // Same override logic as in preFlushScannerOpen, applied to compactions.
      ScanInfo scanInfo = new ScanInfo(family.getName(), family.getMinVersions(),
          newVersions == null ? family.getMaxVersions() : newVersions,
          newTtl == null ? oldSI.getTtl() : newTtl, family.getKeepDeletedCells(),
          oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
      Scan scan = new Scan();
      scan.setMaxVersions(newVersions == null ? oldSI.getMaxVersions() : newVersions);
      return new StoreScanner(store, scanInfo, scan, scanners, scanType,
          store.getSmallestReadPoint(), earliestPutTs);
    }

    @Override
    public KeyValueScanner preStoreScannerOpen(
        final ObserverContext<RegionCoprocessorEnvironment> c, Store store, final Scan scan,
        final NavigableSet<byte[]> targetCols, KeyValueScanner s) throws IOException {
      TableName tn = store.getTableName();
      // Leave system tables untouched; only user tables get overrides.
      if (!tn.isSystemTable()) {
        Long newTtl = ttls.get(store.getTableName());
        Integer newVersions = versions.get(store.getTableName());
        ScanInfo oldSI = store.getScanInfo();
        HColumnDescriptor family = store.getFamily();
        ScanInfo scanInfo = new ScanInfo(family.getName(), family.getMinVersions(),
            newVersions == null ? family.getMaxVersions() : newVersions,
            newTtl == null ? oldSI.getTtl() : newTtl, family.getKeepDeletedCells(),
            oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
        return new StoreScanner(store, scanInfo, scan, targetCols,
            ((HStore) store).getHRegion().getReadpoint(IsolationLevel.READ_COMMITTED));
      } else {
        return s;
      }
    }
  }
}
| apache-2.0 |
irhamiqbal/product-apim | modules/integration/tests-integration/tests-backend/src/test/java/org/wso2/am/integration/tests/other/SameVersionAPITestCase.java | 6997 | /*
*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.am.integration.tests.other;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import org.testng.annotations.*;
import org.wso2.am.admin.clients.webapp.WebAppAdminClient;
import org.wso2.am.integration.test.utils.base.APIMIntegrationBaseTest;
import org.wso2.am.integration.test.utils.base.APIMIntegrationConstants;
import org.wso2.am.integration.test.utils.bean.APICreationRequestBean;
import org.wso2.am.integration.test.utils.bean.APIResourceBean;
import org.wso2.am.integration.test.utils.clients.APIPublisherRestClient;
import org.wso2.am.integration.test.utils.generic.TestConfigurationProvider;
import org.wso2.am.integration.test.utils.webapp.WebAppDeploymentUtil;
import org.wso2.carbon.automation.engine.context.TestUserMode;
import org.wso2.carbon.automation.engine.frameworkutils.FrameworkPathUtil;
import org.wso2.carbon.automation.test.utils.common.FileManager;
import org.wso2.carbon.automation.test.utils.http.client.HttpResponse;
import javax.ws.rs.core.Response;
import java.io.File;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
/**
 * Integration test verifying that the API publisher rejects copying an API
 * to a version that already exists.
 */
public class SameVersionAPITestCase extends APIMIntegrationBaseTest {
  private static final Log log = LogFactory.getLog(SameVersionAPITestCase.class);
  private APIPublisherRestClient apiPublisher;
  private static final String API_NAME = "SameVersionAPITest";
  private static final String API_CONTEXT = "SameVersionAPI";
  // Deliberately identical: the copy must fail because the target version
  // is the same as the source version.
  private String version = "1.0.0";
  private String newVersion = "1.0.0";
  private String TAGS = "testtag1, testtag2";
  private String providerName;
  private String visibility = "public";
  private String description = "Test Description";
  private static final String WEB_APP_FILE_NAME = "jaxrs_basic";
  private String tier = APIMIntegrationConstants.API_TIER.GOLD;
  private String resTier = APIMIntegrationConstants.RESOURCE_TIER.ULTIMATE;
  private String endPointType = "http";

  @Factory(dataProvider = "userModeDataProvider")
  public SameVersionAPITestCase(TestUserMode userMode) {
    this.userMode = userMode;
  }

  /**
   * Deploys the jaxrs_basic backend webapp to the gateway and logs the
   * publisher client in.
   */
  @BeforeClass(alwaysRun = true)
  public void setEnvironment() throws Exception {
    super.init();
    String sourcePath = TestConfigurationProvider.getResourceLocation() + File.separator +
        "artifacts" + File.separator + "AM" + File.separator + "lifecycletest" + File.separator +
        "jaxrs_basic.war";
    String targetPath = FrameworkPathUtil.getCarbonHome() + File.separator + "repository" +
        File.separator + "deployment" + File.separator + "server" + File.separator + "webapps";
    FileManager.copyResourceToFileSystem(sourcePath, targetPath, "jaxrs_basic.war");
    String sessionId = createSession(gatewayContextWrk);
    WebAppAdminClient webAppAdminClient =
        new WebAppAdminClient(gatewayContextWrk.getContextUrls().getBackEndUrl(), sessionId);
    webAppAdminClient.uploadWarFile(sourcePath);
    // Block until the webapp is actually deployed before running tests.
    WebAppDeploymentUtil.isWebApplicationDeployed(gatewayContextWrk.getContextUrls().getBackEndUrl(),
        sessionId, WEB_APP_FILE_NAME);
    log.info("Web App Deployed");
    String publisherUrlHttp = publisherUrls.getWebAppURLHttp();
    apiPublisher = new APIPublisherRestClient(publisherUrlHttp);
    apiPublisher.login(publisherContext.getContextTenant().getContextUser().getUserName(),
        publisherContext.getContextTenant().getContextUser().getPassword());
  }

  /**
   * Creates an API and then attempts to copy it to the same version;
   * expects the publisher to respond with an "API already exists" error.
   */
  @Test(groups = "wso2am", description = "Copy Same Version")
  public void copySameVersion() throws Exception {
    String gatewayUrl;
    // Super-tenant URLs have no tenant prefix; tenant URLs are under /t/<domain>/.
    if (gatewayContextWrk.getContextTenant().getDomain().equals("carbon.super")) {
      gatewayUrl = gatewayUrlsWrk.getWebAppURLNhttp();
    } else {
      gatewayUrl = gatewayUrlsWrk.getWebAppURLNhttp() + "t/" + gatewayContextWrk.getContextTenant().getDomain() + "/";
    }
    String endpointUrl = gatewayUrl + "jaxrs_basic/services/customers/customerservice";
    providerName = publisherContext.getContextTenant().getContextUser().getUserName();
    List<APIResourceBean> resourceBeanList = new ArrayList<APIResourceBean>();
    resourceBeanList.add(new APIResourceBean("GET", "Application & Application User", resTier, "customers/{id}/"));
    resourceBeanList.add(new APIResourceBean("POST", "Application & Application User", resTier, "customers/name/"));
    APICreationRequestBean apiCreationRequestBean = new APICreationRequestBean(API_NAME, API_CONTEXT,
        version, providerName, new URL(endpointUrl));
    apiCreationRequestBean.setResourceBeanList(resourceBeanList);
    apiCreationRequestBean.setTags(TAGS);
    apiCreationRequestBean.setDescription(description);
    apiCreationRequestBean.setTier(tier);
    apiCreationRequestBean.setVisibility(visibility);
    apiCreationRequestBean.setEndpointType(endPointType);
    //add api
    HttpResponse apiAddRequest = apiPublisher.addAPI(apiCreationRequestBean);
    assertEquals(apiAddRequest.getResponseCode(), Response.Status.OK.getStatusCode(), "Invalid Response Code");
    assertTrue(apiAddRequest.getData().contains("{\"error\" : false}"), "Response Data Mismatched");
    //Copy api with same version
    HttpResponse copyAPIResponse = apiPublisher.copyAPI(providerName, API_NAME, version, newVersion, "");
    // The HTTP status is still 200; the error is reported in the payload.
    assertEquals(copyAPIResponse.getResponseCode(), Response.Status.OK.getStatusCode(), "Response Code Mismatched");
    assertTrue(copyAPIResponse.getData().contains("\"error\" : true"), "Response Data Mismatched. No error thrown.");
    assertTrue(copyAPIResponse.getData().contains("API already exists with version: " + version),
        "Response Data Mismatched.");
  }

  /** Removes the test API and cleans up the test environment. */
  @AfterClass(alwaysRun = true)
  public void destroy() throws Exception {
    apiPublisher.deleteAPI(API_NAME, version, providerName);
    super.cleanUp();
  }

  /** Runs the test as both a super-tenant admin and a tenant admin. */
  @DataProvider
  public static Object[][] userModeDataProvider() {
    return new Object[][]{
        new Object[]{TestUserMode.SUPER_TENANT_ADMIN},
        new Object[]{TestUserMode.TENANT_ADMIN},
    };
  }
}
| apache-2.0 |
Commonjava/indy | subsys/flatfile/src/main/java/org/commonjava/indy/subsys/datafile/change/DataFileEventManager.java | 1713 | /**
* Copyright (C) 2011-2020 Red Hat, Inc. (https://github.com/Commonjava/indy)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.commonjava.indy.subsys.datafile.change;
import org.commonjava.indy.audit.ChangeSummary;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Event;
import javax.inject.Inject;
import java.io.File;
import static org.commonjava.indy.change.EventUtils.fireEvent;
/**
 * Helper class to provide simple methods to handle null-checking, etc. around the firing of Indy filesystem events.
 */
@ApplicationScoped
public class DataFileEventManager
{
    @Inject
    private Event<DataFileEvent> events;

    /**
     * Fires the given event via the injected CDI event channel,
     * delegating null-safety to {@code EventUtils.fireEvent}.
     *
     * @param evt the event to fire
     */
    public void fire( final DataFileEvent evt )
    {
        fireEvent( events, evt );
    }

    /**
     * Fires an access event for the given file.
     *
     * @param file the data file that was read
     */
    public void accessed( final File file )
    {
        fire( new DataFileEvent( file ) );
    }

    /**
     * Fires a modification event for the given file.
     *
     * @param file the data file that was written
     * @param summary audit summary describing the change
     */
    public void modified( final File file, final ChangeSummary summary )
    {
        fire( new DataFileEvent( file, DataFileEventType.modified, summary ) );
    }

    /**
     * Fires a deletion event for the given file.
     *
     * @param file the data file that was deleted
     * @param summary audit summary describing the change
     */
    public void deleted( final File file, final ChangeSummary summary )
    {
        fire( new DataFileEvent( file, DataFileEventType.deleted, summary ) );
    }
}
| apache-2.0 |
wisebaldone/incubator-wave | wave/src/main/java/org/waveprotocol/wave/model/conversation/AnchorManager.java | 9214 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.waveprotocol.wave.model.conversation;
import org.waveprotocol.wave.model.util.CollectionUtils;
import org.waveprotocol.wave.model.util.CopyOnWriteSet;
import org.waveprotocol.wave.model.wave.ObservableMap;
import org.waveprotocol.wave.model.wave.SourcesEvents;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
/**
* Managers anchoring of transient parasites to transient items via fixed
* locations.
*
* A "location" is understood to mean a reference to an "item", where items are
* transient: over time, they come into existence and go out of existence. At
* each location, any number of "parasites" can be attached. Parasites can also
* be attached to no location. Parasites too are transient: they come and go
* over time.
*
* This class reveals all the parasites, as being either {@link #getAttached()
* attached} to an item (via a hidden location), or {@link #getUnattached()
* unattached} to any item (either because the parasites is not bound to a
* location, or it is bound to a location that has no item). This view is kept
* live.
*
*
* <ul>
* <li>the item to location mapping is a surjection (every item has exactly one
* location, and every location has at moast one item);</li>
* <li>the parasite to location mapping is a partial function (every parasite
* maps to at most one location, every location maps to any number of
* parasites); and</li>
* <li>the two mappings share the same co-domain (the meaning of 'locations' is
* the same)</li>
* </ul>
*
* @param <L> location type
* @param <I> item type
* @param <P> parasite type
*/
public final class AnchorManager<L, I, P> implements
    SourcesEvents<AnchorManager.Listener<? super I, ? super P>> {

  interface Listener<I, P> {
    /**
     * Notifies this listener that some parasites have been attached.
     *
     * This event occurs:
     * <ul>
     * <li>when new parasites are added to a location that has an item;</li>
     * <li>when new parasites are added to a location that has no item; and</li>
     * <li>when an item is added in a location to which parasites had previously
     * been added.</li>
     * </ul>
     *
     * @param item item to which the parasites are attached, or {@code null} for
     *        unanchored parasites
     * @param parasites newly attached parasites (non-empty)
     */
    void onAttached(I item, Collection<? extends P> parasites);

    /**
     * Notifies this listener that some parasites have been detached.
     *
     * This event occurs:
     * <ul>
     * <li>when new parasites are removed from a location that has an item;</li>
     * <li>when new parasites are removed from a location that has no item; and</li>
     * <li>when an item is removed from a location to which parasites are
     * attached.</li>
     * </ul>
     *
     * @param item item where the parasites were attached, or {@code null} for
     *        unanchored parasites
     * @param parasites newly detached parasites (non-empty)
     */
    void onDetached(I item, Collection<? extends P> parasites);
  }

  //
  // External helpers.
  //

  /** Maps location references to (transient) items. */
  private final ObservableMap<L, ? extends I> locationResolver;

  //
  // Internal state.
  //

  /** Parasites attached to items (via locations). */
  private final Map<I, Collection<P>> attached = CollectionUtils.newHashMap();

  /** Parasites unattached (non-existent item or location). */
  private final Map<L, Collection<P>> unattached = CollectionUtils.newHashMap();

  /** Internal observer of location map. */
  private final ObservableMap.Listener<L, I> itemObserver = new ObservableMap.Listener<L, I>() {
    @Override
    public void onEntryAdded(L location, I item) {
      AnchorManager.this.onEntryAdded(location, item);
    }

    @Override
    public void onEntryRemoved(L location, I item) {
      AnchorManager.this.onEntryRemoved(location, item);
    }
  };

  /** Listeners. */
  private final CopyOnWriteSet<Listener<? super I, ? super P>> listeners = CopyOnWriteSet.create();

  /**
   * Creates an anchor manager. Private: construction goes through
   * {@link #create}, which also registers the location observer.
   */
  private AnchorManager(ObservableMap<L, ? extends I> locationResolver) {
    this.locationResolver = locationResolver;
  }

  /**
   * Creates an anchor manager.
   *
   * @param items map from location references to items in those locations.
   * @return a new anchor manager.
   */
  public static <L, I, P> AnchorManager<L, I, P> create(ObservableMap<L, ? extends I> items) {
    AnchorManager<L, I, P> m = new AnchorManager<L, I, P>(items);
    m.init();
    return m;
  }

  /**
   * Observes the location map.
   */
  private void init() {
    locationResolver.addListener(itemObserver);
  }

  /**
   * Destroys this manager, releasing all resources it is using.
   */
  public void destroy() {
    locationResolver.removeListener(itemObserver);
  }

  /**
   * Attaches a parasite at a location, notifying listeners of the
   * {@link Listener#onAttached(Object, Collection) attachment} event.
   *
   * @param location
   * @param parasite
   */
  public void attachParasite(L location, P parasite) {
    // Does key point to something that exists yet?
    I item = locationResolver.get(location);
    if (item != null) {
      put(attached, item, parasite);
    } else {
      // No item at this location yet; the parasite is held until one appears.
      put(unattached, location, parasite);
    }
    // item may be null here, signalling an unanchored attachment.
    triggerOnAttached(item, parasite);
  }

  /**
   * Detaches a parasite from a location, notifying listeners of the
   * {@link Listener#onDetached(Object, Collection) detachment} event.
   *
   * Assumes a matching attach previously occurred at this location; the
   * internal collection for that key must exist.
   *
   * @param location
   * @param parasite
   */
  public void detachParasite(L location, P parasite) {
    I item = locationResolver.get(location);
    if (item != null) {
      remove(attached, item, parasite);
    } else {
      remove(unattached, location, parasite);
    }
    triggerOnDetached(item, parasite);
  }

  //
  // Helper methods for maps from keys to lazy collections.
  //

  /**
   * Puts a value in a lazy-collection map, creating the per-key collection
   * on first use.
   */
  private static <K, P> void put(Map<K, Collection<P>> map, K key, P value) {
    Collection<P> keyValues = map.get(key);
    if (keyValues == null) {
      keyValues = CollectionUtils.newHashSet();
      map.put(key, keyValues);
    }
    keyValues.add(value);
  }

  /**
   * Removes a value from a lazy-collection map, discarding the per-key
   * collection once it becomes empty.
   */
  private static <K, P> void remove(Map<K, Collection<P>> map, K key, P value) {
    Collection<P> keyValues = map.get(key);
    keyValues.remove(value);
    if (keyValues.isEmpty()) {
      map.remove(key);
    }
  }

  //
  // Location map events.
  //

  // An item appeared at a location: migrate any parasites waiting at that
  // location from "unattached" to "attached", firing detach-then-attach so
  // listeners see the parasites move from the null item to the new one.
  private void onEntryAdded(L location, I item) {
    Collection<P> parasites = unattached.remove(location);
    if (parasites != null) {
      attached.put(item, parasites);
      triggerOnDetached(null, parasites);
      triggerOnAttached(item, parasites);
    }
  }

  // An item disappeared from a location: migrate its parasites back to
  // "unattached", firing detach-then-attach in the reverse direction.
  private void onEntryRemoved(L location, I item) {
    Collection<P> parasites = attached.remove(item);
    if (parasites != null) {
      unattached.put(location, parasites);
      triggerOnDetached(item, parasites);
      triggerOnAttached(null, parasites);
    }
  }

  //
  // Anchoring state.
  //

  /** @return an unmodifiable live view of the item-to-parasites mapping. */
  public Map<I, Collection<P>> getAttached() {
    return Collections.unmodifiableMap(attached);
  }

  /** @return a snapshot of all parasites currently not attached to any item. */
  public Collection<P> getUnattached() {
    Collection<P> allUnattached = CollectionUtils.newArrayList();
    for (Collection<P> unattachedValues : unattached.values()) {
      allUnattached.addAll(unattachedValues);
    }
    return allUnattached;
  }

  //
  // Anchoring events.
  //

  @Override
  public void addListener(Listener<? super I, ? super P> listener) {
    listeners.add(listener);
  }

  @Override
  public void removeListener(Listener<? super I, ? super P> listener) {
    listeners.remove(listener);
  }

  // Single-parasite convenience overloads wrapping the collection variants.
  private void triggerOnAttached(I item, P parasite) {
    triggerOnAttached(item, Collections.singleton(parasite));
  }

  private void triggerOnDetached(I item, P parasite) {
    triggerOnDetached(item, Collections.singleton(parasite));
  }

  private void triggerOnAttached(I item, Collection<P> parasites) {
    for (Listener<? super I, ? super P> listener : listeners) {
      listener.onAttached(item, parasites);
    }
  }

  private void triggerOnDetached(I item, Collection<P> parasites) {
    for (Listener<? super I, ? super P> listener : listeners) {
      listener.onDetached(item, parasites);
    }
  }
}
| apache-2.0 |
codeaudit/OG-Platform | projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/model/volatility/local/DermanKaniImpliedBinomialTreeModel.java | 8191 | /**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.model.volatility.local;
import org.apache.commons.lang.Validate;
import org.threeten.bp.ZonedDateTime;
import com.opengamma.analytics.financial.model.option.definition.BinomialOptionModelDefinition;
import com.opengamma.analytics.financial.model.option.definition.CoxRossRubinsteinBinomialOptionModelDefinition;
import com.opengamma.analytics.financial.model.option.definition.EuropeanVanillaOptionDefinition;
import com.opengamma.analytics.financial.model.option.definition.OptionDefinition;
import com.opengamma.analytics.financial.model.option.definition.StandardOptionDataBundle;
import com.opengamma.analytics.financial.model.option.pricing.tree.BinomialOptionModel;
import com.opengamma.analytics.financial.model.tree.RecombiningBinomialTree;
import com.opengamma.util.time.DateUtils;
import com.opengamma.util.time.Expiry;
/**
* Implementation of the paper by Derman and Kani, The Volatility Smile and its Implied Tree (1994)
*/
public class DermanKaniImpliedBinomialTreeModel implements ImpliedTreeModel<OptionDefinition, StandardOptionDataBundle> {
  /** Cox-Ross-Rubinstein tree definition used to price the European calls/puts that calibrate the implied tree. */
  private static final BinomialOptionModelDefinition<OptionDefinition, StandardOptionDataBundle> CRR = new CoxRossRubinsteinBinomialOptionModelDefinition();
  /** Number of time steps in the implied tree. */
  private final int _n;
  /**
   * @param n The number of time steps in the tree, must be positive
   */
  public DermanKaniImpliedBinomialTreeModel(final int n) {
    Validate.isTrue(n > 0);
    _n = n;
  }
  /**
   * Builds the implied price tree and the corresponding local volatility tree using the
   * Derman-Kani construction: at each step the tree is filled outwards from its centre,
   * calibrated to European option prices computed with a CRR model, and the Arrow-Debreu
   * (state) prices are rolled forward step by step.
   *
   * @param definition The option definition (used for the time to expiry), not null
   * @param data The market data bundle (spot, rates, cost of carry, volatility surface), not null
   * @return The implied price tree and local volatility tree
   */
  @Override
  public ImpliedTreeResult getImpliedTrees(final OptionDefinition definition, final StandardOptionDataBundle data) {
    Validate.notNull(definition, "definition");
    Validate.notNull(data, "data");
    // Node counts at the final step (_n) and at the penultimate step (_n - 1)
    final int m1 = RecombiningBinomialTree.NODES.evaluate(_n);
    final int m2 = RecombiningBinomialTree.NODES.evaluate(_n - 1);
    final double[][] impliedTree = new double[_n + 1][m1]; //TODO this wastes space
    final double[] transitionProbabilities = new double[m2];
    double[] arrowDebreu = new double[m1];
    final double[][] localVolatilityTree = new double[_n][m2];
    final double dt = definition.getTimeToExpiry(data.getDate()) / _n;
    double t = 0;
    final double spot = data.getSpot();
    // Root of the tree: today's spot, with an Arrow-Debreu price of 1
    impliedTree[0][0] = spot;
    arrowDebreu[0] = 1;
    int previousNodes = 1;
    final ZonedDateTime date = data.getDate();
    for (int i = 1; i < _n + 1; i++) {
      final int nodes = RecombiningBinomialTree.NODES.evaluate(i);
      final BinomialOptionModel<StandardOptionDataBundle> crrModel = new BinomialOptionModel<>(CRR, i);
      t += dt;
      // Growth factors over this step from the interest rate and the cost of carry
      final double df1 = Math.exp(dt * data.getInterestRate(t));
      final double df2 = Math.exp(dt * data.getCostOfCarry());
      final Expiry expiry = new Expiry(DateUtils.getDateOffsetWithYearFraction(date, t));
      final int mid = i / 2;
      if (i % 2 == 0) {
        // Even step (odd number of nodes): centre the middle node on the spot, then fill outwards in both directions
        impliedTree[i][mid] = spot;
        addUpperNodes(data, impliedTree, arrowDebreu, i, crrModel, df1, df2, expiry, mid + 1);
        addLowerNodes(data, impliedTree, arrowDebreu, i, crrModel, df1, df2, expiry, mid - 1);
      } else {
        // Odd step (even number of nodes): solve for the node above the centre from an ATM call price,
        // then pin the node below so the two central nodes have geometric mean equal to the spot
        final double c = crrModel.getTreeGeneratingFunction(new EuropeanVanillaOptionDefinition(spot, expiry, true)).evaluate(data).getNode(0, 0).second;
        final double sigma = getUpperSigma(impliedTree, arrowDebreu, i - 1, df2, mid + 1);
        impliedTree[i][mid + 1] = spot * (df1 * c + arrowDebreu[mid] * spot - sigma) / (arrowDebreu[mid] * impliedTree[i - 1][mid] * df2 - df1 * c + sigma);
        impliedTree[i][mid] = spot * spot / impliedTree[i][mid + 1];
        addUpperNodes(data, impliedTree, arrowDebreu, i, crrModel, df1, df2, expiry, mid + 2);
        addLowerNodes(data, impliedTree, arrowDebreu, i, crrModel, df1, df2, expiry, mid - 1);
      }
      // Transition probabilities from each node at step i - 1 into step i, and the implied local volatilities
      for (int j = 0; j < previousNodes; j++) {
        final double f = impliedTree[i - 1][j] * df2; // forward of node j over this step
        transitionProbabilities[j] = (f - impliedTree[i][j]) / (impliedTree[i][j + 1] - impliedTree[i][j]);
        //TODO emcleod 31-8-10 Need to check that transition probabilities are positive - use adjustment suggested in "The Volatility Smile and its Implied Tree"
        localVolatilityTree[i - 1][j] = Math.sqrt(transitionProbabilities[j] * (1 - transitionProbabilities[j])) * Math.log(impliedTree[i][j + 1] / impliedTree[i][j]); //TODO need 1/sqrt(dt) here
      }
      // Roll the Arrow-Debreu prices forward one step: edge nodes are reached from one parent,
      // interior nodes from two, each contribution discounted by df1
      final double[] temp = new double[m1];
      temp[0] = (1 - transitionProbabilities[0]) * arrowDebreu[0] / df1;
      temp[nodes - 1] = (transitionProbabilities[previousNodes - 1] * arrowDebreu[previousNodes - 1]) / df1;
      for (int j = 1; j < nodes - 1; j++) {
        temp[j] = (transitionProbabilities[j - 1] * arrowDebreu[j - 1] + (1 - transitionProbabilities[j]) * arrowDebreu[j]) / df1;
      }
      arrowDebreu = temp;
      previousNodes = nodes;
    }
    // Box the primitive arrays into the Double trees expected by the result object
    final Double[][] impliedTreeResult = new Double[_n + 1][m1];
    final Double[][] localVolResult = new Double[_n][m2];
    for (int i = 0; i < impliedTree.length; i++) {
      for (int j = 0; j < impliedTree[i].length; j++) {
        impliedTreeResult[i][j] = impliedTree[i][j];
        if (i < _n && j < m2) {
          localVolResult[i][j] = localVolatilityTree[i][j];
        }
      }
    }
    return new ImpliedTreeResult(new RecombiningBinomialTree<>(impliedTreeResult), new RecombiningBinomialTree<>(localVolResult));
  }
  /**
   * Fills in the nodes of step {@code step} below index {@code mid} (working downwards to index 0),
   * calibrating each node to a European put struck at the parent node's price and adjusting the
   * running sum {@code sigma} of Arrow-Debreu-weighted terms as the loop descends.
   */
  private void addLowerNodes(final StandardOptionDataBundle data, final double[][] impliedTree, final double[] arrowDebreu, final int step,
      final BinomialOptionModel<StandardOptionDataBundle> crrModel, final double df1, final double df2, final Expiry expiry, final int mid) {
    double sigma = getLowerSigma(impliedTree, arrowDebreu, step - 1, df2, mid);
    for (int i = mid; i >= 0; i--) {
      final double p = crrModel.getTreeGeneratingFunction(new EuropeanVanillaOptionDefinition(impliedTree[step - 1][i], expiry, false)).evaluate(data).getNode(0, 0).second;
      final double forward = impliedTree[step - 1][i] * df2;
      impliedTree[step][i] = (impliedTree[step][i + 1] * (df1 * p - sigma) + arrowDebreu[i] * impliedTree[step - 1][i] * (forward - impliedTree[step][i + 1]))
          / (df1 * p - sigma + arrowDebreu[i] * (forward - impliedTree[step][i + 1]));
      if (i > 0) {
        sigma -= arrowDebreu[i - 1] * (impliedTree[step - 1][i] - impliedTree[step - 1][i - 1] * df2);
      }
    }
  }
  /**
   * Fills in the nodes of step {@code step} from index {@code mid} upwards to the top of the step,
   * calibrating each node to a European call struck at the parent node's price and adjusting the
   * running sum {@code sigma} of Arrow-Debreu-weighted terms as the loop ascends.
   */
  private void addUpperNodes(final StandardOptionDataBundle data, final double[][] impliedTree, final double[] arrowDebreu, final int step,
      final BinomialOptionModel<StandardOptionDataBundle> crrModel, final double df1, final double df2, final Expiry expiry, final int mid) {
    double sigma = getUpperSigma(impliedTree, arrowDebreu, step - 1, df2, mid);
    for (int i = mid; i < RecombiningBinomialTree.NODES.evaluate(step); i++) {
      final double c = crrModel.getTreeGeneratingFunction(new EuropeanVanillaOptionDefinition(impliedTree[step - 1][i - 1], expiry, true)).evaluate(data).getNode(0, 0).second;
      final double forward = impliedTree[step - 1][i - 1] * df2;
      impliedTree[step][i] = (impliedTree[step][i - 1] * (df1 * c - sigma) - arrowDebreu[i - 1] * impliedTree[step - 1][i - 1] * (forward - impliedTree[step][i - 1]))
          / (df1 * c - sigma - arrowDebreu[i - 1] * (forward - impliedTree[step][i - 1]));
      sigma -= arrowDebreu[i] * (impliedTree[step - 1][i] * df2 - impliedTree[step - 1][i - 1]);
    }
  }
  /**
   * Sums, over the nodes of {@code previousStep} strictly below {@code start}, the Arrow-Debreu
   * price times the difference between the node at {@code start} and each lower node's forward.
   */
  private double getLowerSigma(final double[][] impliedTree, final double[] arrowDebreu, final int previousStep, final double df2, final int start) {
    double sigma = 0;
    for (int i = start - 1; i >= 0; i--) {
      sigma += arrowDebreu[i] * (impliedTree[previousStep][start] - impliedTree[previousStep][i] * df2);
    }
    return sigma;
  }
  /**
   * Sums, over the nodes of {@code previousStep} from {@code start} upwards, the Arrow-Debreu
   * price times the difference between each node's forward and the node below {@code start}.
   */
  private double getUpperSigma(final double[][] impliedTree, final double[] arrowDebreu, final int previousStep, final double df2, final int start) {
    double sigma = 0;
    for (int i = start; i < RecombiningBinomialTree.NODES.evaluate(previousStep + 1); i++) {
      sigma += arrowDebreu[i] * (impliedTree[previousStep][i] * df2 - impliedTree[previousStep][start - 1]);
    }
    return sigma;
  }
}
| apache-2.0 |
topicusonderwijs/wicket | wicket-core/src/test/java/org/apache/wicket/markup/html/form/NestedFormsPage.java | 4450 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.markup.html.form;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.form.AjaxSubmitLink;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.MarkupStream;
import org.apache.wicket.markup.html.WebPage;
import org.apache.wicket.markup.html.form.validation.EqualInputValidator;
import org.apache.wicket.markup.html.link.Link;
import org.apache.wicket.markup.html.panel.FeedbackPanel;
import org.apache.wicket.model.CompoundPropertyModel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Gerolf Seitz
*/
public class NestedFormsPage extends WebPage
{
	private static final long serialVersionUID = 1L;
	static Logger logger = LoggerFactory.getLogger(NestedFormsPage.class);
	// Feedback panel shared by all forms; repainted by the AJAX submit links
	private final FeedbackPanel feedback;
	// Concatenation of form ids in the order their onSubmit callbacks fired
	public String submitOrder = "";
	// Concatenation of form ids in the order their onError callbacks fired
	public String errorOrder = "";
	/**
	 * Construct. Builds three NestableForms nested inside each other
	 * (outerForm > middleForm > innerForm), each with markup ids enabled.
	 */
	public NestedFormsPage()
	{
		feedback = new FeedbackPanel("feedback");
		add(feedback.setOutputMarkupId(true));
		Form<?> outerForm = new NestableForm("outerForm");
		add(outerForm.setOutputMarkupId(true));
		Form<?> middleForm = new NestableForm("middleForm");
		outerForm.add(middleForm.setOutputMarkupId(true));
		Form<?> innerForm = new NestableForm("innerForm");
		middleForm.add(innerForm.setOutputMarkupId(true));
	}
	/**
	 * A form that records whether (and in which order) its onSubmit/onError callbacks were
	 * invoked. Contains two text fields validated for equality, an AJAX submit link, a
	 * regular submit link and a link that toggles the form's enabled state.
	 *
	 * @author Gerolf Seitz
	 */
	public class NestableForm extends Form<NestableForm>
	{
		private static final long serialVersionUID = 1L;
		// Model values for the two text fields; equal so the EqualInputValidator passes by default
		private final String first = "test";
		private final String second = "test";
		/** Set to true once onSubmit has been called on this form */
		public boolean onSubmitCalled = false;
		/** Set to true once onError has been called on this form */
		public boolean onErrorCalled = false;
		/**
		 * Construct.
		 *
		 * @param id
		 *            the form's id
		 */
		public NestableForm(String id)
		{
			super(id);
			// CompoundPropertyModel binds the child components' ids to this form's fields
			setDefaultModel(new CompoundPropertyModel<NestableForm>(this));
			TextField<String> firstField = new RequiredTextField<String>("first");
			TextField<String> secondField = new TextField<String>("second");
			add(firstField);
			add(secondField);
			add(new EqualInputValidator(firstField, secondField));
			add(new AjaxSubmitLink("ajaxSubmit", this)
			{
				private static final long serialVersionUID = 1L;
				@Override
				protected void onSubmit(AjaxRequestTarget target, Form<?> form)
				{
					// Repaint the feedback panel so messages show up after an AJAX submit
					target.add(feedback);
				}
				@Override
				protected void onError(AjaxRequestTarget target, Form<?> form)
				{
					target.add(feedback);
				}
			});
			add(new ToggleLink("toggle", this));
			add(new SubmitLink("submit"));
		}
		@Override
		protected void onSubmit()
		{
			super.onSubmit();
			// Record that and in which order this form was submitted
			onSubmitCalled = true;
			logger.info(getId() + ".onSubmit");
			submitOrder += getId();
		}
		@Override
		protected void onError()
		{
			super.onError();
			// Record that and in which order this form failed validation
			onErrorCalled = true;
			logger.info(getId() + ".onError");
			errorOrder += getId();
		}
	}
	/**
	 * Link that flips the enabled state of the given form on each click and renders the
	 * current state ("form is enabled"/"form is disabled") as its body text.
	 */
	private class ToggleLink extends Link<Void>
	{
		private static final long serialVersionUID = 1L;
		private final Form<?> form;
		public ToggleLink(String id, Form<?> form)
		{
			super(id);
			this.form = form;
		}
		@Override
		public void onClick()
		{
			form.setEnabled(!form.isEnabled());
			form.info(form.getId() + ".isEnabled() == " + form.isEnabled());
		}
		@Override
		public void onComponentTagBody(MarkupStream markupStream, ComponentTag openTag)
		{
			String state = form.isEnabled() ? "enabled" : "disabled";
			replaceComponentTagBody(markupStream, openTag, "form is " + state);
		}
	}
}
| apache-2.0 |
DevStreet/FinanceAnalytics | projects/OG-Financial/src/main/java/com/opengamma/financial/analytics/model/sabrcube/defaultproperties/SABRRightExtrapolationDefaults.java | 9524 | /**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.analytics.model.sabrcube.defaultproperties;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.opengamma.core.security.Security;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.value.SurfaceAndCubePropertyNames;
import com.opengamma.engine.value.ValuePropertyNames;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueRequirementNames;
import com.opengamma.financial.analytics.OpenGammaFunctionExclusions;
import com.opengamma.financial.analytics.conversion.SwapSecurityUtils;
import com.opengamma.financial.analytics.fixedincome.InterestRateInstrumentType;
import com.opengamma.financial.analytics.model.sabrcube.SABRRightExtrapolationFunction;
import com.opengamma.financial.analytics.model.volatility.SmileFittingPropertyNamesAndValues;
import com.opengamma.financial.property.DefaultPropertyFunction;
import com.opengamma.financial.security.FinancialSecurityTypes;
import com.opengamma.financial.security.FinancialSecurityUtils;
import com.opengamma.financial.security.swap.SwapSecurity;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.Currency;
/**
* Defaults for SABR functions with right extrapolation
* @deprecated The functions to which these defaults apply are deprecated.
*/
@Deprecated
public class SABRRightExtrapolationDefaults extends DefaultPropertyFunction {
  /** The logger */
  private static final Logger s_logger = LoggerFactory.getLogger(SABRRightExtrapolationDefaults.class);
  /** The value requirements for which these defaults apply */
  private static final String[] VALUE_REQUIREMENTS = new String[] {
    ValueRequirementNames.PRESENT_VALUE,
    ValueRequirementNames.PRESENT_VALUE_CURVE_SENSITIVITY,
    ValueRequirementNames.PRESENT_VALUE_SABR_ALPHA_SENSITIVITY,
    ValueRequirementNames.PRESENT_VALUE_SABR_RHO_SENSITIVITY,
    ValueRequirementNames.PRESENT_VALUE_SABR_NU_SENSITIVITY,
    ValueRequirementNames.PRESENT_VALUE_SABR_ALPHA_NODE_SENSITIVITY,
    ValueRequirementNames.PRESENT_VALUE_SABR_RHO_NODE_SENSITIVITY,
    ValueRequirementNames.PRESENT_VALUE_SABR_NU_NODE_SENSITIVITY,
    ValueRequirementNames.YIELD_CURVE_NODE_SENSITIVITIES,
  };
  /** The SABR surface fitting method */
  private final String _fittingMethod;
  /** The extrapolation cutoff */
  private final String _cutoff;
  /** Mu */
  private final String _mu;
  /**
   * A map from currency to (curve config, cube definition, cube specification, forward swap surface definition,
   * forward swap surface specification) names
   */
  private final Map<String, List<String>> _currencyAndConfigNames;
  /**
   * @param fittingMethod The fitting method name, not null
   * @param cutoff The extrapolation cutoff, not null
   * @param mu Mu, not null
   * @param currencyAndConfigNames A list of either (currency, curve config, cube) triples or
   * (currency, cube definition name, cube specification name, forward surface definition name,
   * forward surface specification name) tuples, not null
   */
  public SABRRightExtrapolationDefaults(final String fittingMethod, final String cutoff, final String mu,
      final String... currencyAndConfigNames) {
    super(FinancialSecurityTypes.SWAPTION_SECURITY
        .or(FinancialSecurityTypes.SWAP_SECURITY)
        .or(FinancialSecurityTypes.CAP_FLOOR_SECURITY)
        .or(FinancialSecurityTypes.CAP_FLOOR_CMS_SPREAD_SECURITY),
        true);
    ArgumentChecker.notNull(fittingMethod, "fittingMethod");
    ArgumentChecker.notNull(cutoff, "cutoff");
    ArgumentChecker.notNull(mu, "mu");
    ArgumentChecker.notNull(currencyAndConfigNames, "currencyAndConfigNames");
    _fittingMethod = fittingMethod;
    _cutoff = cutoff;
    _mu = mu;
    final int nConfigs = currencyAndConfigNames.length;
    _currencyAndConfigNames = new HashMap<>();
    // Detect which of the two supported argument formats was supplied: the old format has
    // (currency, curve config, cube name) triples, so every third entry parses as a currency ISO.
    boolean oldConfigs = true;
    ArgumentChecker.isTrue(nConfigs % 3 == 0, "Incorrect number of default arguments");
    for (int i = 0; i < nConfigs; i += 3) {
      // Sets cube definition and specification and forward surface definition and specification names equal
      // to the argument after the curve config. This will not work correctly all of the time (e.g. if some
      // of the cube / surface config names could be parsed as a currency ISO. This code is here to maintain
      // backwards compatibility with code in SABRFunction that did not set these properties explicitly
      try {
        Currency.of(currencyAndConfigNames[i]);
      } catch (final IllegalArgumentException e) {
        // One non-currency entry is enough to rule out the old format; no need to scan further
        oldConfigs = false;
        break;
      }
    }
    if (oldConfigs) {
      // Old format: reuse the single cube name for all four cube / surface properties
      for (int i = 0; i < nConfigs; i += 3) {
        final String cubeAndSurfaceName = currencyAndConfigNames[i + 2];
        final List<String> configs = Arrays.asList(currencyAndConfigNames[i + 1], cubeAndSurfaceName, cubeAndSurfaceName,
            cubeAndSurfaceName, cubeAndSurfaceName);
        _currencyAndConfigNames.put(currencyAndConfigNames[i], configs);
      }
    } else {
      // New format: explicit 6-tuples of (currency, curve config, cube def, cube spec, surface def, surface spec)
      ArgumentChecker.isTrue(nConfigs % 6 == 0, "Incorrect number of default arguments");
      for (int i = 0; i < nConfigs; i += 6) {
        final List<String> configs = Arrays.asList(currencyAndConfigNames[i + 1], currencyAndConfigNames[i + 2], currencyAndConfigNames[i + 3],
            currencyAndConfigNames[i + 4], currencyAndConfigNames[i + 5]);
        _currencyAndConfigNames.put(currencyAndConfigNames[i], configs);
      }
    }
  }
  /**
   * Applies only to the CMS-related swap types (and the swaption / cap-floor security types declared
   * in the constructor) whose currency has configured defaults.
   */
  @Override
  public boolean canApplyTo(final FunctionCompilationContext context, final ComputationTarget target) {
    final Security security = target.getSecurity();
    if (security instanceof SwapSecurity) {
      if (!InterestRateInstrumentType.isFixedIncomeInstrumentType((SwapSecurity) security)) {
        return false;
      }
      final InterestRateInstrumentType type = SwapSecurityUtils.getSwapType((SwapSecurity) security);
      if ((type != InterestRateInstrumentType.SWAP_FIXED_CMS) &&
          (type != InterestRateInstrumentType.SWAP_CMS_CMS) &&
          (type != InterestRateInstrumentType.SWAP_IBOR_CMS)) {
        return false;
      }
    }
    final String currencyName = FinancialSecurityUtils.getCurrency(security).getCode();
    return _currencyAndConfigNames.containsKey(currencyName);
  }
  @Override
  protected void getDefaults(final PropertyDefaults defaults) {
    // Register every defaultable property name against every value requirement handled here
    for (final String valueRequirement : VALUE_REQUIREMENTS) {
      defaults.addValuePropertyName(valueRequirement, ValuePropertyNames.CURVE_CALCULATION_CONFIG);
      defaults.addValuePropertyName(valueRequirement, SurfaceAndCubePropertyNames.PROPERTY_CUBE_DEFINITION);
      defaults.addValuePropertyName(valueRequirement, SurfaceAndCubePropertyNames.PROPERTY_CUBE_SPECIFICATION);
      defaults.addValuePropertyName(valueRequirement, SurfaceAndCubePropertyNames.PROPERTY_SURFACE_DEFINITION);
      defaults.addValuePropertyName(valueRequirement, SurfaceAndCubePropertyNames.PROPERTY_SURFACE_SPECIFICATION);
      defaults.addValuePropertyName(valueRequirement, SmileFittingPropertyNamesAndValues.PROPERTY_FITTING_METHOD);
      defaults.addValuePropertyName(valueRequirement, SABRRightExtrapolationFunction.PROPERTY_CUTOFF_STRIKE);
      defaults.addValuePropertyName(valueRequirement, SABRRightExtrapolationFunction.PROPERTY_TAIL_THICKNESS_PARAMETER);
    }
  }
  /**
   * Returns the default value for the requested property: the fitting method, cutoff and mu are
   * global to this instance, while the curve / cube / surface names are looked up per currency.
   * Returns null when the property is not one handled by this function.
   */
  @Override
  protected Set<String> getDefaultValue(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue, final String propertyName) {
    final String currencyName = FinancialSecurityUtils.getCurrency(target.getSecurity()).getCode();
    if (!_currencyAndConfigNames.containsKey(currencyName)) {
      // canApplyTo should have filtered this out already; use parameterized logging rather than string concatenation
      s_logger.error("Could not get configs for currency {}; should never happen", currencyName);
      return null;
    }
    if (SmileFittingPropertyNamesAndValues.PROPERTY_FITTING_METHOD.equals(propertyName)) {
      return Collections.singleton(_fittingMethod);
    }
    if (SABRRightExtrapolationFunction.PROPERTY_CUTOFF_STRIKE.equals(propertyName)) {
      return Collections.singleton(_cutoff);
    }
    if (SABRRightExtrapolationFunction.PROPERTY_TAIL_THICKNESS_PARAMETER.equals(propertyName)) {
      return Collections.singleton(_mu);
    }
    final List<String> configs = _currencyAndConfigNames.get(currencyName);
    if (ValuePropertyNames.CURVE_CALCULATION_CONFIG.equals(propertyName)) {
      return Collections.singleton(configs.get(0));
    }
    if (SurfaceAndCubePropertyNames.PROPERTY_CUBE_DEFINITION.equals(propertyName)) {
      return Collections.singleton(configs.get(1));
    }
    if (SurfaceAndCubePropertyNames.PROPERTY_CUBE_SPECIFICATION.equals(propertyName)) {
      return Collections.singleton(configs.get(2));
    }
    if (SurfaceAndCubePropertyNames.PROPERTY_SURFACE_DEFINITION.equals(propertyName)) {
      return Collections.singleton(configs.get(3));
    }
    if (SurfaceAndCubePropertyNames.PROPERTY_SURFACE_SPECIFICATION.equals(propertyName)) {
      return Collections.singleton(configs.get(4));
    }
    return null;
  }
  @Override
  public String getMutualExclusionGroup() {
    return OpenGammaFunctionExclusions.SABR_FITTING_DEFAULTS;
  }
}
| apache-2.0 |
AndreasAbdi/jackrabbit-oak | oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/cache/OffHeapCache.java | 1619 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.document.cache;
import java.util.Map;
import javax.annotation.Nullable;
import com.google.common.cache.Cache;
import org.apache.jackrabbit.oak.cache.CacheStats;
import org.apache.jackrabbit.oak.cache.CacheValue;
import org.apache.jackrabbit.oak.plugins.document.CachedNodeDocument;
import org.apache.jackrabbit.oak.plugins.document.NodeDocument;
/**
* An OffHeap cache manages the cache value in an off heap storage.
*
* This interface is required to avoid direct dependency on DirectMemory
* and Kryo classes
*/
public interface OffHeapCache extends Cache<CacheValue, NodeDocument> {
    /**
     * Map view of the documents currently held in the off-heap store, keyed by cache key.
     *
     * @return the off-heap entries
     */
    Map<CacheValue, ? extends CachedNodeDocument> offHeapEntriesMap();
    /**
     * @return statistics for this cache
     */
    CacheStats getCacheStats();
    /**
     * Returns the cached document for the given id, or null when none is cached.
     *
     * @param id the document id
     * @return the cached document, or null if absent
     */
    @Nullable
    CachedNodeDocument getCachedDocument(String id);
}
| apache-2.0 |
sdnwiselab/onos | drivers/default/src/main/java/org/onosproject/driver/extensions/codec/NiciraSetNshContextHeaderCodec.java | 2693 | /*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.driver.extensions.codec;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.onosproject.codec.CodecContext;
import org.onosproject.codec.JsonCodec;
import org.onosproject.driver.extensions.NiciraSetNshContextHeader;
import org.onosproject.net.NshContextHeader;
import org.onosproject.net.flow.instructions.ExtensionTreatmentType;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.onlab.util.Tools.nullIsIllegal;
/**
* JSON Codec for NiciraSetNshContextHeader class.
*/
public class NiciraSetNshContextHeaderCodec extends JsonCodec<NiciraSetNshContextHeader> {
    // JSON member names
    private static final String NSH_CONTEXT_HEADER = "nshch";
    private static final String TYPE = "type";
    private static final String MISSING_MEMBER_MESSAGE = " member is required in NiciraSetNshContextHeader";
    /**
     * Serializes a NiciraSetNshContextHeader to a JSON object carrying the
     * context-header value and the extension treatment type.
     */
    @Override
    public ObjectNode encode(NiciraSetNshContextHeader niciraSetNshContextHeader, CodecContext context) {
        checkNotNull(niciraSetNshContextHeader, "Nicira Set Nsh Context Header cannot be null");
        final ObjectNode result = context.mapper().createObjectNode();
        result.put(NSH_CONTEXT_HEADER, niciraSetNshContextHeader.nshCh().nshContextHeader());
        result.put(TYPE, niciraSetNshContextHeader.type().type());
        return result;
    }
    /**
     * Deserializes a NiciraSetNshContextHeader from JSON. Both members are
     * mandatory; returns null for a missing or non-object input node.
     */
    @Override
    public NiciraSetNshContextHeader decode(ObjectNode json, CodecContext context) {
        if (json == null || !json.isObject()) {
            return null;
        }
        // Mandatory nsh context header value
        final int rawContextHeader = nullIsIllegal(json.get(NSH_CONTEXT_HEADER),
                NSH_CONTEXT_HEADER + MISSING_MEMBER_MESSAGE).asInt();
        // Mandatory extension treatment type
        final int rawType = nullIsIllegal(json.get(TYPE),
                TYPE + MISSING_MEMBER_MESSAGE).asInt();
        return new NiciraSetNshContextHeader(NshContextHeader.of(rawContextHeader),
                new ExtensionTreatmentType(rawType));
    }
}
| apache-2.0 |
jeorme/OG-Platform | projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/provider/description/forex/BlackForexVannaVolgaProvider.java | 5273 | /**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.provider.description.forex;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang.ObjectUtils;
import com.opengamma.analytics.financial.model.option.definition.SmileDeltaParameters;
import com.opengamma.analytics.financial.model.volatility.surface.SmileDeltaTermStructureParameters;
import com.opengamma.analytics.financial.provider.description.interestrate.MulticurveProviderInterface;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.ForwardSensitivity;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.Currency;
import com.opengamma.util.tuple.DoublesPair;
import com.opengamma.util.tuple.Pair;
/**
* Interface for Forex Black with smile parameters provider for a currency pair.
*/
public class BlackForexVannaVolgaProvider implements BlackForexVannaVolgaProviderInterface {
/**
* The multicurve provider.
*/
private final MulticurveProviderInterface _multicurveProvider;
/**
* The volatility model for one currency pair.
*/
private final SmileDeltaTermStructureParameters _smile;
/**
* The currency pair for which the volatility data are valid.
*/
private final Pair<Currency, Currency> _currencyPair;
/**
* Constructor from exiting multicurveProvider and volatility model. The given provider and parameters are used for the new provider (the same maps are used, not copied).
* @param multicurves The multi-curves provider.
* @param smile Smile.
* @param currencyPair The currency pair.
*/
public BlackForexVannaVolgaProvider(final MulticurveProviderInterface multicurves, final SmileDeltaTermStructureParameters smile, final Pair<Currency, Currency> currencyPair) {
ArgumentChecker.notNull(multicurves, "multicurves");
ArgumentChecker.notNull(smile, "smile");
ArgumentChecker.notNull(currencyPair, "currencyPair");
_multicurveProvider = multicurves;
_smile = smile;
_currencyPair = currencyPair;
}
@Override
public BlackForexVannaVolgaProvider copy() {
final MulticurveProviderInterface multicurveProvider = _multicurveProvider.copy();
return new BlackForexVannaVolgaProvider(multicurveProvider, _smile, _currencyPair);
}
@Override
public SmileDeltaTermStructureParameters getVolatility() {
return _smile;
}
@Override
public Pair<Currency, Currency> getCurrencyPair() {
return _currencyPair;
}
@Override
public boolean checkCurrencies(final Currency ccy1, final Currency ccy2) {
if ((ccy1.equals(_currencyPair.getFirst())) && ccy2.equals(_currencyPair.getSecond())) {
return true;
}
if ((ccy2.equals(_currencyPair.getFirst())) && ccy1.equals(_currencyPair.getSecond())) {
return true;
}
return false;
}
@Override
public MulticurveProviderInterface getMulticurveProvider() {
return _multicurveProvider;
}
/**
* Returns volatility smile for an expiration.
* @param ccy1 The first currency.
* @param ccy2 The second currency.
* @param time The expiration time.
* @return The smile.
*/
@Override
public SmileDeltaParameters getSmile(final Currency ccy1, final Currency ccy2, final double time) {
ArgumentChecker.notNull(ccy1, "first currency");
ArgumentChecker.notNull(ccy2, "second currency");
ArgumentChecker.isTrue(checkCurrencies(ccy1, ccy2), "Incomptabile currencies");
final SmileDeltaParameters smile = _smile.getSmileForTime(time);
if (ccy1.equals(getCurrencyPair().getFirst()) && ccy2.equals(getCurrencyPair().getSecond())) {
return smile;
}
throw new NotImplementedException("Currency pair is not in expected order " + getCurrencyPair().toString());
}
@Override
public double[] parameterSensitivity(final String name, final List<DoublesPair> pointSensitivity) {
return _multicurveProvider.parameterSensitivity(name, pointSensitivity);
}
@Override
public double[] parameterForwardSensitivity(final String name, final List<ForwardSensitivity> pointSensitivity) {
return _multicurveProvider.parameterForwardSensitivity(name, pointSensitivity);
}
@Override
public Set<String> getAllCurveNames() {
return _multicurveProvider.getAllCurveNames();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + _currencyPair.hashCode();
result = prime * result + _multicurveProvider.hashCode();
result = prime * result + _smile.hashCode();
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof BlackForexVannaVolgaProvider)) {
return false;
}
final BlackForexVannaVolgaProvider other = (BlackForexVannaVolgaProvider) obj;
if (!ObjectUtils.equals(_currencyPair, other._currencyPair)) {
return false;
}
if (!ObjectUtils.equals(_multicurveProvider, other._multicurveProvider)) {
return false;
}
if (!ObjectUtils.equals(_smile, other._smile)) {
return false;
}
return true;
}
}
| apache-2.0 |
gladyscarrizales/manifoldcf | connectors/jira/connector/src/main/java/org/apache/manifoldcf/crawler/connectors/jira/JiraJSONResponse.java | 1572 | /* $Id$ */
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.manifoldcf.crawler.connectors.jira;
import org.json.simple.JSONObject;
/** An instance of this class represents a Jira JSON object, and the parser hooks
* needed to understand it.
*
* If we needed streaming anywhere, this would implement org.json.simple.parser.ContentHandler,
* where we would extract the data from a JSON event stream. But since we don't need that
* functionality, instead we're just going to accept an already-parsed JSONObject.
*
* This class is meant to be overridden (selectively) by derived classes.
*/
public class JiraJSONResponse {
  // The most recently accepted parsed JSON object; null until acceptJSONObject is called.
  // Protected so that derived response classes can read the parsed data directly.
  protected Object object = null;
  /** Construct an empty response with no JSON object attached yet. */
  public JiraJSONResponse() {
  }
  /** Receive a parsed JSON object.
  */
  public void acceptJSONObject(Object object) {
    this.object = object;
  }
}
| apache-2.0 |
milleruntime/accumulo | server/base/src/main/java/org/apache/accumulo/server/util/MetadataTableUtil.java | 28659 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.server.util;
import static org.apache.accumulo.core.metadata.schema.TabletMetadata.ColumnType.CLONED;
import static org.apache.accumulo.core.metadata.schema.TabletMetadata.ColumnType.DIR;
import static org.apache.accumulo.core.metadata.schema.TabletMetadata.ColumnType.FILES;
import static org.apache.accumulo.core.metadata.schema.TabletMetadata.ColumnType.LAST;
import static org.apache.accumulo.core.metadata.schema.TabletMetadata.ColumnType.LOCATION;
import static org.apache.accumulo.core.metadata.schema.TabletMetadata.ColumnType.LOGS;
import static org.apache.accumulo.core.metadata.schema.TabletMetadata.ColumnType.PREV_ROW;
import static org.apache.accumulo.core.metadata.schema.TabletMetadata.ColumnType.TIME;
import static org.apache.accumulo.fate.util.UtilWaitThread.sleepUninterruptibly;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.client.AccumuloClient;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.IsolatedScanner;
import org.apache.accumulo.core.client.MutationsRejectedException;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.admin.TimeType;
import org.apache.accumulo.core.clientImpl.BatchWriterImpl;
import org.apache.accumulo.core.clientImpl.Credentials;
import org.apache.accumulo.core.clientImpl.ScannerImpl;
import org.apache.accumulo.core.clientImpl.Writer;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.accumulo.core.metadata.MetadataTable;
import org.apache.accumulo.core.metadata.RootTable;
import org.apache.accumulo.core.metadata.StoredTabletFile;
import org.apache.accumulo.core.metadata.TabletFile;
import org.apache.accumulo.core.metadata.TabletFileUtil;
import org.apache.accumulo.core.metadata.schema.Ample;
import org.apache.accumulo.core.metadata.schema.Ample.TabletMutator;
import org.apache.accumulo.core.metadata.schema.DataFileValue;
import org.apache.accumulo.core.metadata.schema.ExternalCompactionId;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.BlipSection;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.BulkFileColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ChoppedColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ClonedColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.CurrentLocationColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.DataFileColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ExternalCompactionColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.LastLocationColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ServerColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.TabletColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataTime;
import org.apache.accumulo.core.metadata.schema.TabletDeletedException;
import org.apache.accumulo.core.metadata.schema.TabletMetadata;
import org.apache.accumulo.core.metadata.schema.TabletsMetadata;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.tabletserver.log.LogEntry;
import org.apache.accumulo.core.tabletserver.thrift.ConstraintViolationException;
import org.apache.accumulo.core.util.ColumnFQ;
import org.apache.accumulo.core.util.FastFormat;
import org.apache.accumulo.core.util.Pair;
import org.apache.accumulo.fate.FateTxId;
import org.apache.accumulo.fate.zookeeper.ServiceLock;
import org.apache.accumulo.server.ServerContext;
import org.apache.accumulo.server.gc.GcVolumeUtil;
import org.apache.hadoop.io.Text;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
/**
* provides a reference to the metadata table for updates by tablet servers
*/
public class MetadataTableUtil {
  // Shared empty Text used where a column family/qualifier carries no data.
  public static final Text EMPTY_TEXT = new Text();
  // Writers cached per Credentials so repeated metadata updates reuse a session.
  // NOTE(review): entries are never evicted -- assumes a small, bounded set of
  // Credentials per process; verify before using with rotating credentials.
  private static Map<Credentials,Writer> root_tables = new HashMap<>();
  private static Map<Credentials,Writer> metadata_tables = new HashMap<>();
  private static final Logger log = LoggerFactory.getLogger(MetadataTableUtil.class);
  // Static utility class; not instantiable.
  private MetadataTableUtil() {}
  /** Returns (creating and caching on first use) a writer to the metadata table. */
  public static synchronized Writer getMetadataTable(ServerContext context) {
    Credentials credentials = context.getCredentials();
    Writer metadataTable = metadata_tables.get(credentials);
    if (metadataTable == null) {
      metadataTable = new Writer(context, MetadataTable.ID);
      metadata_tables.put(credentials, metadataTable);
    }
    return metadataTable;
  }
  /** Returns (creating and caching on first use) a writer to the root table. */
  public static synchronized Writer getRootTable(ServerContext context) {
    Credentials credentials = context.getCredentials();
    Writer rootTable = root_tables.get(credentials);
    if (rootTable == null) {
      rootTable = new Writer(context, RootTable.ID);
      root_tables.put(credentials, rootTable);
    }
    return rootTable;
  }
  /** Adds this server's ZooKeeper lock id to the mutation's lock column. */
  public static void putLockID(ServerContext context, ServiceLock zooLock, Mutation m) {
    ServerColumnFamily.LOCK_COLUMN.put(m,
        new Value(zooLock.getLockID().serialize(context.getZooKeeperRoot() + "/")));
  }
  // Convenience overload: update without recording a lock id.
  private static void update(ServerContext context, Mutation m, KeyExtent extent) {
    update(context, null, m, extent);
  }
  /**
   * Writes a mutation for the given extent, routing to the root table when the
   * extent is a metadata tablet and to the metadata table otherwise.
   */
  public static void update(ServerContext context, ServiceLock zooLock, Mutation m,
      KeyExtent extent) {
    Writer t = extent.isMeta() ? getRootTable(context) : getMetadataTable(context);
    update(context, t, zooLock, m, extent);
  }
  /**
   * Writes a mutation through the given writer, stamping it with the lock id
   * when zooLock is non-null. Transient failures are logged and retried every
   * second forever; a constraint violation is rethrown immediately.
   */
  public static void update(ServerContext context, Writer t, ServiceLock zooLock, Mutation m,
      KeyExtent extent) {
    if (zooLock != null)
      putLockID(context, zooLock, m);
    while (true) {
      try {
        t.update(m);
        return;
      } catch (AccumuloException | TableNotFoundException | AccumuloSecurityException e) {
        logUpdateFailure(m, extent, e);
      } catch (ConstraintViolationException e) {
        logUpdateFailure(m, extent, e);
        // retrying when a CVE occurs is probably futile and can cause problems, see ACCUMULO-3096
        throw new RuntimeException(e);
      }
      sleepUninterruptibly(1, TimeUnit.SECONDS);
    }
  }
  // Logs a failed metadata write with the extent and a pretty-printed mutation.
  private static void logUpdateFailure(Mutation m, KeyExtent extent, Exception e) {
    log.error("Failed to write metadata updates for extent {} {}", extent, m.prettyPrint(), e);
  }
  /** Records a new flush id for the tablet, stamped with the server's lock. */
  public static void updateTabletFlushID(KeyExtent extent, long flushID, ServerContext context,
      ServiceLock zooLock) {
    TabletMutator tablet = context.getAmple().mutateTablet(extent);
    tablet.putFlushId(flushID);
    tablet.putZooLock(zooLock);
    tablet.mutate();
  }
  /** Records a new compaction id for the tablet, stamped with the server's lock. */
  public static void updateTabletCompactID(KeyExtent extent, long compactID, ServerContext context,
      ServiceLock zooLock) {
    TabletMutator tablet = context.getAmple().mutateTablet(extent);
    tablet.putCompactionId(compactID);
    tablet.putZooLock(zooLock);
    tablet.mutate();
  }
  /**
   * Records newly bulk-imported files (with estimated sizes) and the tablet time
   * in one mutation.
   *
   * @return map of the stored (inserted) form of each file to its size estimate
   */
  public static Map<StoredTabletFile,DataFileValue> updateTabletDataFile(long tid, KeyExtent extent,
      Map<TabletFile,DataFileValue> estSizes, MetadataTime time, ServerContext context,
      ServiceLock zooLock) {
    TabletMutator tablet = context.getAmple().mutateTablet(extent);
    tablet.putTime(time);
    Map<StoredTabletFile,DataFileValue> newFiles = new HashMap<>(estSizes.size());
    estSizes.forEach((tf, dfv) -> {
      tablet.putFile(tf, dfv);
      tablet.putBulkFile(tf, tid);
      newFiles.put(tf.insert(), dfv);
    });
    tablet.putZooLock(zooLock);
    tablet.mutate();
    return newFiles;
  }
  /** Updates the tablet's directory name entry. */
  public static void updateTabletDir(KeyExtent extent, String newDir, ServerContext context,
      ServiceLock zooLock) {
    TabletMutator tablet = context.getAmple().mutateTablet(extent);
    tablet.putDirName(newDir);
    tablet.putZooLock(zooLock);
    tablet.mutate();
  }
  /** Creates the metadata entry for a brand-new tablet (prev row, dir, time zero). */
  public static void addTablet(KeyExtent extent, String path, ServerContext context,
      TimeType timeType, ServiceLock zooLock) {
    TabletMutator tablet = context.getAmple().mutateTablet(extent);
    tablet.putPrevEndRow(extent.prevEndRow());
    tablet.putDirName(path);
    tablet.putTime(new MetadataTime(0, timeType));
    tablet.putZooLock(zooLock);
    tablet.mutate();
  }
  /** Applies WAL and datafile additions/removals to a tablet in a single mutation. */
  public static void updateTabletVolumes(KeyExtent extent, List<LogEntry> logsToRemove,
      List<LogEntry> logsToAdd, List<StoredTabletFile> filesToRemove,
      SortedMap<TabletFile,DataFileValue> filesToAdd, ServiceLock zooLock, ServerContext context) {
    TabletMutator tabletMutator = context.getAmple().mutateTablet(extent);
    logsToRemove.forEach(tabletMutator::deleteWal);
    logsToAdd.forEach(tabletMutator::putWal);
    filesToRemove.forEach(tabletMutator::deleteFile);
    filesToAdd.forEach(tabletMutator::putFile);
    tabletMutator.putZooLock(zooLock);
    tabletMutator.mutate();
  }
  /**
   * Undoes an in-progress split: restores the old prev end row and clears the
   * split ratio / old-prev-row bookkeeping columns.
   */
  public static void rollBackSplit(Text metadataEntry, Text oldPrevEndRow, ServerContext context,
      ServiceLock zooLock) {
    KeyExtent ke = KeyExtent.fromMetaRow(metadataEntry, oldPrevEndRow);
    Mutation m = TabletColumnFamily.createPrevRowMutation(ke);
    TabletColumnFamily.SPLIT_RATIO_COLUMN.putDelete(m);
    TabletColumnFamily.OLD_PREV_ROW_COLUMN.putDelete(m);
    update(context, zooLock, m, KeyExtent.fromMetaRow(metadataEntry));
  }
  /**
   * Records the first phase of a split: writes the split ratio and old prev row,
   * clears the chopped marker, and deletes any external compaction entries.
   */
  public static void splitTablet(KeyExtent extent, Text oldPrevEndRow, double splitRatio,
      ServerContext context, ServiceLock zooLock, Set<ExternalCompactionId> ecids) {
    Mutation m = TabletColumnFamily.createPrevRowMutation(extent);
    TabletColumnFamily.SPLIT_RATIO_COLUMN.put(m, new Value(Double.toString(splitRatio)));
    TabletColumnFamily.OLD_PREV_ROW_COLUMN.put(m,
        TabletColumnFamily.encodePrevEndRow(oldPrevEndRow));
    ChoppedColumnFamily.CHOPPED_COLUMN.putDelete(m);
    ecids.forEach(ecid -> m.putDelete(ExternalCompactionColumnFamily.STR_NAME, ecid.canonical()));
    update(context, zooLock, m, extent);
  }
  /**
   * Completes a split for the low tablet: clears split bookkeeping columns,
   * writes the low tablet's datafile sizes, and removes files that belong only
   * to the high tablet.
   */
  public static void finishSplit(Text metadataEntry,
      Map<StoredTabletFile,DataFileValue> datafileSizes,
      List<StoredTabletFile> highDatafilesToRemove, final ServerContext context,
      ServiceLock zooLock) {
    Mutation m = new Mutation(metadataEntry);
    TabletColumnFamily.SPLIT_RATIO_COLUMN.putDelete(m);
    TabletColumnFamily.OLD_PREV_ROW_COLUMN.putDelete(m);
    ChoppedColumnFamily.CHOPPED_COLUMN.putDelete(m);
    for (Entry<StoredTabletFile,DataFileValue> entry : datafileSizes.entrySet()) {
      m.put(DataFileColumnFamily.NAME, entry.getKey().getMetaInsertText(),
          new Value(entry.getValue().encode()));
    }
    for (StoredTabletFile pathToRemove : highDatafilesToRemove) {
      m.putDelete(DataFileColumnFamily.NAME, pathToRemove.getMetaUpdateDeleteText());
    }
    update(context, zooLock, m, KeyExtent.fromMetaRow(metadataEntry));
  }
  /** Convenience overload of {@link #finishSplit(Text, Map, List, ServerContext, ServiceLock)}. */
  public static void finishSplit(KeyExtent extent,
      Map<StoredTabletFile,DataFileValue> datafileSizes,
      List<StoredTabletFile> highDatafilesToRemove, ServerContext context, ServiceLock zooLock) {
    finishSplit(extent.toMetaRow(), datafileSizes, highDatafilesToRemove, context, zooLock);
  }
  /** Removes scan-file references from a tablet's metadata. */
  public static void removeScanFiles(KeyExtent extent, Set<StoredTabletFile> scanFiles,
      ServerContext context, ServiceLock zooLock) {
    TabletMutator tablet = context.getAmple().mutateTablet(extent);
    scanFiles.forEach(tablet::deleteScan);
    tablet.putZooLock(zooLock);
    tablet.mutate();
  }
  /**
   * Distributes datafile size estimates between the low and high tablets of a
   * split at midRow. A file entirely above midRow goes only to the high tablet;
   * a file entirely at or below midRow goes only to the low tablet (and is
   * queued for removal from the high tablet); a straddling file -- or one whose
   * first/last rows are unknown -- is apportioned by splitRatio.
   */
  public static void splitDatafiles(Text midRow, double splitRatio,
      Map<TabletFile,FileUtil.FileInfo> firstAndLastRows,
      SortedMap<StoredTabletFile,DataFileValue> datafiles,
      SortedMap<StoredTabletFile,DataFileValue> lowDatafileSizes,
      SortedMap<StoredTabletFile,DataFileValue> highDatafileSizes,
      List<StoredTabletFile> highDatafilesToRemove) {
    for (Entry<StoredTabletFile,DataFileValue> entry : datafiles.entrySet()) {
      Text firstRow = null;
      Text lastRow = null;
      boolean rowsKnown = false;
      FileUtil.FileInfo mfi = firstAndLastRows.get(entry.getKey());
      if (mfi != null) {
        firstRow = mfi.getFirstRow();
        lastRow = mfi.getLastRow();
        rowsKnown = true;
      }
      if (rowsKnown && firstRow.compareTo(midRow) > 0) {
        // only in high
        long highSize = entry.getValue().getSize();
        long highEntries = entry.getValue().getNumEntries();
        highDatafileSizes.put(entry.getKey(),
            new DataFileValue(highSize, highEntries, entry.getValue().getTime()));
      } else if (rowsKnown && lastRow.compareTo(midRow) <= 0) {
        // only in low
        long lowSize = entry.getValue().getSize();
        long lowEntries = entry.getValue().getNumEntries();
        lowDatafileSizes.put(entry.getKey(),
            new DataFileValue(lowSize, lowEntries, entry.getValue().getTime()));
        highDatafilesToRemove.add(entry.getKey());
      } else {
        // straddles the split point (or rows unknown): split estimates by ratio
        long lowSize = (long) Math.floor((entry.getValue().getSize() * splitRatio));
        long lowEntries = (long) Math.floor((entry.getValue().getNumEntries() * splitRatio));
        lowDatafileSizes.put(entry.getKey(),
            new DataFileValue(lowSize, lowEntries, entry.getValue().getTime()));
        long highSize = (long) Math.ceil((entry.getValue().getSize() * (1.0 - splitRatio)));
        long highEntries =
            (long) Math.ceil((entry.getValue().getNumEntries() * (1.0 - splitRatio)));
        highDatafileSizes.put(entry.getKey(),
            new DataFileValue(highSize, highEntries, entry.getValue().getTime()));
      }
    }
  }
  /**
   * Deletes all metadata rows for a table. When insertDeletes is true, GC
   * delete markers for the table's files and directories are written (and
   * flushed) first, so a crash part-way through leaves no unreferenced files.
   */
  public static void deleteTable(TableId tableId, boolean insertDeletes, ServerContext context,
      ServiceLock lock) throws AccumuloException {
    try (Scanner ms = new ScannerImpl(context, MetadataTable.ID, Authorizations.EMPTY);
        BatchWriter bw = new BatchWriterImpl(context, MetadataTable.ID,
            new BatchWriterConfig().setMaxMemory(1000000)
                .setMaxLatency(120000L, TimeUnit.MILLISECONDS).setMaxWriteThreads(2))) {
      // scan metadata for our table and delete everything we find
      Mutation m = null;
      Ample ample = context.getAmple();
      ms.setRange(new KeyExtent(tableId, null, null).toMetaRange());
      // insert deletes before deleting data from metadata... this makes the code fault tolerant
      if (insertDeletes) {
        ms.fetchColumnFamily(DataFileColumnFamily.NAME);
        ServerColumnFamily.DIRECTORY_COLUMN.fetch(ms);
        for (Entry<Key,Value> cell : ms) {
          Key key = cell.getKey();
          if (key.getColumnFamily().equals(DataFileColumnFamily.NAME)) {
            String ref = TabletFileUtil.validate(key.getColumnQualifierData().toString());
            bw.addMutation(ample.createDeleteMutation(ref));
          }
          if (ServerColumnFamily.DIRECTORY_COLUMN.hasColumns(key)) {
            String uri =
                GcVolumeUtil.getDeleteTabletOnAllVolumesUri(tableId, cell.getValue().toString());
            bw.addMutation(ample.createDeleteMutation(uri));
          }
        }
        bw.flush();
        ms.clearColumns();
      }
      // second pass: delete every metadata cell, batching by row
      for (Entry<Key,Value> cell : ms) {
        Key key = cell.getKey();
        if (m == null) {
          m = new Mutation(key.getRow());
          if (lock != null)
            putLockID(context, lock, m);
        }
        if (key.getRow().compareTo(m.getRow(), 0, m.getRow().length) != 0) {
          bw.addMutation(m);
          m = new Mutation(key.getRow());
          if (lock != null)
            putLockID(context, lock, m);
        }
        m.putDelete(key.getColumnFamily(), key.getColumnQualifier());
      }
      if (m != null)
        bw.addMutation(m);
    }
  }
  /**
   * Reads a tablet's metadata and returns its write-ahead log entries and
   * datafile sizes.
   *
   * @throws RuntimeException if the tablet is not found in metadata
   */
  public static Pair<List<LogEntry>,SortedMap<StoredTabletFile,DataFileValue>>
      getFileAndLogEntries(ServerContext context, KeyExtent extent) throws IOException {
    ArrayList<LogEntry> result = new ArrayList<>();
    TreeMap<StoredTabletFile,DataFileValue> sizes = new TreeMap<>();
    TabletMetadata tablet = context.getAmple().readTablet(extent, FILES, LOGS, PREV_ROW, DIR);
    if (tablet == null) {
      throw new RuntimeException("Tablet " + extent + " not found in metadata");
    }
    result.addAll(tablet.getLogs());
    tablet.getFilesMap().forEach(sizes::put);
    return new Pair<>(result, sizes);
  }
  /** Removes the given WAL entries from a tablet's metadata. */
  public static void removeUnusedWALEntries(ServerContext context, KeyExtent extent,
      final List<LogEntry> entries, ServiceLock zooLock) {
    TabletMutator tablet = context.getAmple().mutateTablet(extent);
    entries.forEach(tablet::deleteWal);
    tablet.putZooLock(zooLock);
    tablet.mutate();
  }
  /**
   * Builds the clone-table mutation for one source tablet: relative file paths
   * are re-rooted under ../srcTableId, the current location becomes the last
   * location, existing last-location entries are dropped, and everything else
   * is copied verbatim.
   */
  private static Mutation createCloneMutation(TableId srcTableId, TableId tableId,
      Map<Key,Value> tablet) {
    KeyExtent ke = KeyExtent.fromMetaRow(tablet.keySet().iterator().next().getRow());
    Mutation m = new Mutation(TabletsSection.encodeRow(tableId, ke.endRow()));
    for (Entry<Key,Value> entry : tablet.entrySet()) {
      if (entry.getKey().getColumnFamily().equals(DataFileColumnFamily.NAME)) {
        String cf = entry.getKey().getColumnQualifier().toString();
        if (!cf.startsWith("../") && !cf.contains(":"))
          cf = "../" + srcTableId + entry.getKey().getColumnQualifier();
        m.put(entry.getKey().getColumnFamily(), new Text(cf), entry.getValue());
      } else if (entry.getKey().getColumnFamily().equals(CurrentLocationColumnFamily.NAME)) {
        m.put(LastLocationColumnFamily.NAME, entry.getKey().getColumnQualifier(), entry.getValue());
      } else if (entry.getKey().getColumnFamily().equals(LastLocationColumnFamily.NAME)) {
        // skip
      } else {
        m.put(entry.getKey().getColumnFamily(), entry.getKey().getColumnQualifier(),
            entry.getValue());
      }
    }
    return m;
  }
  /**
   * Builds a consistency-checking scan over a table's tablet metadata. For the
   * metadata table itself the root table is scanned; a test table name, when
   * given, overrides the scan target.
   */
  private static Iterable<TabletMetadata> createCloneScanner(String testTableName, TableId tableId,
      AccumuloClient client) throws TableNotFoundException {
    String tableName;
    Range range;
    if (testTableName != null) {
      tableName = testTableName;
      range = TabletsSection.getRange(tableId);
    } else if (tableId.equals(MetadataTable.ID)) {
      tableName = RootTable.NAME;
      range = TabletsSection.getRange();
    } else {
      tableName = MetadataTable.NAME;
      range = TabletsSection.getRange(tableId);
    }
    return TabletsMetadata.builder(client).scanTable(tableName).overRange(range).checkConsistency()
        .saveKeyValues().fetch(FILES, LOCATION, LAST, CLONED, PREV_ROW, TIME).build();
  }
  /** Writes an initial clone-table row for every tablet of the source table. */
  @VisibleForTesting
  public static void initializeClone(String testTableName, TableId srcTableId, TableId tableId,
      AccumuloClient client, BatchWriter bw)
      throws TableNotFoundException, MutationsRejectedException {
    Iterator<TabletMetadata> ti = createCloneScanner(testTableName, srcTableId, client).iterator();
    if (!ti.hasNext())
      throw new RuntimeException(" table deleted during clone? srcTableId = " + srcTableId);
    while (ti.hasNext())
      bw.addMutation(createCloneMutation(srcTableId, tableId, ti.next().getKeyValues()));
    bw.flush();
  }
  // Compares end rows via KeyExtent so a null end row sorts as "last tablet".
  private static int compareEndRows(Text endRow1, Text endRow2) {
    return new KeyExtent(TableId.of("0"), endRow1, null)
        .compareTo(new KeyExtent(TableId.of("0"), endRow2, null));
  }
  /**
   * Verifies each clone tablet against the source tablet(s) covering its range:
   * when the clone's files are still a subset of the source's, a CLONED "OK"
   * marker is written; otherwise the clone entry is rewritten from the current
   * source tablets.
   *
   * @return the number of clone tablets that had to be rewritten (0 means done)
   * @throws TabletDeletedException if source tablets were merged away mid-clone
   */
  @VisibleForTesting
  public static int checkClone(String testTableName, TableId srcTableId, TableId tableId,
      AccumuloClient client, BatchWriter bw)
      throws TableNotFoundException, MutationsRejectedException {
    Iterator<TabletMetadata> srcIter =
        createCloneScanner(testTableName, srcTableId, client).iterator();
    Iterator<TabletMetadata> cloneIter =
        createCloneScanner(testTableName, tableId, client).iterator();
    if (!cloneIter.hasNext() || !srcIter.hasNext())
      throw new RuntimeException(
          " table deleted during clone? srcTableId = " + srcTableId + " tableId=" + tableId);
    int rewrites = 0;
    while (cloneIter.hasNext()) {
      TabletMetadata cloneTablet = cloneIter.next();
      Text cloneEndRow = cloneTablet.getEndRow();
      HashSet<TabletFile> cloneFiles = new HashSet<>();
      boolean cloneSuccessful = cloneTablet.getCloned() != null;
      if (!cloneSuccessful)
        cloneFiles.addAll(cloneTablet.getFiles());
      List<TabletMetadata> srcTablets = new ArrayList<>();
      TabletMetadata srcTablet = srcIter.next();
      srcTablets.add(srcTablet);
      Text srcEndRow = srcTablet.getEndRow();
      int cmp = compareEndRows(cloneEndRow, srcEndRow);
      if (cmp < 0)
        throw new TabletDeletedException(
            "Tablets deleted from src during clone : " + cloneEndRow + " " + srcEndRow);
      HashSet<TabletFile> srcFiles = new HashSet<>();
      if (!cloneSuccessful)
        srcFiles.addAll(srcTablet.getFiles());
      // gather all source tablets whose range falls within this clone tablet
      while (cmp > 0) {
        srcTablet = srcIter.next();
        srcTablets.add(srcTablet);
        srcEndRow = srcTablet.getEndRow();
        cmp = compareEndRows(cloneEndRow, srcEndRow);
        if (cmp < 0)
          throw new TabletDeletedException(
              "Tablets deleted from src during clone : " + cloneEndRow + " " + srcEndRow);
        if (!cloneSuccessful)
          srcFiles.addAll(srcTablet.getFiles());
      }
      if (cloneSuccessful)
        continue;
      if (srcFiles.containsAll(cloneFiles)) {
        // write out marker that this tablet was successfully cloned
        Mutation m = new Mutation(cloneTablet.getExtent().toMetaRow());
        m.put(ClonedColumnFamily.NAME, new Text(""), new Value("OK"));
        bw.addMutation(m);
      } else {
        // delete existing cloned tablet entry
        Mutation m = new Mutation(cloneTablet.getExtent().toMetaRow());
        for (Entry<Key,Value> entry : cloneTablet.getKeyValues().entrySet()) {
          Key k = entry.getKey();
          m.putDelete(k.getColumnFamily(), k.getColumnQualifier(), k.getTimestamp());
        }
        bw.addMutation(m);
        for (TabletMetadata st : srcTablets)
          bw.addMutation(createCloneMutation(srcTableId, tableId, st.getKeyValues()));
        rewrites++;
      }
    }
    bw.flush();
    return rewrites;
  }
  /**
   * Clones a table's metadata: initializes clone rows, loops checkClone until
   * stable (restarting from scratch if source tablets are merged mid-clone),
   * then replaces the clone markers with fresh directory entries.
   */
  public static void cloneTable(ServerContext context, TableId srcTableId, TableId tableId)
      throws Exception {
    try (BatchWriter bw = context.createBatchWriter(MetadataTable.NAME)) {
      while (true) {
        try {
          initializeClone(null, srcTableId, tableId, context, bw);
          // the following loop looks changes in the file that occurred during the copy.. if files
          // were dereferenced then they could have been GCed
          while (true) {
            int rewrites = checkClone(null, srcTableId, tableId, context, bw);
            if (rewrites == 0)
              break;
          }
          bw.flush();
          break;
        } catch (TabletDeletedException tde) {
          // tablets were merged in the src table
          bw.flush();
          // delete what we have cloned and try again
          deleteTable(tableId, false, context, null);
          log.debug("Tablets merged in table {} while attempting to clone, trying again",
              srcTableId);
          sleepUninterruptibly(100, TimeUnit.MILLISECONDS);
        }
      }
      // delete the clone markers and create directory entries
      Scanner mscanner = context.createScanner(MetadataTable.NAME, Authorizations.EMPTY);
      mscanner.setRange(new KeyExtent(tableId, null, null).toMetaRange());
      mscanner.fetchColumnFamily(ClonedColumnFamily.NAME);
      int dirCount = 0;
      for (Entry<Key,Value> entry : mscanner) {
        Key k = entry.getKey();
        Mutation m = new Mutation(k.getRow());
        m.putDelete(k.getColumnFamily(), k.getColumnQualifier());
        byte[] dirName =
            FastFormat.toZeroPaddedString(dirCount++, 8, 16, Constants.CLONE_PREFIX_BYTES);
        ServerColumnFamily.DIRECTORY_COLUMN.put(m, new Value(dirName));
        bw.addMutation(m);
      }
    }
  }
  /** Marks a tablet as chopped (merge preparation complete). */
  public static void chopped(ServerContext context, KeyExtent extent, ServiceLock zooLock) {
    TabletMutator tablet = context.getAmple().mutateTablet(extent);
    tablet.putChopped();
    tablet.putZooLock(zooLock);
    tablet.mutate();
  }
  /** Deletes all bulk-load file entries for a table that belong to the given transaction. */
  public static void removeBulkLoadEntries(AccumuloClient client, TableId tableId, long tid)
      throws Exception {
    try (
        Scanner mscanner =
            new IsolatedScanner(client.createScanner(MetadataTable.NAME, Authorizations.EMPTY));
        BatchWriter bw = client.createBatchWriter(MetadataTable.NAME)) {
      mscanner.setRange(new KeyExtent(tableId, null, null).toMetaRange());
      mscanner.fetchColumnFamily(BulkFileColumnFamily.NAME);
      for (Entry<Key,Value> entry : mscanner) {
        log.trace("Looking at entry {} with tid {}", entry, tid);
        long entryTid = BulkFileColumnFamily.getBulkLoadTid(entry.getValue());
        if (tid == entryTid) {
          log.trace("deleting entry {}", entry);
          Key key = entry.getKey();
          Mutation m = new Mutation(key.getRow());
          m.putDelete(key.getColumnFamily(), key.getColumnQualifier());
          bw.addMutation(m);
        }
      }
    }
  }
  /** Writes a bulk-load-in-progress (BLIP) marker row for the given file path. */
  public static void addBulkLoadInProgressFlag(ServerContext context, String path, long fateTxid) {
    Mutation m = new Mutation(BlipSection.getRowPrefix() + path);
    m.put(EMPTY_TEXT, EMPTY_TEXT, new Value(FateTxId.formatTid(fateTxid)));
    // new KeyExtent is only added to force update to write to the metadata table, not the root
    // table
    // because bulk loads aren't supported to the metadata table
    update(context, m, new KeyExtent(TableId.of("anythingNotMetadata"), null, null));
  }
  /** Deletes the bulk-load-in-progress (BLIP) marker row for the given file path. */
  public static void removeBulkLoadInProgressFlag(ServerContext context, String path) {
    Mutation m = new Mutation(BlipSection.getRowPrefix() + path);
    m.putDelete(EMPTY_TEXT, EMPTY_TEXT);
    // new KeyExtent is only added to force update to write to the metadata table, not the root
    // table
    // because bulk loads aren't supported to the metadata table
    update(context, m, new KeyExtent(TableId.of("anythingNotMetadata"), null, null));
  }
  /**
   * Groups key/values by row, keeping only the requested columns (all columns
   * when the list is null).
   *
   * @return row -> (column -> value), both maps sorted
   */
  public static SortedMap<Text,SortedMap<ColumnFQ,Value>>
      getTabletEntries(SortedMap<Key,Value> tabletKeyValues, List<ColumnFQ> columns) {
    TreeMap<Text,SortedMap<ColumnFQ,Value>> tabletEntries = new TreeMap<>();
    HashSet<ColumnFQ> colSet = columns == null ? null : new HashSet<>(columns);
    tabletKeyValues.forEach((key, val) -> {
      ColumnFQ currentKey = new ColumnFQ(key);
      if (columns == null || colSet.contains(currentKey)) {
        tabletEntries.computeIfAbsent(key.getRow(), k -> new TreeMap<>()).put(currentKey, val);
      }
    });
    return tabletEntries;
  }
}
| apache-2.0 |
FundingCircle/secor | src/main/java/com/pinterest/secor/parser/DateMessageParser.java | 3161 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pinterest.secor.parser;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.pinterest.secor.common.SecorConfig;
import com.pinterest.secor.message.Message;
import net.minidev.json.JSONObject;
import net.minidev.json.JSONValue;
/**
* DateMessageParser extracts timestamp field (specified by 'message.timestamp.name')
* and the date pattern (specified by 'message.timestamp.input.pattern')
*
* @see http://docs.oracle.com/javase/6/docs/api/java/text/SimpleDateFormat.html
*
* @author Lucas Zago (lucaszago@gmail.com)
*
*/
public class DateMessageParser extends MessageParser {
    private static final Logger LOG = LoggerFactory.getLogger(DateMessageParser.class);

    // Partition emitted when the payload is not JSON, the timestamp field is
    // missing, or the value does not match the configured input pattern.
    protected static final String defaultDate = "dt=1970-01-01";
    // Output date pattern used to build the partition name.
    protected static final String defaultFormatter = "yyyy-MM-dd";

    // NOTE: SimpleDateFormat is not thread-safe; each parser instance must be
    // confined to a single thread.
    protected SimpleDateFormat outputFormatter = new SimpleDateFormat(defaultFormatter);
    // Raw 'message.timestamp.input.pattern' config value. Kept as Object for
    // backwards compatibility with existing subclasses; only its toString() is used.
    protected Object inputPattern;
    protected SimpleDateFormat inputFormatter;

    /**
     * Creates a parser whose input and output formatters share the configured
     * time zone.
     *
     * @param config Secor configuration supplying the timestamp field name,
     *        the input date pattern and the time zone
     */
    public DateMessageParser(SecorConfig config) {
        super(config);
        TimeZone timeZone = config.getTimeZone();
        inputPattern = mConfig.getMessageTimestampInputPattern();
        inputFormatter = new SimpleDateFormat(inputPattern.toString());
        inputFormatter.setTimeZone(timeZone);
        outputFormatter.setTimeZone(timeZone);
    }

    /**
     * Extracts a single "dt=yyyy-MM-dd" partition from the message's timestamp
     * field, falling back to {@link #defaultDate} when the payload cannot be
     * parsed as JSON or the timestamp value does not match the input pattern.
     *
     * @param message the Kafka message whose JSON payload carries the timestamp
     * @return a one-element array holding the partition name
     */
    @Override
    public String[] extractPartitions(Message message) {
        JSONObject jsonObject = (JSONObject) JSONValue.parse(message.getPayload());
        String[] result = {defaultDate};
        if (jsonObject != null) {
            Object fieldValue = getJsonFieldValue(jsonObject);
            if (fieldValue != null && inputPattern != null) {
                try {
                    Date parsedDate = inputFormatter.parse(fieldValue.toString());
                    result[0] = "dt=" + outputFormatter.format(parsedDate);
                    return result;
                } catch (Exception e) {
                    // Malformed timestamp: log and fall through to the default partition.
                    LOG.warn("Impossible to convert date = {} for the input pattern = {} . Using date default = {}",
                            fieldValue.toString(), inputPattern.toString(), result[0]);
                }
            }
        }
        return result;
    }
}
| apache-2.0 |
jmluy/elasticsearch | x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/AnalyzeTests.java | 4697 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.security.authz;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.test.SecuritySettingsSourceField;
import java.util.Collections;
import static org.elasticsearch.test.SecurityTestsUtils.assertThrowsAuthorizationException;
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER;
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
public class AnalyzeTests extends SecurityIntegTestCase {
    // Registers two test users, both with the standard test password:
    // 'analyze_indices' and 'analyze_cluster'.
    @Override
    protected String configUsers() {
        final String usersPasswdHashed = new String(
            getFastStoredHashAlgoForTests().hash(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)
        );
        return super.configUsers() + "analyze_indices:" + usersPasswdHashed + "\n" + "analyze_cluster:" + usersPasswdHashed + "\n";
    }
    // Maps each test user to the role of the same name.
    @Override
    protected String configUsersRoles() {
        return super.configUsersRoles() + "analyze_indices:analyze_indices\n" + "analyze_cluster:analyze_cluster\n";
    }
    // Defines the two roles under test: index-level analyze on 'test_*' only,
    // and cluster-level analyze only.
    @Override
    protected String configRoles() {
        return super.configRoles() + "\n" +
        // role that has analyze indices privileges only
            "analyze_indices:\n"
            + "  indices:\n"
            + "    - names: 'test_*'\n"
            + "      privileges: [ 'indices:admin/analyze' ]\n"
            + "analyze_cluster:\n"
            + "  cluster:\n"
            + "    - cluster:admin/analyze\n";
    }
    // Verifies that index-scoped analyze privileges authorize only requests
    // targeting matching indices, and not other indices or cluster-level analyze.
    public void testAnalyzeWithIndices() {
        // this test tries to execute different analyze api variants from a user that has analyze privileges only on a specific index
        // namespace
        createIndex("test_1");
        ensureGreen();
        // ok: user has permissions for analyze on test_*
        SecureString passwd = SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING;
        client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("analyze_indices", passwd)))
            .admin()
            .indices()
            .prepareAnalyze("this is my text")
            .setIndex("test_1")
            .setAnalyzer("standard")
            .get();
        // fails: user doesn't have permissions for analyze on index non_authorized
        assertThrowsAuthorizationException(
            client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("analyze_indices", passwd)))
                .admin()
                .indices()
                .prepareAnalyze("this is my text")
                .setIndex("non_authorized")
                .setAnalyzer("standard")::get,
            AnalyzeAction.NAME,
            "analyze_indices"
        );
        // fails: user doesn't have permissions for cluster level analyze
        assertThrowsAuthorizationException(
            client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("analyze_indices", passwd)))
                .admin()
                .indices()
                .prepareAnalyze("this is my text")
                .setAnalyzer("standard")::get,
            "cluster:admin/analyze",
            "analyze_indices"
        );
    }
    // Verifies that cluster-scoped analyze privileges authorize only index-less
    // analyze requests, and not requests directed at a specific index.
    public void testAnalyzeWithoutIndices() {
        // this test tries to execute different analyze api variants from a user that has analyze privileges only at cluster level
        SecureString passwd = SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING;
        // fails: user doesn't have permissions for analyze on index test_1
        assertThrowsAuthorizationException(
            client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("analyze_cluster", passwd)))
                .admin()
                .indices()
                .prepareAnalyze("this is my text")
                .setIndex("test_1")
                .setAnalyzer("standard")::get,
            AnalyzeAction.NAME,
            "analyze_cluster"
        );
        // ok: cluster-level analyze (no index) is permitted for this user
        client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("analyze_cluster", passwd)))
            .admin()
            .indices()
            .prepareAnalyze("this is my text")
            .setAnalyzer("standard")
            .get();
    }
}
| apache-2.0 |
smanvi-pivotal/geode | geode-core/src/main/java/org/apache/geode/internal/sequencelog/GraphType.java | 1631 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.sequencelog;
import java.util.EnumSet;
/**
*
*/
/**
 * The kinds of graphs recorded by the sequence log. Each constant has a compact
 * byte identifier (its ordinal) used for serialization, and a lowercase token
 * recognized by {@link #parse(String)}.
 */
public enum GraphType {
  REGION, KEY, MESSAGE, MEMBER;

  /** Returns the compact byte identifier of this graph type (its ordinal). */
  public byte getId() {
    return (byte) ordinal();
  }

  /** Returns the graph type whose identifier is {@code id}. */
  public static GraphType getType(byte id) {
    return values()[id];
  }

  /**
   * Parses an enabled-types string into a set of graph types. Matching is by
   * substring: any occurrence of "region", "key", "message" or "member" enables
   * the corresponding type, and "all" enables every type regardless of what
   * else is present.
   */
  public static EnumSet<GraphType> parse(String enabledTypesString) {
    if (enabledTypesString.contains("all")) {
      return EnumSet.allOf(GraphType.class);
    }
    // Tokens are listed in declaration order so they line up with values().
    String[] tokens = {"region", "key", "message", "member"};
    GraphType[] types = values();
    EnumSet<GraphType> enabled = EnumSet.noneOf(GraphType.class);
    for (int i = 0; i < tokens.length; i++) {
      if (enabledTypesString.contains(tokens[i])) {
        enabled.add(types[i]);
      }
    }
    return enabled;
  }
}
| apache-2.0 |
adufilie/flex-sdk | modules/asc/src/java/macromedia/asc/embedding/avmplus/ClassBuilder.java | 10369 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package macromedia.asc.embedding.avmplus;
import macromedia.asc.util.*;
import macromedia.asc.semantics.*;
import static macromedia.asc.semantics.Slot.*;
import static macromedia.asc.parser.Tokens.*;
import java.util.Iterator;
/**
* A class object is a factory for instances of that class.
*
* A class object has two special internal properties: prototype, and
* instance traits. The class object uses these two properties to
* initialize the instances it creates.
*
* Like other objects, a class object has its own traits object.
* That traits object contains the names of the class's static
* defintions. Those names have bindings to global slots and global
* methods. Class objects have no instance slots.
*
* There a various kinds of bindings that can be added to an object:
* + local slot
* + local method
* + local accessor pair
* + global slot
* + global method
* + global accessor pair
* The class object builder adds the global versions of these
* bindings to the class object symbol table. Specifically,
* class A {
* static var x // adds a global slot binding
* static function f() {} // adds a global method binding
* static function get y() {} // adds a global get accessor
* static function set y(v) {} // adds a global set accessor
* }
*/
public class ClassBuilder extends Builder
{
    // Builder of the base class, when there is one; consulted to decide whether
    // method/var ids must be erased for intrinsic bases.
    public ClassBuilder basebui;
    public boolean is_interface;
    public ObjectValue protected_namespace; // alias to namespace created by CDN
    public ObjectValue static_protected_namespace; // alias to namespace created by CDN

    /**
     * Creates a builder for the class {@code classname}, remembering the
     * protected namespaces supplied by the class definition node.
     */
    public ClassBuilder(QName classname, ObjectValue protected_namespace, ObjectValue static_protected_namespace)
    {
        basebui = null;
        is_interface = false;
        this.classname = classname;
        this.protected_namespace = protected_namespace;
        this.static_protected_namespace = static_protected_namespace;
    }

    /**
     * Initializes the class object {@code ob}: records it and the context id,
     * resets the variable/register offsets, and reserves the first two method
     * dispatch ids for the prototype getter/setter inherited from Class.
     * For the class named "Class" itself nothing is reserved.
     */
    public void build(Context cx, ObjectValue ob)
    {
        objectValue = ob;
        contextId = cx.getId();
        if( "Class".equals(classname.toString()) )
        {
            return;
        }
        var_offset = 0;
        reg_offset = 1; // this, in cinit
        // private : Attribute
        Namespaces nss = new Namespaces(1);
        nss.push_back(cx.publicNamespace());
        // use up the first two disp_id's because Class has two instance methods (get/set prototype)
        // ISSUE is there a way to do this from the actual definition of Class instead of hardcoding?
        // FIXME: move these to a shared base class?
        int meth_id = Method(cx, ob, "prototype$get", nss, false);
        ExplicitGet(cx, ob, "prototype", nss, null, true, false, -1, meth_id, -1);
        Method(cx, ob, "prototype$set", nss, false);
    }

    /**
     * Allocates a variable via the base builder; returns -1 (no allocation)
     * when this class is intrinsic.
     */
    public int Variable( Context cx, ObjectValue ob )
    {
        // If building an intrinsic instance, then do nothing here.
        if( is_intrinsic /*|| basebui!=0 && basebui->is_intrinsic*/ )
        {
            return -1;
        }
        // front-end
        int var_id = super.Variable(cx,ob);
        // back-end
        return var_id;
    }

    /**
     * Allocates a method id for {@code name}; returns -1 when either this
     * builder or the individual method is intrinsic.
     */
    public int Method(Context cx, ObjectValue ob, final String name, Namespaces namespaces, boolean is_intrinsic )
    {
        if( this.is_intrinsic || is_intrinsic )
        {
            return -1;
        }
        return GetMethodId(cx,name,namespaces);
    }

    /** Delegates implicit-call binding entirely to the base builder. */
    public void ImplicitCall( Context cx, ObjectValue ob, int slot_id, TypeValue type, int call_seq, int method_id, int expected_id )
    {
        // Do the frontend binding
        super.ImplicitCall(cx,ob,slot_id,type,call_seq,method_id,expected_id);
    }

    /** Convenience overload: explicit getter with no expected/method/var ids. */
    public int ExplicitGet( Context cx, ObjectValue ob, String name, Namespaces namespaces, TypeValue type, boolean is_final, boolean is_override )
    {
        return ExplicitGet(cx, ob, name, namespaces, type, is_final, is_override, -1, -1, -1);
    }

    /**
     * Adds a getter binding for {@code name} in each namespace: creates a
     * method slot, defines the GET names, tags the slot with the method id and
     * final/override flags, then performs the backend Name binding. The method
     * id is erased when this class or its base is intrinsic.
     */
    public int ExplicitGet( Context cx, ObjectValue ob, String name, Namespaces namespaces, TypeValue type, boolean is_final, boolean is_override, int expected_id , int method_id , int var_id )
    {
        int slot_id = ob.addMethodSlot(cx,type);
        CHECK_SLOT_INDEX(expected_id,slot_id);
        ob.defineNames(cx,GET_TOKEN,name,namespaces,slot_id);
        Slot slot = ob.getSlot(cx,slot_id);
        slot.attrs(CALL_ThisMethod,method_id);
        slot.setFinal(is_final);
        slot.setOverride(is_override);
        if( is_intrinsic || basebui!=null && basebui.is_intrinsic )
        {
            slot.setMethodID(-1); // erase the method_id
        }
        // do backend binding
        // if( method_id >= 0 )
        {
            for (ObjectValue it : namespaces)
            {
                Name(cx,GET_TOKEN,name,it);
            }
        }
        return slot_id;
    }

    /** Convenience overload: single-namespace setter with no method/var ids. */
    public int ExplicitSet( Context cx, ObjectValue ob, String name, ObjectValue ns, TypeValue type, boolean is_final, boolean is_override, int expected_id)
    {
        return ExplicitSet(cx, ob, name, ns, type, is_final, is_override, expected_id, -1, -1);
    }

    /**
     * Adds a setter binding for {@code name} in the single namespace {@code ns};
     * mirrors the Namespaces overload but defines one SET name and one backend
     * Name binding.
     */
    public int ExplicitSet( Context cx, ObjectValue ob, String name, ObjectValue ns, TypeValue type, boolean is_final, boolean is_override, int expected_id, int method_id , int var_id )
    {
        int slot_id = ob.addMethodSlot(cx,type);
        CHECK_SLOT_INDEX(expected_id,slot_id);
        ob.defineName(cx,SET_TOKEN,name,ns,slot_id);
        Slot slot = ob.getSlot(cx,slot_id);
        slot.attrs(CALL_ThisMethod,method_id);
        slot.setFinal(is_final);
        slot.setOverride(is_override);
        if( is_intrinsic || basebui!=null && basebui.is_intrinsic )
        {
            slot.setMethodID(-1); // erase the method_id
        }
        // if( method_id >= 0 )
        {
            Name(cx,SET_TOKEN,name,ns);
        }
        return slot_id;
    }

    /** Convenience overload: multi-namespace setter with no method/var ids. */
    public int ExplicitSet( Context cx, ObjectValue ob, String name, Namespaces namespaces, TypeValue type, boolean is_final, boolean is_override, int expected_id)
    {
        return ExplicitSet(cx, ob, name, namespaces, type, is_final, is_override, expected_id, -1, -1);
    }

    /**
     * Adds a setter binding for {@code name} in each namespace; same slot setup
     * as the getter variant but using SET names.
     */
    public int ExplicitSet( Context cx, ObjectValue ob, String name, Namespaces namespaces, TypeValue type, boolean is_final, boolean is_override, int expected_id, int method_id , int var_id )
    {
        int slot_id = ob.addMethodSlot(cx,type);
        CHECK_SLOT_INDEX(expected_id,slot_id);
        ob.defineNames(cx,SET_TOKEN,name,namespaces,slot_id);
        Slot slot = ob.getSlot(cx,slot_id);
        slot.attrs(CALL_ThisMethod,method_id);
        slot.setFinal(is_final);
        slot.setOverride(is_override);
        if( is_intrinsic || basebui!=null && basebui.is_intrinsic )
        {
            slot.setMethodID(-1); // erase the method_id
        }
        // do backend binding
        // if( method_id >= 0 )
        {
            for (Iterator<ObjectValue> it = namespaces.iterator(); it.hasNext();)
            {
                Name(cx,SET_TOKEN,name,it.next());
            }
        }
        return slot_id;
    }

    /** Convenience overload: single-namespace variable with no method/var ids. */
    public int ExplicitVar( Context cx, ObjectValue ob, String name, ObjectValue ns, TypeValue type, int expected_id)
    {
        return ExplicitVar(cx, ob, name, ns, type, expected_id, -1, -1);
    }

    /**
     * Adds a variable slot for {@code name} in the single namespace {@code ns}.
     * NOTE(review): unlike the Namespaces overload below, this overload defines
     * only the GET_TOKEN name (no SET_TOKEN) — confirm whether that asymmetry
     * is intentional.
     */
    public int ExplicitVar( Context cx, ObjectValue ob, String name, ObjectValue ns, TypeValue type, int expected_id, int method_id , int var_id )
    {
        int slot_id = ob.addVariableSlot(cx,type,var_id);
        ob.getSlot(cx,slot_id).addType(type.getDefaultTypeInfo());
        CHECK_SLOT_INDEX(expected_id,slot_id);
        ob.defineName(cx,GET_TOKEN,name,ns,slot_id);
        Slot slot = ob.getSlot(cx,slot_id);
        slot.attrs(CALL_ThisMethod,method_id);
        if( is_intrinsic || basebui!=null && basebui.is_intrinsic )
        {
            slot.setVarIndex(-1); // erase the var_index
        }
        // if( var_id >= 0 )
        {
            Name(cx,VAR_TOKEN,name,ns);
        }
        return slot_id;
    }

    /** Convenience overload: multi-namespace variable with no method/var ids. */
    public int ExplicitVar( Context cx, ObjectValue ob, String name, Namespaces namespaces, TypeValue type, int expected_id)
    {
        return ExplicitVar(cx, ob, name, namespaces, type, expected_id, -1, -1);
    }

    /**
     * Adds a variable slot for {@code name} in each namespace, defining both
     * GET and SET names for it, then performs the backend VAR Name binding.
     * The var index is erased when this class or its base is intrinsic.
     */
    public int ExplicitVar( Context cx, ObjectValue ob, String name, Namespaces namespaces, TypeValue type, int expected_id, int method_id , int var_id )
    {
        int slot_id = ob.addVariableSlot(cx,type,var_id);
        ob.getSlot(cx,slot_id).addType(type.getDefaultTypeInfo());
        CHECK_SLOT_INDEX(expected_id,slot_id);
        ob.defineNames(cx,GET_TOKEN,name,namespaces,slot_id);
        ob.defineNames(cx,SET_TOKEN,name,namespaces,slot_id);
        Slot slot = ob.getSlot(cx,slot_id);
        slot.attrs(CALL_ThisMethod,method_id);
        if( is_intrinsic || basebui!=null && basebui.is_intrinsic )
        {
            slot.setVarIndex(-1); // erase the var_index
        }
        // do backend binding
        // if( var_id >= 0 )
        {
            for (Iterator<ObjectValue> it = namespaces.iterator();it.hasNext();)
            {
                Name(cx,VAR_TOKEN,name,it.next());
            }
        }
        return slot_id;
    }

    /**
     * Adds a callable method binding: frontend-binds a non-getter function slot
     * via the base builder, then creates the implied call slot (EMPTY_TOKEN)
     * that carries the method id, final/override flags and the mangled
     * "ClassName$name" method name, and finally performs the backend binding.
     */
    public int ExplicitCall( Context cx, ObjectValue ob, String name, Namespaces namespaces, TypeValue type, boolean is_final, boolean is_override, int expected_id, int method_id , int var_id )
    {
        // Do the frontend binding
        int slot_id = super.ExplicitGet(cx,ob,name,namespaces,cx.functionType(),true,false,expected_id,method_id,var_id);
        ob.getSlot(cx, slot_id).setGetter(false);
        int implied_id = ob.addSlotImplicit(cx,slot_id,EMPTY_TOKEN,type); // ISSUE: clean up
        Slot slot = ob.getSlot(cx,implied_id);
        ob.getSlot(cx,implied_id).attrs(CALL_ThisMethod,method_id);
        slot.setFinal(is_final);
        slot.setOverride(is_override);
        slot.setMethodName(classname+"$"+name);
        slot.setGetter(false);
        // this isn't right
        if( is_intrinsic || basebui!=null && basebui.is_intrinsic )
        {
            slot.setMethodID(-1); // erase the method_id
        }
        // Do the backend binding
        // if( method_id >= 0 )
        {
            for (Iterator<ObjectValue> it = namespaces.iterator();it.hasNext();)
            {
                Name(cx,EMPTY_TOKEN,name,it.next());
            }
        }
        return slot_id;
    }
}
| apache-2.0 |
stoksey69/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201408/OperatingSystemTargeting.java | 2721 |
package com.google.api.ads.dfp.jaxws.v201408;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
*
* Represents operating systems that are being targeted or excluded by the
* {@link LineItem}.
*
*
* <p>Java class for OperatingSystemTargeting complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="OperatingSystemTargeting">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="isTargeted" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/>
* <element name="operatingSystems" type="{https://www.google.com/apis/ads/publisher/v201408}Technology" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "OperatingSystemTargeting", propOrder = {
    "isTargeted",
    "operatingSystems"
})
public class OperatingSystemTargeting {

    // Whether the listed operating systems are targeted (true) or excluded
    // (false); null when the flag has not been set.
    protected Boolean isTargeted;
    // Lazily created list of operating-system technologies; see the getter.
    protected List<Technology> operatingSystems;

    /**
     * Returns the targeting flag, or {@code null} when it has not been set.
     */
    public Boolean isIsTargeted() {
        return this.isTargeted;
    }

    /**
     * Sets the targeting flag; passing {@code null} clears it.
     */
    public void setIsTargeted(Boolean value) {
        this.isTargeted = value;
    }

    /**
     * Returns the live, modifiable list of operating systems, creating an
     * empty list on first access. There is deliberately no setter — mutate the
     * returned list directly, e.g. {@code getOperatingSystems().add(newItem)};
     * changes are visible to JAXB because this is the backing list itself.
     */
    public List<Technology> getOperatingSystems() {
        if (this.operatingSystems == null) {
            this.operatingSystems = new ArrayList<Technology>();
        }
        return this.operatingSystems;
    }

}
| apache-2.0 |
alina-ipatina/pentaho-kettle | test/org/pentaho/di/trans/steps/update/UpdateTest.java | 14458 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.update;
import static org.junit.Assert.assertArrayEquals;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaInteger;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.trans.RowProducer;
import org.pentaho.di.trans.RowStepCollector;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.injector.InjectorMeta;
import junit.framework.TestCase;
/**
 * Integration test for the Update step: builds an injector -> update
 * transformation against an in-memory H2 table and checks which rows each
 * lookup-condition combination updates, with and without the skip-lookup
 * optimization.
 */
public class UpdateTest extends TestCase {

  /** Connection definition for the in-memory H2 database used by the test. */
  public static final String[] databasesXML = {
    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
      + "<connection>" + "<name>db</name>" + "<server>127.0.0.1</server>" + "<type>H2</type>"
      + "<access>Native</access>" + "<database>mem:db</database>" + "<port></port>" + "<username>sa</username>"
      + "<password></password>" + "</connection>", };

  public static final String TARGET_TABLE = "update_step_test_case_table";

  // Seed rows covering the key combinations: both keys null, one key null,
  // and both keys non-null. ROW_ORDER fixes the read-back order.
  private static String[] insertStatement = {
    // New rows for the source
    "INSERT INTO " + TARGET_TABLE + "(ID, CODE, VALUE, ROW_ORDER) " + "VALUES (NULL, NULL, 'null_id_code', 1)",
    "INSERT INTO " + TARGET_TABLE + "(ID, CODE, VALUE, ROW_ORDER) " + "VALUES (NULL, 1, 'null_id', 2)",
    "INSERT INTO " + TARGET_TABLE + "(ID, CODE, VALUE, ROW_ORDER) " + "VALUES (1, NULL, 'null_code', 3)",
    "INSERT INTO " + TARGET_TABLE + "(ID, CODE, VALUE, ROW_ORDER) " + "VALUES (2, 2, 'non_null_keys', 4)",
  };

  // this points to the transformation
  Trans trans;

  // this points to the update step being tested
  public UpdateMeta upd;

  // these are used to write and read rows in the test transformation
  public RowStepCollector rc;
  public RowProducer rp;

  // the database used for the transformation run
  public Database db;

  // returns the structure of the target table
  public RowMetaInterface getTargetTableRowMeta() {
    RowMetaInterface rm = new RowMeta();
    ValueMetaInterface[] valuesMeta =
      {
        new ValueMetaInteger( "ID", 8, 0 ),
        new ValueMetaInteger( "CODE", 8, 0 ),
        new ValueMetaString( "VALUE", 255, 0 ),
        new ValueMetaInteger( "ROW_ORDER", 8, 0 ), };
    for ( int i = 0; i < valuesMeta.length; i++ ) {
      rm.addValueMeta( valuesMeta[i] );
    }
    return rm;
  }

  /**
   * Appends one lookup-key line to the update step's configuration.
   * Input is in format {key, condition, stream, stream2}.
   */
  public void addLookup( String[] def ) {
    // make sure to initialize the step
    if ( upd.getKeyLookup() == null ) {
      upd.setKeyLookup( new String[0] );
      upd.setKeyCondition( new String[0] );
      upd.setKeyStream( new String[0] );
      upd.setKeyStream2( new String[0] );
    }

    int newLength = upd.getKeyLookup().length + 1;

    ArrayList<String> newKeyLookup = new ArrayList<String>( newLength );
    newKeyLookup.addAll( Arrays.asList( upd.getKeyLookup() ) );
    newKeyLookup.add( def[0] );
    upd.setKeyLookup( newKeyLookup.toArray( new String[0] ) );

    ArrayList<String> newKeyCondition = new ArrayList<String>( newLength );
    newKeyCondition.addAll( Arrays.asList( upd.getKeyCondition() ) );
    newKeyCondition.add( def[1] );
    upd.setKeyCondition( newKeyCondition.toArray( new String[0] ) );

    ArrayList<String> newKeyStream = new ArrayList<String>( newLength );
    newKeyStream.addAll( Arrays.asList( upd.getKeyStream() ) );
    newKeyStream.add( def[2] );
    upd.setKeyStream( newKeyStream.toArray( new String[0] ) );

    ArrayList<String> newKeyStream2 = new ArrayList<String>( newLength );
    newKeyStream2.addAll( Arrays.asList( upd.getKeyStream2() ) );
    newKeyStream2.add( def[3] );
    upd.setKeyStream2( newKeyStream2.toArray( new String[0] ) );
  }

  /**
   * Creates the target table, seeds it, and wires up an injector -> update
   * transformation ready for execution.
   */
  @Override
  @Before
  public void setUp() throws Exception {
    KettleEnvironment.init();

    /* SET UP TRANSFORMATION */

    // Create a new transformation...
    TransMeta transMeta = new TransMeta();
    transMeta.setName( "update test" );

    // Add the database connections
    for ( int i = 0; i < databasesXML.length; i++ ) {
      DatabaseMeta databaseMeta = new DatabaseMeta( databasesXML[i] );
      transMeta.addDatabase( databaseMeta );
    }
    DatabaseMeta dbInfo = transMeta.findDatabase( "db" );

    /* SET UP DATABASE */
    // Create target table
    db = new Database( transMeta, dbInfo );
    db.connect();

    String source = db.getCreateTableStatement( TARGET_TABLE, getTargetTableRowMeta(), null, false, null, true );
    db.execStatement( source );

    // populate target table
    for ( String sql : insertStatement ) {
      db.execStatement( sql );
    }

    /* SET UP TRANSFORMATION STEPS */

    PluginRegistry registry = PluginRegistry.getInstance();

    // create an injector step...
    String injectorStepName = "injector step";
    InjectorMeta im = new InjectorMeta();

    // Set the information of the injector.
    String injectorPid = registry.getPluginId( StepPluginType.class, im );
    StepMeta injectorStep = new StepMeta( injectorPid, injectorStepName, im );
    transMeta.addStep( injectorStep );

    // create the update step...
    String updateStepName = "update [" + TARGET_TABLE + "]";
    upd = new UpdateMeta();
    upd.setDatabaseMeta( transMeta.findDatabase( "db" ) );
    upd.setTableName( TARGET_TABLE );
    upd.setUpdateLookup( new String[] { "VALUE" } );
    upd.setUpdateStream( new String[] { "VALUE" } );
    upd.setErrorIgnored( true );

    String fromid = registry.getPluginId( StepPluginType.class, upd );
    StepMeta updateStep = new StepMeta( fromid, updateStepName, upd );
    updateStep.setDescription( "update data in table [" + TARGET_TABLE + "] on database [" + dbInfo + "]" );
    transMeta.addStep( updateStep );

    TransHopMeta hi = new TransHopMeta( injectorStep, updateStep );
    transMeta.addTransHop( hi );

    /* PREPARE TRANSFORMATION EXECUTION */

    trans = new Trans( transMeta );
    trans.prepareExecution( null );

    StepInterface si = trans.getStepInterface( updateStepName, 0 );
    rc = new RowStepCollector();
    si.addRowListener( rc );

    rp = trans.addRowProducer( injectorStepName, 0 );
  }

  /** Drops the target table and releases the connection after each test. */
  @Override
  @After
  public void tearDown() throws Exception {
    /* DROP THE TEST TABLE */
    if ( db != null ) {
      db.execStatement( "DROP TABLE " + TARGET_TABLE + ";" );
      db.disconnect();
    }
    db = null;
    upd = null;
    trans = null;
    rc = null;
    rp = null;
  }

  /** Builds one input row per seeded DB row, each carrying VALUE='updated'. */
  public List<RowMetaAndData> createMatchingDataRows() {
    RowMetaInterface rm = getTargetTableRowMeta();
    List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();
    list.add( new RowMetaAndData( rm, new Object[] { null, null, "updated" } ) );
    list.add( new RowMetaAndData( rm, new Object[] { null, 1L, "updated" } ) );
    list.add( new RowMetaAndData( rm, new Object[] { 1L, null, "updated" } ) );
    list.add( new RowMetaAndData( rm, new Object[] { 2L, 2L, "updated" } ) );
    return list;
  }

  // this method pumps rows to the update step;
  public void pumpMatchingRows() throws Exception {
    pumpRows( createMatchingDataRows() );
  }

  /**
   * Runs the transformation, feeding it the given rows, and fails the test if
   * the transformation reported any errors.
   */
  public void pumpRows( List<RowMetaAndData> inputList ) throws Exception {
    trans.startThreads();

    // add rows
    for ( RowMetaAndData rm : inputList ) {
      rp.putRow( rm.getRowMeta(), rm.getData() );
    }
    rp.finished();

    trans.waitUntilFinished();
    if ( trans.getErrors() > 0 ) {
      fail( "test transformation failed, check logs!" );
    }
  }

  /**
   * Reads back the VALUE column of all rows in ROW_ORDER order.
   * The ResultSet is closed in a finally block so a read failure does not leak
   * the underlying statement/cursor (the original version never closed it).
   */
  public String[] getDbRows() throws Exception {
    ResultSet rs = db.openQuery( "SELECT VALUE FROM " + TARGET_TABLE + " ORDER BY ROW_ORDER ASC;" );
    try {
      ArrayList<String> rows = new ArrayList<String>();
      while ( rs.next() ) {
        rows.add( rs.getString( "VALUE" ) );
      }
      return rows.toArray( new String[0] );
    } finally {
      db.closeQuery( rs );
    }
  }

  public void testUpdateEquals() throws Exception {
    addLookup( new String[] { "ID", "=", "ID", "" } );
    pumpMatchingRows();
    String[] rows = getDbRows();
    // now the 1,null and 2,2 record should have been updated
    String[] expected = { "null_id_code", "null_id", "updated", "updated" };
    assertArrayEquals( "Unexpected changes by update step", expected, rows );
  }

  public void testUpdateEqualsSkip() throws Exception {
    upd.setSkipLookup( true );
    testUpdateEquals();
  }

  public void testUpdateEqualsTwoKeys() throws Exception {
    addLookup( new String[] { "ID", "=", "ID", "" } );
    addLookup( new String[] { "CODE", "=", "CODE", "" } );
    pumpMatchingRows();
    String[] rows = getDbRows();
    // now the 2,2 record should have been updated
    String[] expected = { "null_id_code", "null_id", "null_code", "updated" };
    assertArrayEquals( "Unexpected changes by update step", expected, rows );
  }

  public void testUpdateEqualsTwoKeysSkip() throws Exception {
    upd.setSkipLookup( true );
    testUpdateEqualsTwoKeys();
  }

  public void testUpdateEqualsSupportsNull() throws Exception {
    addLookup( new String[] { "ID", "= ~NULL", "ID", "" } );
    pumpMatchingRows();
    String[] rows = getDbRows();
    // now all records should have been updated
    String[] expected = { "updated", "updated", "updated", "updated" };
    assertArrayEquals( "Unexpected changes by update step", expected, rows );
  }

  public void testUpdateEqualsSupportsNullSkip() throws Exception {
    upd.setSkipLookup( true );
    testUpdateEqualsSupportsNull();
  }

  public void testUpdateEqualsSupportsNullTwoKeys() throws Exception {
    addLookup( new String[] { "ID", "= ~NULL", "ID", "" } );
    addLookup( new String[] { "CODE", "= ~NULL", "CODE", "" } );
    pumpMatchingRows();
    String[] rows = getDbRows();
    // now all records should have been updated
    String[] expected = { "updated", "updated", "updated", "updated" };
    assertArrayEquals( "Unexpected changes by update step", expected, rows );
  }

  public void testUpdateEqualsSupportsNullTwoKeysSkip() throws Exception {
    upd.setSkipLookup( true );
    testUpdateEqualsSupportsNullTwoKeys();
  }

  public void testUpdateEqualsSupportsNullTwoKeysMixed() throws Exception {
    addLookup( new String[] { "ID", "= ~NULL", "ID", "" } );
    addLookup( new String[] { "CODE", "=", "CODE", "" } );
    pumpMatchingRows();
    String[] rows = getDbRows();
    // now [null,1], [2,2] records should have been updated
    String[] expected = { "null_id_code", "updated", "null_code", "updated" };
    assertArrayEquals( "Unexpected changes by update step", expected, rows );
  }

  public void testUpdateEqualsSupportsNullTwoKeysMixedSkip() throws Exception {
    upd.setSkipLookup( true );
    testUpdateEqualsSupportsNullTwoKeysMixed();
  }

  public void testUpdateIsNull() throws Exception {
    addLookup( new String[] { "CODE", "IS NULL", "", "" } );
    pumpMatchingRows();
    String[] rows = getDbRows();
    // now [null, null], [1,null] records should have been updated
    String[] expected = { "updated", "null_id", "updated", "non_null_keys" };
    assertArrayEquals( "Unexpected changes by update step", expected, rows );
  }

  public void testUpdateIsNullSkip() throws Exception {
    upd.setSkipLookup( true );
    testUpdateIsNull();
  }

  public void testUpdateIsNotNull() throws Exception {
    addLookup( new String[] { "CODE", "IS NOT NULL", "", "" } );
    pumpMatchingRows();
    String[] rows = getDbRows();
    // now [null, 1], [2,2] records should have been updated
    String[] expected = { "null_id_code", "updated", "null_code", "updated" };
    assertArrayEquals( "Unexpected changes by update step", expected, rows );
  }

  public void testUpdateIsNotNullSkip() throws Exception {
    upd.setSkipLookup( true );
    testUpdateIsNotNull();
  }

  public void testUpdateBetween() throws Exception {
    addLookup( new String[] { "ID", "BETWEEN", "ID", "CODE" } );
    pumpMatchingRows();
    String[] rows = getDbRows();
    // now [2,2] record should have been updated
    String[] expected = { "null_id_code", "null_id", "null_code", "updated" };
    assertArrayEquals( "Unexpected changes by update step", expected, rows );
  }

  public void testUpdateBetweenSkip() throws Exception {
    upd.setSkipLookup( true );
    testUpdateBetween();
  }

  public void testUpdateEqualsSupportsNullTwoKeysMixed2() throws Exception {
    addLookup( new String[] { "ID", "=", "ID", "" } );
    addLookup( new String[] { "CODE", "= ~NULL", "CODE", "" } );
    pumpMatchingRows();
    String[] rows = getDbRows();
    // now [1,null], [2,2] records should have been updated
    String[] expected = { "null_id_code", "null_id", "updated", "updated" };
    assertArrayEquals( "Unexpected changes by update step", expected, rows );
  }

  public void testUpdateEqualsSupportsNullTwoKeysMixed2Skip() throws Exception {
    upd.setSkipLookup( true );
    testUpdateEqualsSupportsNullTwoKeysMixed2();
  }
}
| apache-2.0 |
UniTime/unitime | JavaSource/org/unitime/timetable/spring/ldap/SpringLdapExternalUidTranslation.java | 3629 | /*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.spring.ldap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.ldap.core.ContextSource;
import org.springframework.ldap.core.DirContextOperations;
import org.springframework.security.ldap.SpringSecurityLdapTemplate;
import org.unitime.timetable.defaults.ApplicationProperty;
import org.unitime.timetable.interfaces.ExternalUidTranslation;
import org.unitime.timetable.spring.SpringApplicationContextHolder;
/**
* @author Tomas Muller
*/
public class SpringLdapExternalUidTranslation implements ExternalUidTranslation {
private static Log sLog = LogFactory.getLog(SpringLdapExternalUidTranslation.class);
public String translate(String uid, Source source, Source target) {
if (uid==null || source.equals(target)) return uid;
if (source.equals(Source.LDAP)) return uid2ext(uid);
if (target.equals(Source.LDAP)) return ext2uid(uid);
return uid;
}
public String uid2ext(String uid) {
String externalIdAttribute = ApplicationProperty.AuthenticationLdapIdAttribute.value();
if ("uid".equals(externalIdAttribute)) return uid; // Nothing to translate
try {
ContextSource source = (ContextSource)SpringApplicationContextHolder.getBean("unitimeLdapContextSource");
String query = ApplicationProperty.AuthenticationLdapLogin2UserId.value();
SpringSecurityLdapTemplate template = new SpringSecurityLdapTemplate(source);
DirContextOperations user = template.retrieveEntry(query.replaceAll("\\{0\\}", uid), new String[] {externalIdAttribute});
return user == null ? null : user.getStringAttribute(externalIdAttribute);
} catch (Exception e) {
sLog.warn("Unable to translate uid to " + externalIdAttribute + ": " + e.getMessage());
}
return null;
}
public String ext2uid(String externalUserId) {
String externalIdAttribute = ApplicationProperty.AuthenticationLdapIdAttribute.value();
if ("uid".equals(externalIdAttribute)) return externalUserId; // Nothing to translate
try {
ContextSource source = (ContextSource)SpringApplicationContextHolder.getBean("unitimeLdapContextSource");
String query = ApplicationProperty.AuthenticationLdapUserId2Login.value().replace("%", externalIdAttribute);
SpringSecurityLdapTemplate template = new SpringSecurityLdapTemplate(source);
DirContextOperations user = template.retrieveEntry(query.replaceAll("\\{0\\}", externalIdAttribute), new String[] {"uid"});
return user == null ? null : user.getStringAttribute("uid");
} catch (Exception e) {
sLog.warn("Unable to translate " + externalIdAttribute + " to uid: " + e.getMessage());
}
return null;
}
}
| apache-2.0 |
googlecodelabs/tv-recommendations | 6-final/src/main/java/com/example/android/tv/recommendations/model/Subscription.java | 2331 | /*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.example.android.tv.recommendations.model;
/** Contains the data about a channel that will be displayed on the launcher. */
public class Subscription {

    // Field names are significant: Gson (de)serializes this class by field name.
    private long channelId;
    private String name;
    private String description;
    private String appLinkIntentUri;
    private int channelLogo;

    /** No-arg constructor required by Gson. */
    public Subscription() {}

    private Subscription(
            String title, String summary, String intentUri, int logoResource) {
        this.name = title;
        this.description = summary;
        this.appLinkIntentUri = intentUri;
        this.channelLogo = logoResource;
    }

    /**
     * Static factory for a subscription; the channel id is assigned later via
     * {@link #setChannelId(long)} once the launcher channel has been created.
     */
    public static Subscription createSubscription(
            String name, String description, String appLinkIntentUri, int channelLogo) {
        return new Subscription(name, description, appLinkIntentUri, channelLogo);
    }

    public long getChannelId() {
        return channelId;
    }

    public void setChannelId(long channelId) {
        this.channelId = channelId;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getAppLinkIntentUri() {
        return appLinkIntentUri;
    }

    public void setAppLinkIntentUri(String appLinkIntentUri) {
        this.appLinkIntentUri = appLinkIntentUri;
    }

    public int getChannelLogo() {
        return channelLogo;
    }

    public void setChannelLogo(int channelLogo) {
        this.channelLogo = channelLogo;
    }
}
| apache-2.0 |
milleruntime/accumulo | test/src/main/java/org/apache/accumulo/test/compaction/NonCommittingExternalCompactionThriftClientHandler.java | 1994 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.test.compaction;
import org.apache.accumulo.core.clientImpl.thrift.ThriftSecurityException;
import org.apache.accumulo.core.dataImpl.thrift.TKeyExtent;
import org.apache.accumulo.core.securityImpl.thrift.TCredentials;
import org.apache.accumulo.core.tabletserver.thrift.TabletClientService;
import org.apache.accumulo.core.trace.thrift.TInfo;
import org.apache.accumulo.tserver.TabletServer;
import org.apache.accumulo.tserver.ThriftClientHandler;
import org.apache.thrift.TException;
/**
 * Test-only {@link ThriftClientHandler} whose external-compaction completion and
 * failure RPCs are deliberate no-ops, so the tablet server never commits (or
 * reacts to) external compaction results — presumably to let tests observe
 * compactions while they are still in flight (NOTE(review): confirm against the
 * tests that install this handler).
 */
public class NonCommittingExternalCompactionThriftClientHandler extends ThriftClientHandler
    implements TabletClientService.Iface {
  public NonCommittingExternalCompactionThriftClientHandler(TabletServer server) {
    super(server);
  }
  /** Intentionally ignores the "compaction finished" RPC instead of committing the result. */
  @Override
  public void compactionJobFinished(TInfo tinfo, TCredentials credentials,
      String externalCompactionId, TKeyExtent extent, long fileSize, long entries)
      throws ThriftSecurityException, TException {
    // do nothing
  }
  /** Intentionally ignores the "compaction failed" RPC instead of handling the failure. */
  @Override
  public void compactionJobFailed(TInfo tinfo, TCredentials credentials,
      String externalCompactionId, TKeyExtent extent) throws TException {
    // do nothing
  }
}
| apache-2.0 |
spring-projects/spring-boot | spring-boot-project/spring-boot-docs/src/main/java/org/springframework/boot/docs/data/sql/jooq/dslcontext/MyBean.java | 1344 | /*
* Copyright 2012-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.docs.data.sql.jooq.dslcontext;
import java.util.GregorianCalendar;
import java.util.List;
import org.jooq.DSLContext;
import org.springframework.stereotype.Component;
import static org.springframework.boot.docs.data.sql.jooq.dslcontext.Tables.AUTHOR;
@Component
public class MyBean {

	// jOOQ DSLContext auto-configured by Spring Boot; all queries go through it.
	private final DSLContext create;

	public MyBean(DSLContext dslContext) {
		this.create = dslContext;
	}

	// The tag::/end:: markers below delimit the snippet included in the
	// reference documentation; do not move or rename them.
	// tag::method[]
	public List<GregorianCalendar> authorsBornAfter1980() {
		// @formatter:off
		return this.create.selectFrom(AUTHOR)
				.where(AUTHOR.DATE_OF_BIRTH.greaterThan(new GregorianCalendar(1980, 0, 1)))
				.fetch(AUTHOR.DATE_OF_BIRTH);
		// @formatter:on
	} // end::method[]

}
| apache-2.0 |
qtvbwfn/dubbo | dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/registry/nacos/demo/provider/DemoServiceProviderXmlBootstrap.java | 1516 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.registry.nacos.demo.provider;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import java.io.IOException;
/**
 * XML-driven bootstrap for the
 * {@link org.apache.dubbo.config.spring.registry.nacos.demo.service.DemoService} provider demo.
 */
public class DemoServiceProviderXmlBootstrap {

    public static void main(String[] args) throws IOException {
        // Build the Spring context from the Dubbo/Nacos provider XML configuration.
        ClassPathXmlApplicationContext applicationContext = new ClassPathXmlApplicationContext();
        applicationContext.setConfigLocation("/META-INF/spring/dubbo-nacos-provider-context.xml");
        applicationContext.refresh();
        System.out.println("DemoService provider (XML) is starting...");
        // Block on stdin so the provider process stays alive until a key is pressed.
        System.in.read();
    }
}
| apache-2.0 |
ThangBK2009/android-source-browsing.platform--external--hsqldb | src/org/hsqldb/StatementSchemaDefinition.java | 8603 | /* Copyright (c) 2001-2010, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb;
import org.hsqldb.HsqlNameManager.HsqlName;
import org.hsqldb.error.Error;
import org.hsqldb.error.ErrorCode;
import org.hsqldb.lib.HsqlArrayList;
import org.hsqldb.result.Result;
/**
* Implementation of Statement for CREATE SCHEMA statements.<p>
*
* @author Fred Toussi (fredt@users dot sourceforge.net)
* @version 1.9.0
* @since 1.9.0
*/
public class StatementSchemaDefinition extends StatementSchema {
    // statements[0] is the CREATE SCHEMA statement itself; the remaining
    // entries are the schema-element statements (CREATE TABLE, GRANT, ...)
    // parsed from the schema definition body.
    StatementSchema[] statements;
    StatementSchemaDefinition(StatementSchema[] statements) {
        super();
        this.statements = statements;
    }
    /**
     * Executes the whole CREATE SCHEMA definition. Any throwable is converted
     * into an error Result, and the statement group/type is attached to the
     * error so clients can identify the failing statement class.
     */
    public Result execute(Session session) {
        Result result;
        try {
            result = getResult(session);
        } catch (Throwable t) {
            result = Result.newErrorResult(t, null);
        }
        if (result.isError()) {
            result.getException().setStatementType(group, type);
        }
        return result;
    }
    /**
     * Creates the schema, then executes each element statement inside it.
     * FOREIGN KEY constraints collected from CREATE TABLE statements are
     * deferred and added after all tables exist (forward references). On any
     * error the whole schema is dropped again (best-effort rollback). The
     * caller's current schema is restored before returning.
     */
    Result getResult(Session session) {
        HsqlName schemaDefinitionName = statements[0].getSchemaName();
        if (this.isExplain) {
            return Result.newSingleColumnStringResult("OPERATION",
                    describe(session));
        }
        StatementSchema cs;
        // execute the CREATE SCHEMA statement itself first
        Result result = statements[0].execute(session);
        // FK constraints deferred until all tables are created
        HsqlArrayList constraints = new HsqlArrayList();
        // helper statement used to write DDL to the database log manually,
        // since table creation below runs with isLogged = false
        StatementSchema log = new StatementSchema(null,
            StatementTypes.LOG_SCHEMA_STATEMENT, null);
        if (statements.length == 1 || result.isError()) {
            return result;
        }
        HsqlName oldSessionSchema = session.getCurrentSchemaHsqlName();
        for (int i = 1; i < statements.length; i++) {
            try {
                // switch into the new schema so unqualified names resolve there;
                // failure is deliberately ignored (best-effort)
                session.setSchema(schemaDefinitionName.name);
            } catch (HsqlException e) {}
            statements[i].setSchemaHsqlName(schemaDefinitionName);
            // element statements are re-parsed from their SQL text so they are
            // compiled in the context of the newly created schema
            session.parser.reset(statements[i].getSQL());
            try {
                session.parser.read();
                switch (statements[i].getType()) {
                    case StatementTypes.GRANT :
                    case StatementTypes.GRANT_ROLE :
                        result = statements[i].execute(session);
                        break;
                    case StatementTypes.CREATE_TABLE :
                        cs = session.parser.compileCreate();
                        cs.isSchemaDefinition = true;
                        cs.setSchemaHsqlName(schemaDefinitionName);
                        if (session.parser.token.tokenType
                                != Tokens.X_ENDPARSE) {
                            throw session.parser.unexpectedToken();
                        }
                        // suppress automatic logging; the table DDL is logged
                        // explicitly below, after stripping deferred FKs
                        cs.isLogged = false;
                        result = cs.execute(session);
                        HsqlName name = ((Table) cs.arguments[0]).getName();
                        Table table =
                            (Table) session.database.schemaManager
                                .getSchemaObject(name);
                        // collect FK constraints for deferred creation and
                        // clear them so execute() above did not apply them
                        constraints.addAll((HsqlArrayList) cs.arguments[1]);
                        ((HsqlArrayList) cs.arguments[1]).clear();
                        //
                        log.sql = table.getSQL();
                        log.execute(session);
                        break;
                    case StatementTypes.CREATE_ROLE :
                    case StatementTypes.CREATE_SEQUENCE :
                    case StatementTypes.CREATE_TYPE :
                    case StatementTypes.CREATE_CHARACTER_SET :
                    case StatementTypes.CREATE_COLLATION :
                        result = statements[i].execute(session);
                        break;
                    case StatementTypes.CREATE_INDEX :
                    case StatementTypes.CREATE_TRIGGER :
                    case StatementTypes.CREATE_VIEW :
                    case StatementTypes.CREATE_DOMAIN :
                    case StatementTypes.CREATE_ROUTINE :
                        cs = session.parser.compileCreate();
                        cs.isSchemaDefinition = true;
                        cs.setSchemaHsqlName(schemaDefinitionName);
                        if (session.parser.token.tokenType
                                != Tokens.X_ENDPARSE) {
                            throw session.parser.unexpectedToken();
                        }
                        result = cs.execute(session);
                        break;
                    case StatementTypes.CREATE_ASSERTION :
                    case StatementTypes.CREATE_TRANSFORM :
                    case StatementTypes.CREATE_TRANSLATION :
                    case StatementTypes.CREATE_CAST :
                    case StatementTypes.CREATE_ORDERING :
                        throw session.parser.unsupportedFeature();
                    default :
                        throw Error.runtimeError(ErrorCode.U_S0500, "");
                }
                if (result.isError()) {
                    break;
                }
            } catch (HsqlException e) {
                result = Result.newErrorResult(e, statements[i].getSQL());
                break;
            }
        }
        // second pass: add the deferred FOREIGN KEY constraints now that all
        // referenced tables exist, logging each explicitly
        if (!result.isError()) {
            try {
                for (int i = 0; i < constraints.size(); i++) {
                    Constraint c = (Constraint) constraints.get(i);
                    Table table =
                        session.database.schemaManager.getUserTable(session,
                            c.core.refTableName);
                    ParserDDL.addForeignKey(session, table, c, null);
                    log.sql = c.getSQL();
                    log.execute(session);
                }
            } catch (HsqlException e) {
                result = Result.newErrorResult(e, sql);
            }
        }
        // rollback: drop the partially created schema on any failure;
        // errors during cleanup are intentionally ignored
        if (result.isError()) {
            try {
                session.database.schemaManager.dropSchema(session,
                    schemaDefinitionName.name, true);
                session.database.logger.writeToLog(
                    session, getDropSchemaStatement(schemaDefinitionName));
            } catch (HsqlException e) {}
        }
        // always restore the caller's original schema (best-effort)
        try {
            session.setCurrentSchemaHsqlName(oldSessionSchema);
        } catch (Exception e) {}
        return result;
    }
    /*
    if (constraintList != null && constraintList.size() > 0) {
        try {
            for (int i = 0; i < constraintList.size(); i++) {
                Constraint c = (Constraint) constraintList.get(i);
                Table table = database.schemaManager.getUserTable(session,
                    c.core.refTableName);
                addForeignKey(table, c);
            }
        } finally {
            constraintList.clear();
        }
    }
    */
    /** Builds the DROP SCHEMA ... CASCADE statement used for rollback logging. */
    String getDropSchemaStatement(HsqlName schema) {
        return "DROP SCHEMA " + schema.statementName + " " + Tokens.T_CASCADE;
    }
}
| bsd-3-clause |
vmluan/dhis2-core | dhis-2/dhis-web/dhis-web-light/src/main/java/org/hisp/dhis/light/utils/NamebasedUtilsImpl.java | 6922 | package org.hisp.dhis.light.utils;
/*
* Copyright (c) 2004-2017, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.hisp.dhis.common.ValueType;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.mobile.service.ModelMapping;
import org.hisp.dhis.program.Program;
import org.hisp.dhis.program.ProgramService;
import org.hisp.dhis.program.ProgramStage;
import org.hisp.dhis.program.ProgramStageDataElement;
import org.hisp.dhis.program.ProgramStageInstance;
import org.hisp.dhis.program.ProgramStageService;
import org.hisp.dhis.system.util.MathUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
/**
 * Default implementation of {@link NamebasedUtils}: helper operations for
 * name-based (tracker) data entry in the light web module.
 */
public class NamebasedUtilsImpl
    implements NamebasedUtils
{
    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------
    private ProgramService programService;
    public void setProgramService( ProgramService programService )
    {
        this.programService = programService;
    }
    private ProgramStageService programStageService;
    public void setProgramStageService( ProgramStageService programStageService )
    {
        this.programStageService = programStageService;
    }
    /**
     * Returns the stage with the given id belonging to the given program, or
     * null if the program has no such stage.
     */
    @Override
    public ProgramStage getProgramStage( int programId, int programStageId )
    {
        Program program = programService.getProgram( programId );
        Collection<ProgramStage> stages = program.getProgramStages();
        for ( ProgramStage programStage : stages )
        {
            if ( programStage.getId() == programStageId )
            {
                return programStage;
            }
        }
        return null;
    }
    /**
     * Validates a raw value against the data element's value type.
     *
     * @return an i18n error key such as "is_invalid_number" when the value
     *         violates the type, or null when the value is acceptable.
     */
    @Override
    public String getTypeViolation( DataElement dataElement, String value )
    {
        ValueType valueType = dataElement.getValueType();
        if ( valueType.isText() )
        {
            // any string is a valid text value; fall through to return null
        }
        else if ( ValueType.BOOLEAN == valueType )
        {
            if ( !ValueUtils.isBoolean( value ) )
            {
                return "is_invalid_boolean";
            }
        }
        else if ( ValueType.DATE == valueType )
        {
            if ( !ValueUtils.isDate( value ) )
            {
                return "is_invalid_date";
            }
        }
        else if ( ValueType.NUMBER == valueType )
        {
            if ( !MathUtils.isNumeric( value ) )
            {
                return "is_invalid_number";
            }
        }
        else if ( ValueType.INTEGER == valueType )
        {
            if ( !MathUtils.isInteger( value ) )
            {
                return "is_invalid_integer";
            }
        }
        else if ( ValueType.INTEGER_POSITIVE == valueType )
        {
            if ( !MathUtils.isPositiveInteger( value ) )
            {
                return "is_invalid_positive_integer";
            }
        }
        else if ( ValueType.INTEGER_NEGATIVE == valueType )
        {
            if ( !MathUtils.isNegativeInteger( value ) )
            {
                return "is_invalid_negative_integer";
            }
        }
        else if ( ValueType.INTEGER_ZERO_OR_POSITIVE == valueType )
        {
            if ( !MathUtils.isZeroOrPositiveInteger( value ) )
            {
                return "is_invalid_zero_or_positive_integer";
            }
        }
        else if ( ValueType.COORDINATE == valueType )
        {
            if ( !MathUtils.isCoordinate( value ) )
            {
                return "is_invalid_coordinate";
            }
        }
        return null;
    }
    /**
     * Returns the first uncompleted stage instance in iteration order, or
     * null when every stage instance is completed.
     */
    @Override
    public ProgramStageInstance getNextStage( Set<ProgramStageInstance> programStageInstances )
    {
        for ( ProgramStageInstance programStageInstance : programStageInstances )
        {
            if ( !programStageInstance.isCompleted() )
            {
                return programStageInstance;
            }
        }
        return null;
    }
    /**
     * Maps all data elements of the given program stage to the mobile model.
     */
    @Override
    public List<org.hisp.dhis.api.mobile.model.DataElement> transformDataElementsToMobileModel( Integer programStageId )
    {
        ProgramStage programStage = programStageService.getProgramStage( programStageId );
        List<ProgramStageDataElement> programStageDataElements = new ArrayList<>( programStage.getProgramStageDataElements() );
        // Fixed: previous version pre-initialized the result with a throw-away
        // ArrayList that was immediately overwritten (dead store).
        return transformDataElementsToMobileModel( programStageDataElements );
    }
    /**
     * Maps program stage data elements to the mobile model, carrying over each
     * element's compulsory flag.
     */
    @Override
    public List<org.hisp.dhis.api.mobile.model.DataElement> transformDataElementsToMobileModel( List<ProgramStageDataElement> programStageDataElements )
    {
        List<org.hisp.dhis.api.mobile.model.DataElement> des = new ArrayList<>();
        for ( ProgramStageDataElement programStagedataElement : programStageDataElements )
        {
            DataElement dataElement = programStagedataElement.getDataElement();
            org.hisp.dhis.api.mobile.model.DataElement de = ModelMapping.getDataElement( dataElement );
            de.setCompulsory( programStagedataElement.isCompulsory() );
            des.add( de );
        }
        return des;
    }
}
| bsd-3-clause |
broadinstitute/hellbender | src/main/java/org/broadinstitute/hellbender/tools/copynumber/formats/records/CalledCopyRatioSegment.java | 1723 | package org.broadinstitute.hellbender.tools.copynumber.formats.records;
import org.broadinstitute.hellbender.utils.Utils;
public class CalledCopyRatioSegment extends CopyRatioSegment {

    /** Discrete copy-number call assigned to a copy-ratio segment. */
    public enum Call {
        AMPLIFICATION("+"),
        DELETION("-"),
        NEUTRAL("0");

        // symbol written to output files for this call
        private final String outputString;

        Call(final String outputString) {
            this.outputString = outputString;
        }

        public String getOutputString() {
            return outputString;
        }
    }

    private final Call call;

    /**
     * Wraps an existing segment with a non-null call, copying its interval,
     * point count, and mean log2 copy ratio.
     */
    public CalledCopyRatioSegment(final CopyRatioSegment segment,
                                  final Call call) {
        super(segment.getInterval(), segment.getNumPoints(), segment.getMeanLog2CopyRatio());
        this.call = Utils.nonNull(call);
    }

    public Call getCall() {
        return call;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        // exact-class comparison plus the superclass's field comparison
        if (o == null || getClass() != o.getClass() || !super.equals(o)) {
            return false;
        }
        return call == ((CalledCopyRatioSegment) o).call;
    }

    @Override
    public int hashCode() {
        // combine the superclass hash with the call, mirroring equals()
        int hash = super.hashCode();
        hash = 31 * hash + call.hashCode();
        return hash;
    }

    @Override
    public String toString() {
        return "CalledCopyRatioSegment{" +
                "interval=" + getInterval() +
                ", numPoints=" + getNumPoints() +
                ", meanLog2CopyRatio=" + getMeanLog2CopyRatio() +
                ", call=" + call +
                '}';
    }
}
| bsd-3-clause |
kleingeist/xtreemfs | java/servers/src/org/xtreemfs/common/clients/File.java | 19416 | /*
* Copyright (c) 2009-2011 by Bjoern Kolbeck,
* Zuse Institute Berlin
*
* Licensed under the BSD License, see LICENSE file for details.
*
*/
package org.xtreemfs.common.clients;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.xtreemfs.common.ReplicaUpdatePolicies;
import org.xtreemfs.foundation.json.JSONException;
import org.xtreemfs.foundation.json.JSONParser;
import org.xtreemfs.foundation.json.JSONString;
import org.xtreemfs.foundation.pbrpc.client.RPCAuthentication;
import org.xtreemfs.foundation.pbrpc.client.RPCResponse;
import org.xtreemfs.foundation.pbrpc.generatedinterfaces.RPC.UserCredentials;
import org.xtreemfs.pbrpc.generatedinterfaces.GlobalTypes.SYSTEM_V_FCNTL;
import org.xtreemfs.pbrpc.generatedinterfaces.MRC.Stat;
import org.xtreemfs.pbrpc.generatedinterfaces.OSD.xtreemfs_internal_get_file_sizeResponse;
/**
*
* @author bjko
*/
public class File {
public static final String XTREEMFSSET_REPL_UPDATE_POLICY_XATTR = "xtreemfs.set_repl_update_policy";
public static final String XTREEMFS_DEFAULT_RP = "xtreemfs.default_rp";
private final Volume volume;
private final String path;
private final UserCredentials userCreds;
File(Volume volume, UserCredentials userCreds, String path) {
this.volume = volume;
this.path = path;
this.userCreds = userCreds;
}
public String getPath() {
return path;
}
/**
* check if path is a file
* @param userCreds the user's credentials
* @see java.io.File
* @return true if it is a file, false otherwise (also if path does not exist)
*/
public boolean isFile(UserCredentials userCreds) throws IOException {
Stat stat = volume.stat(path, userCreds);
if (stat != null)
return (stat.getMode() & SYSTEM_V_FCNTL.SYSTEM_V_FCNTL_H_S_IFREG.getNumber()) > 0;
else
return false;
}
/**
* check if path is a file
* @see java.io.File
* @return true if it is a file, false otherwise (also if path does not exist)
*/
public boolean isFile() throws IOException {
return isFile(userCreds);
}
/**
* check if path is a directory
* @param userCreds the user's credentials
* @see java.io.File
* @return true if it is a directory, false otherwise (also if path does not exist)
*/
public boolean isDirectory(UserCredentials userCreds) throws IOException {
Stat stat = volume.stat(path, userCreds);
if (stat != null)
return (stat.getMode() & SYSTEM_V_FCNTL.SYSTEM_V_FCNTL_H_S_IFDIR.getNumber()) > 0;
else
return false;
}
/**
* check if path is a directory
* @see java.io.File
* @return true if it is a directory, false otherwise (also if path does not exist)
*/
public boolean isDirectory() throws IOException {
return isDirectory(userCreds);
}
/**
* check if path exists (file or directory)
* @param userCreds the user's credentials
* @see java.io.File
* @return true if it exists, false otherwise
*/
public boolean exists(UserCredentials userCreds) throws IOException {
try {
Stat stat = volume.stat(path, userCreds);
} catch (FileNotFoundException ex) {
return false;
}
return true;
}
/**
* check if path exists (file or directory)
* @see java.io.File
* @return true if it exists, false otherwise
*/
public boolean exists() throws IOException {
return exists(userCreds);
}
public boolean canRead(UserCredentials userCreds) throws IOException {
try {
Stat stat = volume.stat(path, userCreds);
return (stat.getMode() & 0400) > 0;
} catch (FileNotFoundException ex) {
return false;
}
}
public boolean canRead() throws IOException {
return canRead(userCreds);
}
public boolean canWrite(UserCredentials userCreds) throws IOException {
try {
Stat stat = volume.stat(path, userCreds);
return (stat.getMode() & 0200) > 0;
} catch (FileNotFoundException ex) {
return false;
}
}
public boolean canWrite() throws IOException {
return canWrite(userCreds);
}
public long lastModified(UserCredentials userCreds) throws IOException {
Stat stat = volume.stat(path, userCreds);
return stat.getMtimeNs()/1000000;
}
public long lastModified() throws IOException {
return lastModified(userCreds);
}
    /**
     * get file size
     * @param userCreds the user's credentials
     * @return the files size in bytes, or 0L if it does not exist
     * @throws IOException
     */
    public long length(UserCredentials userCreds) throws IOException {
        // if the volume is a snapshot, perform a size glimpse at the OSD
        if (volume.isSnapshot()) {
            RPCResponse<xtreemfs_internal_get_file_sizeResponse> fs = null;
            try {
                // ask the OSD of the first replica for the actual on-disk size
                RandomAccessFile file = volume.openFile(this, SYSTEM_V_FCNTL.SYSTEM_V_FCNTL_H_O_RDONLY.getNumber(), 0, userCreds);
                fs = volume.osdClient.xtreemfs_internal_get_file_size(getReplica(0).getOSDAddress(0), RPCAuthentication.authNone, RPCAuthentication.userService,
                        file.getCredentials(), file.getFileId());
                return fs.get().getFileSize();
            } catch (Exception exc) {
                // NOTE(review): all errors are swallowed and reported as size 0;
                // printStackTrace should arguably be replaced by proper logging
                // and/or rethrowing — confirm callers rely on the 0 fallback.
                exc.printStackTrace();
                return 0;
            } finally {
                // RPC buffers must always be released
                if (fs != null)
                    fs.freeBuffers();
            }
        }
        // otherwise, fetch the file size from the MRC
        else {
            Stat stat = volume.stat(path, userCreds);
            if (stat != null) {
                return stat.getSize();
            } else
                return 0L;
        }
    }
/**
* get file size
* @return the files size in bytes, or 0L if it does not exist
* @throws IOException
*/
public long length() throws IOException {
return length(userCreds);
}
public void mkdir(int permissions, UserCredentials userCreds) throws IOException {
volume.mkdir(path, permissions, userCreds);
}
public void mkdir(int permissions) throws IOException {
mkdir(permissions, userCreds);
}
public void createFile(UserCredentials userCreds) throws IOException {
volume.touch(path, userCreds);
}
public void createFile() throws IOException {
createFile(userCreds);
}
public Stat stat(UserCredentials userCreds) throws IOException {
return volume.stat(path, userCreds);
}
public Stat stat() throws IOException {
return stat(userCreds);
}
public void renameTo(File dest, UserCredentials userCreds) throws IOException {
volume.rename(this.path,dest.path, userCreds);
}
public void renameTo(File dest) throws IOException {
renameTo(dest, userCreds);
}
public void delete(UserCredentials userCreds) throws IOException {
volume.unlink(this.path, userCreds);
}
public void delete() throws IOException {
delete(userCreds);
}
public String getxattr(String name, UserCredentials userCreds) throws IOException {
return volume.getxattr(path, name, userCreds);
}
public String getxattr(String name) throws IOException {
return getxattr(name, userCreds);
}
public String[] listXAttrs(UserCredentials userCreds) throws IOException {
return volume.listxattr(path, userCreds);
}
public String[] listXAttrs() throws IOException {
return listXAttrs(userCreds);
}
public void setxattr(String name, String value, UserCredentials userCreds) throws IOException {
volume.setxattr(path, name, value, userCreds);
}
public void setxattr(String name, String value) throws IOException {
setxattr(name, value, userCreds);
}
public void chmod(int mode, UserCredentials userCreds) throws IOException {
volume.chmod(path, mode, userCreds);
}
public void chmod(int mode) throws IOException {
chmod(mode, userCreds);
}
public void chown(String user, UserCredentials userCreds) throws IOException {
volume.chown(path, user, userCreds);
}
public void chown(String user) throws IOException {
chown(user, userCreds);
}
public void chgrp(String group, UserCredentials userCreds) throws IOException {
volume.chgrp(path, group, userCreds);
}
public void chgrp(String group) throws IOException {
chgrp(group, userCreds);
}
public void setACL(Map<String, Object> aclEntries, UserCredentials userCreds) throws IOException {
volume.setACL(path, aclEntries, userCreds);
}
public void setACL(Map<String, Object> aclEntries) throws IOException {
setACL(aclEntries, userCreds);
}
public Map<String, Object> getACL(UserCredentials userCreds) throws IOException {
return volume.getACL(path, userCreds);
}
public Map<String, Object> getACL() throws IOException {
return getACL(userCreds);
}
    /**
     * Opens the file using a stdio-like mode string.
     * Recognized mode characters: "rw" opens read/write and creates the file
     * if missing; "r" (without "w") opens read-only; "t" truncates; "d" or
     * "s" requests synchronous (O_SYNC) I/O.
     * @param openMode combination of the mode characters above
     * @param permissions POSIX permission bits applied if the file is created
     * @param userCreds the user's credentials
     * @return a RandomAccessFile handle for the opened file
     * @throws IOException if the open operation fails
     */
    public RandomAccessFile open(String openMode, int permissions, UserCredentials userCreds) throws IOException {
        int flags = 0;
        if (openMode.contains("rw")) {
            flags |= SYSTEM_V_FCNTL.SYSTEM_V_FCNTL_H_O_RDWR.getNumber();
            flags |= SYSTEM_V_FCNTL.SYSTEM_V_FCNTL_H_O_CREAT.getNumber();
        } else if (openMode.contains("r")) {
            flags |= SYSTEM_V_FCNTL.SYSTEM_V_FCNTL_H_O_RDONLY.getNumber();
        }
        if (openMode.contains("t")) {
            flags |= SYSTEM_V_FCNTL.SYSTEM_V_FCNTL_H_O_TRUNC.getNumber();
        }
        if (openMode.contains("d") || openMode.contains("s")) {
            flags |= SYSTEM_V_FCNTL.SYSTEM_V_FCNTL_H_O_SYNC.getNumber();
        }
        return volume.openFile(this, flags, permissions, userCreds);
    }
public RandomAccessFile open(String openMode, int permissions) throws IOException {
return open(openMode, permissions, userCreds);
}
public int getNumReplicas(UserCredentials userCreds) throws IOException {
try {
Map<String,Object> xloc = getLocations(userCreds);
List<Map<String,Object>> replicas = (List<Map<String, Object>>) xloc.get("replicas");
return replicas.size();
} catch (ClassCastException ex) {
throw new IOException("cannot parse file's location list: "+ex,ex);
}
}
public int getNumReplicas() throws IOException {
return getNumReplicas(userCreds);
}
public Replica getReplica(int replicaNo, UserCredentials userCreds) throws IOException {
try {
Map<String,Object> xloc = getLocations(userCreds);
List<Map<String,Object>> replicas = (List<Map<String, Object>>) xloc.get("replicas");
if (replicas.size() <= replicaNo)
throw new IllegalArgumentException("replicaNo is out of bounds");
return new Replica(this,replicas.get(replicaNo),userCreds);
} catch (JSONException ex) {
throw new IOException("cannot parse file's location list: "+ex,ex);
} catch (ClassCastException ex) {
throw new IOException("cannot parse file's location list: "+ex,ex);
}
}
public Replica getReplica(int replicaNo) throws IOException {
return getReplica(replicaNo, userCreds);
}
public Replica getReplica(String osdUUID, UserCredentials userCreds) throws IOException {
Replica[] repls = getReplicas(userCreds);
for (Replica r : repls) {
for (int i = 0; i < r.getStripeWidth(); i++) {
if (r.getOSDUuid(i).equals(osdUUID))
return r;
}
}
return null;
}
public Replica getReplica(String osdUUID) throws IOException {
return getReplica(osdUUID, userCreds);
}
public Replica[] getReplicas(UserCredentials userCreds) throws IOException {
try {
Map<String,Object> xloc = getLocations(userCreds);
List<Map<String,Object>> replicas = (List<Map<String, Object>>) xloc.get("replicas");
Replica[] repls = new Replica[replicas.size()];
for (int i = 0; i < repls.length; i++)
repls[i] = new Replica(this,replicas.get(i),userCreds);
return repls;
} catch (JSONException ex) {
throw new IOException("cannot parse file's location list",ex);
} catch (ClassCastException ex) {
throw new IOException("cannot parse file's location list",ex);
}
}
public Replica[] getReplicas() throws IOException {
return getReplicas(userCreds);
}
public void setDefaultReplication(String policy, int numReplicas, UserCredentials userCreds) throws IOException {
String JSON = "{ \"update-policy\" : \""+policy+"\", \"replication-factor\" : "+numReplicas+" }";
if (!isDirectory())
throw new IOException("only diretories (including root) have a default replication policy");
volume.setxattr(path, XTREEMFS_DEFAULT_RP, JSON, userCreds);
}
public void setDefaultReplication(String policy, int numReplicas) throws IOException {
setDefaultReplication(policy, numReplicas, userCreds);
}
public boolean isReadOnlyReplicated(UserCredentials userCreds) throws IOException {
try {
Map<String,Object> xloc = getLocations(userCreds);
String uPolicy = (String) xloc.get("update-policy");
return uPolicy.equals(ReplicaUpdatePolicies.REPL_UPDATE_PC_RONLY);
} catch (ClassCastException ex) {
throw new IOException("cannot parse file's location list",ex);
}
}
public boolean isReadOnlyReplicated() throws IOException {
return isReadOnlyReplicated(userCreds);
}
public void setReadOnly(boolean mode, UserCredentials userCreds) throws Exception {
boolean currentMode = Boolean.valueOf(getxattr("xtreemfs.read_only"));
if (currentMode == mode)
return;
if (mode) {
//make sure the file is not open!
//open file
RandomAccessFile raf = open("r", 0, userCreds);
//fetch file sizes
long osd_file_size = raf.getFileSizeOnOSD();
long mrc_file_size = length(userCreds);
//update file size if incorrect on MRC
if (osd_file_size != mrc_file_size) {
raf.forceFileSize(osd_file_size);
}
setxattr("xtreemfs.read_only", "true");
} else {
if (getNumReplicas() > 1)
throw new IOException("File has still replicas.");
else {
// set read only
setxattr("xtreemfs.read_only", "false");
}
}
}
public void setReadOnly(boolean mode) throws Exception {
setReadOnly(mode, userCreds);
}
public boolean isReadOnly(UserCredentials userCreds) throws IOException {
return Boolean.valueOf(getxattr("xtreemfs.read_only", userCreds));
}
public boolean isReadOnly() throws IOException {
return isReadOnly(userCreds);
}
public boolean isReplicated(UserCredentials userCreds) throws IOException {
Map<String,Object> l = getLocations(userCreds);
String updatePolicy = (String)l.get("update-policy");
return !updatePolicy.equals(ReplicaUpdatePolicies.REPL_UPDATE_PC_NONE);
}
public boolean isReplicated() throws IOException {
return isReplicated(userCreds);
}
/**
 * Asks the MRC for OSDs suitable to host new replicas of this file.
 *
 * @param numOSDs maximum number of OSD UUIDs to return
 * @param userCreds credentials used for the request
 * @return the suitable OSD UUIDs as an array
 * @throws IOException if the request fails
 */
public String[] getSuitableOSDs(int numOSDs, UserCredentials userCreds) throws IOException {
    List<String> suitable = volume.getSuitableOSDs(this, numOSDs, userCreds);
    return suitable.toArray(new String[suitable.size()]);
}

/** Convenience overload using the cached credentials. */
public String[] getSuitableOSDs(int numOSDs) throws IOException {
    return getSuitableOSDs(numOSDs, userCreds);
}
/**
 * Adds a replica striped over {@code width} OSDs to this file.
 * Only the first {@code width} entries of {@code osdUuids} are used.
 *
 * @param width number of OSDs the new replica is striped over (must be &gt;= 0)
 * @param osdUuids candidate OSD UUIDs; must contain at least {@code width} entries
 * @param flags replication flags passed through to the volume
 * @param userCreds credentials used for the MRC request
 * @throws IOException if the replica cannot be added
 * @throws IllegalArgumentException if fewer than {@code width} OSDs are given
 */
public void addReplica(int width, String[] osdUuids, int flags, UserCredentials userCreds) throws IOException {
    // fail fast instead of discovering the shortfall after filling the list
    // (also turns the NPE on a null array into a clear IllegalArgumentException)
    if (width < 0 || osdUuids == null || osdUuids.length < width) {
        throw new IllegalArgumentException("number of OSDs must be equal to width!");
    }
    // typed list (the original used a raw ArrayList), sized exactly to width
    List<String> osdSet = new ArrayList<String>(width);
    for (int i = 0; i < width; i++) {
        osdSet.add(osdUuids[i]);
    }
    volume.addReplica(this, width, osdSet, flags, userCreds);
}

/** Same as {@link #addReplica(int, String[], int, UserCredentials)}, using the cached credentials. */
public void addReplica(int width, String[] osdUuids, int flags) throws IOException {
    addReplica(width, osdUuids, flags, userCreds);
}
/**
 * Sets this file's replica update policy via the dedicated xattr.
 *
 * @param policy the new replica update policy name
 * @param userCreds credentials used for the MRC request
 * @throws IOException if the attribute cannot be written
 */
public void setReplicaUpdatePolicy(String policy, UserCredentials userCreds) throws IOException {
    volume.setxattr(this.getPath(), XTREEMFSSET_REPL_UPDATE_POLICY_XATTR, policy, userCreds);
}

/** Same as the two-argument overload, using the cached credentials. */
public void setReplicaUpdatePolicy(String policy) throws IOException {
    setReplicaUpdatePolicy(policy, userCreds);
}
/**
 * Returns this file's replica update policy as stored in its x-locations list.
 *
 * @param userCreds credentials used to query the x-locations list
 * @return the "update-policy" entry of the location list (may be null if absent)
 * @throws IOException if the location list cannot be retrieved or parsed
 */
public String getReplicaUpdatePolicy(UserCredentials userCreds) throws IOException {
    try {
        // reuse getLocations() instead of duplicating its JSON retrieval/parsing;
        // empty or malformed lists raise the same IOExceptions as before
        return (String) getLocations(userCreds).get("update-policy");
    } catch (ClassCastException ex) {
        throw new IOException("cannot parse file's location list", ex);
    }
}

/** Same as {@link #getReplicaUpdatePolicy(UserCredentials)}, using the cached credentials. */
public String getReplicaUpdatePolicy() throws IOException {
    return getReplicaUpdatePolicy(userCreds);
}
/**
 * Retrieves and parses this file's x-locations list ("xtreemfs.locations" xattr).
 *
 * @param userCreds credentials used to read the attribute
 * @return the location list parsed from JSON (keys such as "update-policy")
 * @throws IOException if the attribute is empty/missing or is not valid JSON
 */
Map<String,Object> getLocations(UserCredentials userCreds) throws IOException {
    try {
        String loc = this.volume.getxattr(this.getPath(), "xtreemfs.locations", userCreds);
        if ( (loc != null) && (loc.length() > 0) ) {
            // the xattr value is a JSON object describing all replica locations
            return (Map<String, Object>) JSONParser.parseJSON(new JSONString(loc));
        } else {
            throw new IOException("cannot retrieve file's location list (is empty)");
        }
    } catch (JSONException ex) {
        // malformed JSON
        throw new IOException("cannot parse file's location list",ex);
    } catch (ClassCastException ex) {
        // top-level JSON value was not an object
        throw new IOException("cannot parse file's location list",ex);
    }
}
/**
 * Removes the replica identified by its head OSD from this file.
 *
 * @param headOSDuuid UUID of the head OSD of the replica to remove
 * @param userCreds credentials used for the request
 * @throws IOException if this object is not a file or removal fails
 */
void removeReplica(String headOSDuuid, UserCredentials userCreds) throws IOException {
    if (this.isFile()) {
        volume.removeReplica(this, headOSDuuid, userCreds);
    } else {
        throw new IOException("cannot remove replica from a non-file object");
    }
}
}
| bsd-3-clause |
chrisrico/XChange | xchange-dsx/src/test/java/org/knowm/xchange/dsx/dto/trade/DSXFeesJSONTest.java | 933 | package org.knowm.xchange.dsx.dto.trade;
import static org.assertj.core.api.Assertions.assertThat;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.InputStream;
import org.junit.Test;
/** @author Mikhail Wall */
/** Deserialization test for the DSX fees response DTOs. */
public class DSXFeesJSONTest {

    @Test
    public void testGetFees() throws IOException {
        // try-with-resources: the original left the classpath resource stream open
        try (InputStream is =
                DSXFeesJSONTest.class.getResourceAsStream(
                        "/org/knowm/xchange/dsx/dto/trade/example-fees-data.json")) {
            ObjectMapper mapper = new ObjectMapper();
            DSXFeesReturn fees = mapper.readValue(is, DSXFeesReturn.class);
            DSXFeesResult result = fees.getReturnValue();
            // expected values come from the example-fees-data.json fixture
            assertThat(result.getProgressiveCommissions().getCurrency()).isEqualTo("USD");
            assertThat(result.getProgressiveCommissions().getCommissions().length).isEqualTo(7);
            assertThat(result.getProgressiveCommissions().getIndexOfCurrentCommission()).isEqualTo(0);
        }
    }
}
| mit |
sake/bouncycastle-java | test/src/org/bouncycastle/pqc/crypto/test/McElieceKobaraImaiCipherTest.java | 3574 | package org.bouncycastle.pqc.crypto.test;
import java.security.SecureRandom;
import java.util.Random;
import org.bouncycastle.crypto.AsymmetricCipherKeyPair;
import org.bouncycastle.crypto.Digest;
import org.bouncycastle.crypto.digests.SHA256Digest;
import org.bouncycastle.crypto.params.ParametersWithRandom;
import org.bouncycastle.pqc.crypto.mceliece.McElieceCCA2KeyGenerationParameters;
import org.bouncycastle.pqc.crypto.mceliece.McElieceCCA2KeyPairGenerator;
import org.bouncycastle.pqc.crypto.mceliece.McElieceCCA2Parameters;
import org.bouncycastle.pqc.crypto.mceliece.McElieceKobaraImaiCipher;
import org.bouncycastle.pqc.crypto.mceliece.McElieceKobaraImaiDigestCipher;
import org.bouncycastle.util.test.SimpleTest;
/**
 * Round-trip test for the McEliece Kobara-Imai digest cipher: generates CCA2
 * key pairs and checks that decryption reproduces the SHA-256 digest of the
 * original random message.
 */
public class McElieceKobaraImaiCipherTest
    extends SimpleTest
{
    SecureRandom keyRandom = new SecureRandom();

    public String getName()
    {
        return "McElieceKobaraImai";
    }

    public void performTest()
    {
        int numPassesKPG = 1;      // key-pair generations
        int numPassesEncDec = 10;  // encrypt/decrypt rounds per key pair
        Random rand = new Random();
        byte[] mBytes;
        for (int j = 0; j < numPassesKPG; j++)
        {
            // generate a fresh CCA2 key pair with default parameters
            McElieceCCA2Parameters params = new McElieceCCA2Parameters();
            McElieceCCA2KeyPairGenerator mcElieceCCA2KeyGen = new McElieceCCA2KeyPairGenerator();
            McElieceCCA2KeyGenerationParameters genParam = new McElieceCCA2KeyGenerationParameters(keyRandom, params);
            mcElieceCCA2KeyGen.init(genParam);
            AsymmetricCipherKeyPair pair = mcElieceCCA2KeyGen.generateKeyPair();

            ParametersWithRandom param = new ParametersWithRandom(pair.getPublic(), keyRandom);
            Digest msgDigest = new SHA256Digest();
            McElieceKobaraImaiDigestCipher mcElieceKobaraImaiDigestCipher = new McElieceKobaraImaiDigestCipher(new McElieceKobaraImaiCipher(), msgDigest);

            for (int k = 1; k <= numPassesEncDec; k++)
            {
                System.out.println("############### test: " + k);
                // initialize for encryption
                mcElieceKobaraImaiDigestCipher.init(true, param);
                // generate random message of 1..32 bytes
                int mLength = (rand.nextInt() & 0x1f) + 1;
                mBytes = new byte[mLength];
                rand.nextBytes(mBytes);
                // encrypt
                mcElieceKobaraImaiDigestCipher.update(mBytes, 0, mBytes.length);
                byte[] enc = mcElieceKobaraImaiDigestCipher.messageEncrypt();
                // initialize for decryption
                mcElieceKobaraImaiDigestCipher.init(false, pair.getPrivate());
                byte[] constructedmessage = mcElieceKobaraImaiDigestCipher.messageDecrypt(enc);
                // XXX write in McElieceFujisakiDigestCipher?
                msgDigest.update(mBytes, 0, mBytes.length);
                byte[] hash = new byte[msgDigest.getDigestSize()];
                msgDigest.doFinal(hash, 0);
                // Compare the digest with the decrypted message. Guard against a
                // null/short result so a bad decryption fails the test instead of
                // throwing ArrayIndexOutOfBoundsException.
                boolean verified = constructedmessage != null && constructedmessage.length >= hash.length;
                for (int i = 0; verified && i < hash.length; i++)
                {
                    verified = hash[i] == constructedmessage[i];
                }
                if (!verified)
                {
                    fail("en/decryption fails");
                }
                else
                {
                    System.out.println("test okay");
                    System.out.println();
                }
            }
        }
    }

    public static void main(
        String[] args)
    {
        runTest(new McElieceKobaraImaiCipherTest());
    }
}
| mit |
gazarenkov/che-sketch | ide/commons-gwt/src/main/java/org/eclipse/che/ide/commons/exception/ServerException.java | 3502 | /*******************************************************************************
* Copyright (c) 2012-2017 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.ide.commons.exception;
import org.eclipse.che.ide.rest.HTTPHeader;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.http.client.Response;
import java.util.HashMap;
import java.util.Map;
/**
* @author Vitaliy Gulyy
*/
@SuppressWarnings("serial")
public class ServerException extends Exception {

    /** Raw HTTP response that caused this exception. */
    private Response response;

    /** Message parsed from the JSON body or supplied by the caller; may be null. */
    private String message = "";

    /** Application error code parsed from the JSON body, or -1 if absent. */
    private int errorCode;

    /** Extra attributes from the JSON body (population currently disabled below). */
    private Map<String, String> attributes = new HashMap<>();

    /** True if the server flagged the body as an intentional error message. */
    private boolean errorMessageProvided;

    public ServerException(Response response) {
        this.response = response;
        this.errorMessageProvided = checkErrorMessageProvided();
        this.message = getMessageFromJSON(response.getText());
        this.errorCode = getErrorCodeFromJSON(response.getText());
        // parseJsonAttributes(response.getText());
    }

    public ServerException(Response response, String message) {
        this.response = response;
        this.message = message;
        // NOTE(review): errorCode/errorMessageProvided are not derived here,
        // unlike in the single-argument constructor — confirm this is intended.
    }

    public int getHTTPStatus() {
        return response.getStatusCode();
    }

    public String getStatusText() {
        return response.getStatusText();
    }

    /**
     * Returns the parsed/supplied message; falls back to the raw response body,
     * or to the HTTP status text when the body is empty.
     */
    @Override
    public String getMessage() {
        if (message != null) {
            return message;
        }
        if (response.getText().isEmpty())
            return response.getStatusText();
        else
            return response.getText();
    }

    public int getErrorCode() {
        return errorCode;
    }

    public Map<String, String> getAttributes() {
        return attributes;
    }

    @Override
    public String toString() {
        return getMessage();
    }

    // JSNI: extract "message" from the JSON body; null if the body is not JSON.
    private native String getMessageFromJSON(String json) /*-{
        try {
            return JSON.parse(json).message;
        } catch (e) {
            return null;
        }
    }-*/;

    // JSNI: extract "errorCode" from the JSON body; -1 if absent or not JSON.
    private native int getErrorCodeFromJSON(String json) /*-{
        try {
            var result = JSON.parse(json).errorCode;
            if (result) {
                return result;
            }
        } catch (e) {
        }
        return -1;
    }-*/;

    public String getHeader(String key) {
        return response.getHeader(key);
    }

    private boolean checkErrorMessageProvided() {
        // simplified from an if/return-true/return-false chain
        return response.getHeader(HTTPHeader.JAXRS_BODY_PROVIDED) != null;
    }

    //    private native void parseJsonAttributes(String json) /*-{
    //        try {
    //            var attributes = JSON.parse(json).attributes;
    //            for(var key in attributes) {
    //                this.@org.eclipse.che.ide.commons.exception.ServerException.attributes::put(Ljava/lang/String;Ljava/lang/String;)(key, attributes[key]);
    //            }
    //
    //        } catch (e) {
    //            console.log(e.message, e);
    //        }
    //    }-*/;

    public boolean isErrorMessageProvided() {
        return errorMessageProvided;
    }
}
| epl-1.0 |
akervern/che | core/che-core-api-core/src/main/java/org/eclipse/che/api/core/rest/shared/dto/ServiceError.java | 1024 | /*
* Copyright (c) 2012-2018 Red Hat, Inc.
* This program and the accompanying materials are made
* available under the terms of the Eclipse Public License 2.0
* which is available at https://www.eclipse.org/legal/epl-2.0/
*
* SPDX-License-Identifier: EPL-2.0
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
package org.eclipse.che.api.core.rest.shared.dto;
import org.eclipse.che.dto.shared.DTO;
/**
* Describes error which may be serialized to JSON format with {@link
* org.eclipse.che.api.core.rest.ApiExceptionMapper}
*
* @author <a href="mailto:andrew00x@gmail.com">Andrey Parfonov</a>
* @see org.eclipse.che.api.core.ApiException
* @see org.eclipse.che.api.core.rest.ApiExceptionMapper
*/
@DTO
public interface ServiceError {
    /**
     * Get error message.
     *
     * @return error message
     */
    String getMessage();

    /** Fluent variant of {@code setMessage}: sets the message and returns this instance. */
    ServiceError withMessage(String message);

    /**
     * Set error message.
     *
     * @param message error message
     */
    void setMessage(String message);
}
| epl-1.0 |
impulze/SOS | coding/json/src/test/java/org/n52/sos/encode/json/impl/OwsExceptionReportEncoderTest.java | 3722 | /**
* Copyright (C) 2012-2015 52°North Initiative for Geospatial Open Source
* Software GmbH
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation.
*
* If the program is linked with libraries which are licensed under one of
* the following licenses, the combination of the program with the linked
* library is not considered a "derivative work" of the program:
*
* - Apache License, version 2.0
* - Apache Software License, version 1.0
* - GNU Lesser General Public License, version 3
* - Mozilla Public License, versions 1.0, 1.1 and 2.0
* - Common Development and Distribution License (CDDL), version 1.0
*
* Therefore the distribution of the program linked with libraries licensed
* under the aforementioned licenses, is permitted by the copyright holders
* if the distribution is compliant with both the GNU General Public
* License version 2 and the aforementioned licenses.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
* Public License for more details.
*/
package org.n52.sos.encode.json.impl;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.n52.sos.coding.json.JSONConstants.EXCEPTIONS;
import static org.n52.sos.coding.json.JSONConstants.LOCATOR;
import static org.n52.sos.coding.json.JSONConstants.TEXT;
import static org.n52.sos.coding.json.JSONConstants.VERSION;
import static org.n52.sos.coding.json.matchers.Does.does;
import static org.n52.sos.coding.json.matchers.JSONMatchers.arrayOfLength;
import static org.n52.sos.coding.json.matchers.JSONMatchers.equalTo;
import static org.n52.sos.coding.json.matchers.JSONMatchers.exist;
import static org.n52.sos.coding.json.matchers.JSONMatchers.isObject;
import static org.n52.sos.coding.json.matchers.ValidationMatchers.instanceOf;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ErrorCollector;
import org.n52.sos.coding.json.SchemaConstants;
import org.n52.sos.encode.json.JSONEncodingException;
import org.n52.sos.exception.ows.concrete.EncoderResponseUnsupportedException;
import com.fasterxml.jackson.databind.JsonNode;
/**
* TODO JavaDoc
*
* @author Christian Autermann <c.autermann@52north.org>
*
* @since 4.0.0
*/
public class OwsExceptionReportEncoderTest {
    // encoder under test; recreated before each test in setUp()
    private OwsExceptionReportEncoder enc;

    // collects all checkThat failures and reports them together at the end
    @Rule
    public final ErrorCollector e = new ErrorCollector();

    @Before
    public void setUp() {
        enc = new OwsExceptionReportEncoder();
    }

    /**
     * Encodes an exception without a cause and verifies the JSON report:
     * schema-valid, correct version, exactly one exception entry with the
     * expected text and no locator.
     */
    @Test
    public void testExceptionWithoutCause() throws JSONEncodingException {
        final EncoderResponseUnsupportedException owse = new EncoderResponseUnsupportedException();
        owse.setVersion("2.0.0");
        final JsonNode json = enc.encodeJSON(owse);
        assertThat(json, is(notNullValue()));
        // message text comes from EncoderResponseUnsupportedException itself
        final String message = "The encoder response is not supported!";
        e.checkThat(json, is(instanceOf(SchemaConstants.Common.EXCEPTION_REPORT)));
        e.checkThat(json.path(VERSION), is(equalTo("2.0.0")));
        e.checkThat(json.path(EXCEPTIONS), is(arrayOfLength(1)));
        e.checkThat(json.path(EXCEPTIONS).path(0), isObject());
        // no cause was set, so no locator is expected in the entry
        e.checkThat(json.path(EXCEPTIONS).path(0).path(LOCATOR), does(not(exist())));
        e.checkThat(json.path(EXCEPTIONS).path(0).path(TEXT), is(equalTo(message)));
    }
}
| gpl-2.0 |
pplatek/adempiere | base/src/org/compiere/model/X_C_AcctProcessorLog.java | 6313 | /******************************************************************************
* Product: Adempiere ERP & CRM Smart Business Solution *
* Copyright (C) 1999-2007 ComPiere, Inc. All Rights Reserved. *
* This program is free software, you can redistribute it and/or modify it *
* under the terms version 2 of the GNU General Public License as published *
* by the Free Software Foundation. This program is distributed in the hope *
* that it will be useful, but WITHOUT ANY WARRANTY, without even the implied *
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *
* See the GNU General Public License for more details. *
* You should have received a copy of the GNU General Public License along *
* with this program, if not, write to the Free Software Foundation, Inc., *
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. *
* For the text or an alternative of this public license, you may reach us *
* ComPiere, Inc., 2620 Augustine Dr. #245, Santa Clara, CA 95054, USA *
* or via info@compiere.org or http://www.compiere.org/license.html *
*****************************************************************************/
/** Generated Model - DO NOT CHANGE */
package org.compiere.model;
import java.sql.ResultSet;
import java.util.Properties;
/** Generated Model for C_AcctProcessorLog
* @author Adempiere (generated)
* @version Release 3.8.0 - $Id$ */
public class X_C_AcctProcessorLog extends PO implements I_C_AcctProcessorLog, I_Persistent
{
    // NOTE: generated by the Adempiere model generator ("DO NOT CHANGE" header
    // above) — regenerate from the data dictionary instead of editing by hand.

    /** Serial version UID (encodes the generation date). */
    private static final long serialVersionUID = 20150223L;

    /** Standard Constructor */
    public X_C_AcctProcessorLog (Properties ctx, int C_AcctProcessorLog_ID, String trxName)
    {
        super (ctx, C_AcctProcessorLog_ID, trxName);
        /** if (C_AcctProcessorLog_ID == 0)
        {
            setC_AcctProcessorLog_ID (0);
            setC_AcctProcessor_ID (0);
            setIsError (false);
        } */
    }

    /** Load Constructor */
    public X_C_AcctProcessorLog (Properties ctx, ResultSet rs, String trxName)
    {
        super (ctx, rs, trxName);
    }

    /** AccessLevel
     * @return 2 - Client
     */
    protected int get_AccessLevel()
    {
        return accessLevel.intValue();
    }

    /** Load Meta Data */
    protected POInfo initPO (Properties ctx)
    {
        POInfo poi = POInfo.getPOInfo (ctx, Table_ID, get_TrxName());
        return poi;
    }

    public String toString()
    {
        StringBuffer sb = new StringBuffer ("X_C_AcctProcessorLog[")
            .append(get_ID()).append("]");
        return sb.toString();
    }

    /** Set Binary Data.
        @param BinaryData
        Binary Data
      */
    public void setBinaryData (byte[] BinaryData)
    {
        set_Value (COLUMNNAME_BinaryData, BinaryData);
    }

    /** Get Binary Data.
        @return Binary Data
      */
    public byte[] getBinaryData ()
    {
        return (byte[])get_Value(COLUMNNAME_BinaryData);
    }

    /** Set Accounting Processor Log.
        @param C_AcctProcessorLog_ID
        Result of the execution of the Accounting Processor
      */
    public void setC_AcctProcessorLog_ID (int C_AcctProcessorLog_ID)
    {
        // IDs below 1 are stored as SQL NULL by convention
        if (C_AcctProcessorLog_ID < 1)
            set_ValueNoCheck (COLUMNNAME_C_AcctProcessorLog_ID, null);
        else
            set_ValueNoCheck (COLUMNNAME_C_AcctProcessorLog_ID, Integer.valueOf(C_AcctProcessorLog_ID));
    }

    /** Get Accounting Processor Log.
        @return Result of the execution of the Accounting Processor
      */
    public int getC_AcctProcessorLog_ID ()
    {
        Integer ii = (Integer)get_Value(COLUMNNAME_C_AcctProcessorLog_ID);
        if (ii == null)
            return 0;
        return ii.intValue();
    }

    public org.compiere.model.I_C_AcctProcessor getC_AcctProcessor() throws RuntimeException
    {
        return (org.compiere.model.I_C_AcctProcessor)MTable.get(getCtx(), org.compiere.model.I_C_AcctProcessor.Table_Name)
            .getPO(getC_AcctProcessor_ID(), get_TrxName());    }

    /** Set Accounting Processor.
        @param C_AcctProcessor_ID
        Accounting Processor/Server Parameters
      */
    public void setC_AcctProcessor_ID (int C_AcctProcessor_ID)
    {
        if (C_AcctProcessor_ID < 1)
            set_ValueNoCheck (COLUMNNAME_C_AcctProcessor_ID, null);
        else
            set_ValueNoCheck (COLUMNNAME_C_AcctProcessor_ID, Integer.valueOf(C_AcctProcessor_ID));
    }

    /** Get Accounting Processor.
        @return Accounting Processor/Server Parameters
      */
    public int getC_AcctProcessor_ID ()
    {
        Integer ii = (Integer)get_Value(COLUMNNAME_C_AcctProcessor_ID);
        if (ii == null)
            return 0;
        return ii.intValue();
    }

    /** Set Description.
        @param Description
        Optional short description of the record
      */
    public void setDescription (String Description)
    {
        set_Value (COLUMNNAME_Description, Description);
    }

    /** Get Description.
        @return Optional short description of the record
      */
    public String getDescription ()
    {
        return (String)get_Value(COLUMNNAME_Description);
    }

    /** Set Error.
        @param IsError
        An Error occurred in the execution
      */
    public void setIsError (boolean IsError)
    {
        set_Value (COLUMNNAME_IsError, Boolean.valueOf(IsError));
    }

    /** Get Error.
        @return An Error occurred in the execution
      */
    public boolean isError ()
    {
        Object oo = get_Value(COLUMNNAME_IsError);
        if (oo != null)
        {
            // boolean columns may be stored as Boolean or as "Y"/"N" strings
            if (oo instanceof Boolean)
                return ((Boolean)oo).booleanValue();
            return "Y".equals(oo);
        }
        return false;
    }

    /** Set Reference.
        @param Reference
        Reference for this record
      */
    public void setReference (String Reference)
    {
        set_Value (COLUMNNAME_Reference, Reference);
    }

    /** Get Reference.
        @return Reference for this record
      */
    public String getReference ()
    {
        return (String)get_Value(COLUMNNAME_Reference);
    }

    /** Set Summary.
        @param Summary
        Textual summary of this request
      */
    public void setSummary (String Summary)
    {
        set_Value (COLUMNNAME_Summary, Summary);
    }

    /** Get Summary.
        @return Textual summary of this request
      */
    public String getSummary ()
    {
        return (String)get_Value(COLUMNNAME_Summary);
    }

    /** Set Text Message.
        @param TextMsg
        Text Message
      */
    public void setTextMsg (String TextMsg)
    {
        set_Value (COLUMNNAME_TextMsg, TextMsg);
    }

    /** Get Text Message.
        @return Text Message
      */
    public String getTextMsg ()
    {
        return (String)get_Value(COLUMNNAME_TextMsg);
    }
} | gpl-2.0 |
md-5/jdk10 | test/hotspot/jtreg/vmTestbase/nsk/jvmti/IterateThroughHeap/abort/Abort.java | 2450 | /*
* Copyright (c) 2013, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
*
* @summary converted from VM Testbase nsk/jvmti/IterateThroughHeap/abort.
* VM Testbase keywords: [quick, jpda, jvmti, noras]
* VM Testbase readme:
* This test exercises JVMTI function IterateOverHeap().
* Test checks that if one of available callbacks returned JVMTI_VISIT_ABORT value,
* then iteration will be stopped and no more objects will be reported.
*
* @library /vmTestbase
* /test/lib
* @run driver jdk.test.lib.FileInstaller . .
* @run main/othervm/native -agentlib:Abort=-waittime=5 nsk.jvmti.IterateThroughHeap.abort.Abort
*/
package nsk.jvmti.IterateThroughHeap.abort;
import java.util.Random;
import java.io.PrintStream;
import nsk.share.*;
import nsk.share.jvmti.*;
public class Abort extends DebugeeClass {
    static {
        // loads the native JVMTI agent library that drives this test
        loadLibrary("Abort");
    }

    public static void main(String args[]) {
        String[] argv = JVMTITest.commonInit(args);
        // exit code is shifted by the JCK status base as required by the harness
        System.exit(new Abort().runTest(argv,System.out) + Consts.JCK_STATUS_BASE);
    }

    // test state shared with the native agent
    protected Log log = null;
    protected ArgumentHandler argHandler = null;
    protected int status = Consts.TEST_PASSED;

    /**
     * Runs the test: sets up logging, then hands control to the native agent
     * via checkStatus(), which performs the actual JVMTI_VISIT_ABORT check.
     *
     * @param args harness arguments
     * @param out stream for test output
     * @return final test status (Consts.TEST_PASSED / TEST_FAILED)
     */
    public int runTest(String args[], PrintStream out) {
        argHandler = new ArgumentHandler(args);
        log = new Log(out, argHandler);
        log.display("Verifying JVMTI_ABORT.");
        status = checkStatus(status);
        return status;
    }
}
| gpl-2.0 |
neuroidss/adempiere | base/src/org/eevolution/model/I_PP_Order_NodeNext.java | 7756 | /******************************************************************************
* Product: Adempiere ERP & CRM Smart Business Solution *
* Copyright (C) 1999-2007 ComPiere, Inc. All Rights Reserved. *
* This program is free software, you can redistribute it and/or modify it *
* under the terms version 2 of the GNU General Public License as published *
* by the Free Software Foundation. This program is distributed in the hope *
* that it will be useful, but WITHOUT ANY WARRANTY, without even the implied *
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *
* See the GNU General Public License for more details. *
* You should have received a copy of the GNU General Public License along *
* with this program, if not, write to the Free Software Foundation, Inc., *
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. *
* For the text or an alternative of this public license, you may reach us *
* ComPiere, Inc., 2620 Augustine Dr. #245, Santa Clara, CA 95054, USA *
* or via info@compiere.org or http://www.compiere.org/license.html *
*****************************************************************************/
package org.eevolution.model;
import java.math.BigDecimal;
import java.sql.Timestamp;
import org.compiere.model.*;
import org.compiere.util.KeyNamePair;
/** Generated Interface for PP_Order_NodeNext
* @author Adempiere (generated)
* @version Release 3.8.0
*/
public interface I_PP_Order_NodeNext
{
    // NOTE: generated interface (Adempiere model generator) — regenerate from
    // the data dictionary instead of editing by hand.

    /** TableName=PP_Order_NodeNext */
    public static final String Table_Name = "PP_Order_NodeNext";

    /** AD_Table_ID=53023 */
    public static final int Table_ID = MTable.getTable_ID(Table_Name);

    KeyNamePair Model = new KeyNamePair(Table_ID, Table_Name);

    /** AccessLevel = 3 - Client - Org
     */
    BigDecimal accessLevel = BigDecimal.valueOf(3);

    /** Column name AD_Client_ID */
    public static final String COLUMNNAME_AD_Client_ID = "AD_Client_ID";

    /** Get Client.
      * Client/Tenant for this installation.
      */
    public int getAD_Client_ID();

    /** Column name AD_Org_ID */
    public static final String COLUMNNAME_AD_Org_ID = "AD_Org_ID";

    /** Set Organization.
      * Organizational entity within client
      */
    public void setAD_Org_ID (int AD_Org_ID);

    /** Get Organization.
      * Organizational entity within client
      */
    public int getAD_Org_ID();

    /** Column name AD_WF_Next_ID */
    public static final String COLUMNNAME_AD_WF_Next_ID = "AD_WF_Next_ID";

    /** Set Next Node.
      * Next Node in workflow
      */
    public void setAD_WF_Next_ID (int AD_WF_Next_ID);

    /** Get Next Node.
      * Next Node in workflow
      */
    public int getAD_WF_Next_ID();

    public org.compiere.model.I_AD_WF_Node getAD_WF_Next() throws RuntimeException;

    /** Column name AD_WF_Node_ID */
    public static final String COLUMNNAME_AD_WF_Node_ID = "AD_WF_Node_ID";

    /** Set Node.
      * Workflow Node (activity), step or process
      */
    public void setAD_WF_Node_ID (int AD_WF_Node_ID);

    /** Get Node.
      * Workflow Node (activity), step or process
      */
    public int getAD_WF_Node_ID();

    public org.compiere.model.I_AD_WF_Node getAD_WF_Node() throws RuntimeException;

    /** Column name Created */
    public static final String COLUMNNAME_Created = "Created";

    /** Get Created.
      * Date this record was created
      */
    public Timestamp getCreated();

    /** Column name CreatedBy */
    public static final String COLUMNNAME_CreatedBy = "CreatedBy";

    /** Get Created By.
      * User who created this records
      */
    public int getCreatedBy();

    /** Column name Description */
    public static final String COLUMNNAME_Description = "Description";

    /** Set Description.
      * Optional short description of the record
      */
    public void setDescription (String Description);

    /** Get Description.
      * Optional short description of the record
      */
    public String getDescription();

    /** Column name EntityType */
    public static final String COLUMNNAME_EntityType = "EntityType";

    /** Set Entity Type.
      * Dictionary Entity Type;
      * Determines ownership and synchronization
      */
    public void setEntityType (String EntityType);

    /** Get Entity Type.
      * Dictionary Entity Type;
      * Determines ownership and synchronization
      */
    public String getEntityType();

    /** Column name IsActive */
    public static final String COLUMNNAME_IsActive = "IsActive";

    /** Set Active.
      * The record is active in the system
      */
    public void setIsActive (boolean IsActive);

    /** Get Active.
      * The record is active in the system
      */
    public boolean isActive();

    /** Column name IsStdUserWorkflow */
    public static final String COLUMNNAME_IsStdUserWorkflow = "IsStdUserWorkflow";

    /** Set Std User Workflow.
      * Standard Manual User Approval Workflow
      */
    public void setIsStdUserWorkflow (boolean IsStdUserWorkflow);

    /** Get Std User Workflow.
      * Standard Manual User Approval Workflow
      */
    public boolean isStdUserWorkflow();

    /** Column name PP_Order_ID */
    public static final String COLUMNNAME_PP_Order_ID = "PP_Order_ID";

    /** Set Manufacturing Order.
      * Manufacturing Order
      */
    public void setPP_Order_ID (int PP_Order_ID);

    /** Get Manufacturing Order.
      * Manufacturing Order
      */
    public int getPP_Order_ID();

    public org.eevolution.model.I_PP_Order getPP_Order() throws RuntimeException;

    /** Column name PP_Order_Next_ID */
    public static final String COLUMNNAME_PP_Order_Next_ID = "PP_Order_Next_ID";

    /** Set Manufacturing Order Activity Next */
    public void setPP_Order_Next_ID (int PP_Order_Next_ID);

    /** Get Manufacturing Order Activity Next */
    public int getPP_Order_Next_ID();

    public org.eevolution.model.I_PP_Order_Node getPP_Order_Next() throws RuntimeException;

    /** Column name PP_Order_NodeNext_ID */
    public static final String COLUMNNAME_PP_Order_NodeNext_ID = "PP_Order_NodeNext_ID";

    /** Set Manufacturing Order Activity Next */
    public void setPP_Order_NodeNext_ID (int PP_Order_NodeNext_ID);

    /** Get Manufacturing Order Activity Next */
    public int getPP_Order_NodeNext_ID();

    /** Column name PP_Order_Node_ID */
    public static final String COLUMNNAME_PP_Order_Node_ID = "PP_Order_Node_ID";

    /** Set Manufacturing Order Activity.
      * Workflow Node (activity), step or process
      */
    public void setPP_Order_Node_ID (int PP_Order_Node_ID);

    /** Get Manufacturing Order Activity.
      * Workflow Node (activity), step or process
      */
    public int getPP_Order_Node_ID();

    public org.eevolution.model.I_PP_Order_Node getPP_Order_Node() throws RuntimeException;

    /** Column name SeqNo */
    public static final String COLUMNNAME_SeqNo = "SeqNo";

    /** Set Sequence.
      * Method of ordering records;
      * lowest number comes first
      */
    public void setSeqNo (int SeqNo);

    /** Get Sequence.
      * Method of ordering records;
      * lowest number comes first
      */
    public int getSeqNo();

    /** Column name TransitionCode */
    public static final String COLUMNNAME_TransitionCode = "TransitionCode";

    /** Set Transition Code.
      * Code resulting in TRUE of FALSE
      */
    public void setTransitionCode (String TransitionCode);

    /** Get Transition Code.
      * Code resulting in TRUE of FALSE
      */
    public String getTransitionCode();

    /** Column name Updated */
    public static final String COLUMNNAME_Updated = "Updated";

    /** Get Updated.
      * Date this record was updated
      */
    public Timestamp getUpdated();

    /** Column name UpdatedBy */
    public static final String COLUMNNAME_UpdatedBy = "UpdatedBy";

    /** Get Updated By.
      * User who updated this records
      */
    public int getUpdatedBy();
}
| gpl-2.0 |
emacs-mirror/emacs | test/lisp/align-resources/align-post.java | 119 | class X
{
String field1;
String[] field2;
int field3;
int[] field4;
X field5;
X[] field6;
}
| gpl-3.0 |
AydinSakar/sql-layer | src/test/java/com/foundationdb/util/WrappingByteSourceTest.java | 5032 | /**
* Copyright (C) 2009-2013 FoundationDB, LLC
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.foundationdb.util;
import org.junit.Test;
import java.nio.ByteBuffer;
import java.util.Arrays;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public final class WrappingByteSourceTest {
// --- wraps that must succeed: offset/length combinations inside bounds ---

@Test
public void totallyFine() {
    // ordinary interior slice
    byte[] bytes = new byte[10];
    check(wrap(bytes, 3, 5), bytes, 3, 5);
}

@Test
public void simpleWrap() {
    // single-argument wrap covers the whole array
    byte[] bytes = new byte[10];
    check(wrap(bytes), bytes, 0, 10);
}

@Test
public void offsetAndLengthZero() {
    // empty slice of a non-empty array
    byte[] bytes = new byte[10];
    check(wrap(bytes, 0, 0), bytes, 0, 0);
}

@Test
public void offsetSizeAndLengthZero() {
    // empty slice of an empty array
    byte[] bytes = new byte[0];
    check(wrap(bytes, 0, 0), bytes, 0, 0);
}

@Test
public void offsetAtEdge() {
    // zero-length slices at the last index and one past it are both legal
    byte[] bytes = new byte[10];
    check(wrap(bytes, 9, 0), bytes, 9, 0);
    check(wrap(bytes, 10, 0), bytes, 10, 0);
}
// --- bounds validation: out-of-range arguments must raise IllegalArgumentException ---

@Test(expected = IllegalArgumentException.class)
public void offsetPastEdge() {
    // offset 11 is beyond one-past-the-end of a 10-byte array
    wrap(new byte[10], 11, 0);
}

@Test
public void lengthAtEdge() {
    // slices ending exactly at the array end are legal
    byte[] bytes = new byte[10];
    check(wrap(bytes, 0, 10), bytes, 0, 10);
    check(wrap(bytes, 1, 9), bytes, 1, 9);
}

@Test(expected = IllegalArgumentException.class)
public void lengthPastEdge() {
    // offset 1 + length 10 exceeds the 10-byte array
    wrap(new byte[10], 1, 10);
}

@Test(expected = IllegalArgumentException.class)
public void nullBytes() {
    wrap(null, 0, 0);
}

@Test(expected = IllegalArgumentException.class)
public void negativeOffset() {
    wrap(new byte[10], -1, 0);
}

@Test(expected = IllegalArgumentException.class)
public void negativeLength() {
    wrap(new byte[10], 0, -1);
}
@Test
public void byteBufferConversion() {
byte[] bytes = new byte[10];
ByteBuffer byteBuffer = ByteBuffer.wrap(bytes, 3, 4);
WrappingByteSource converted = WrappingByteSource.fromByteBuffer(byteBuffer);
WrappingByteSource manual = new WrappingByteSource().wrap(bytes, 3, 4);
assertEquals("converted WrappingByteSource", manual, converted);
}
@Test
public void equality() {
byte[] bytes = new byte[4*11];
ByteBuffer buffer = ByteBuffer.wrap(bytes);
for(int i=0; i < 5; ++i) {
buffer.putInt(i);
}
buffer.putInt(-1); // space
for(int i=0; i < 5; ++i) {
buffer.putInt(i);
}
WrappingByteSource one = new WrappingByteSource().wrap(bytes, 0, 4*5);
WrappingByteSource two = new WrappingByteSource().wrap(bytes, 6*4, 4*5);
assertEquals("equality", one, two);
assertEquals("hash codes", one.hashCode(), two.hashCode());
}
@Test
public void equalityShort() {
WrappingByteSource one = new WrappingByteSource().wrap(new byte[]{(byte)0xAB});
WrappingByteSource two = new WrappingByteSource().wrap(new byte[]{(byte)0xAB});
assertEquals("equality", one, two);
assertEquals("hash codes", one.hashCode(), two.hashCode());
}
@Test
public void equalityEmpty() {
WrappingByteSource one = new WrappingByteSource().wrap(new byte[0]);
WrappingByteSource two = new WrappingByteSource().wrap(new byte[0]);
assertEquals("equality", one, two);
assertEquals("hash codes", one.hashCode(), two.hashCode());
}
private static void check(ByteSource byteSource, byte[] expectedBytes, int expectedOffset, int expectedLength) {
byte[] actualBytes = byteSource.byteArray();
if (actualBytes != expectedBytes) {
fail("Not same instance: " + stringify(actualBytes) + " but expected " + stringify(expectedBytes));
}
assertEquals("offset", expectedOffset, byteSource.byteArrayOffset());
assertEquals("length", expectedLength, byteSource.byteArrayLength());
}
private static String stringify(byte[] bytes) {
return Arrays.toString(bytes);
}
private static WrappingByteSource wrap(byte[] bytes) {
return new WrappingByteSource().wrap(bytes);
}
private static WrappingByteSource wrap(byte[] bytes, int offset, int length) {
return new WrappingByteSource().wrap(bytes, offset, length);
}
}
| agpl-3.0 |
ilyessou/jfreechart | source/org/jfree/chart/renderer/category/LevelRenderer.java | 16972 | /* ===========================================================
* JFreeChart : a free chart library for the Java(tm) platform
* ===========================================================
*
* (C) Copyright 2000-2009, by Object Refinery Limited and Contributors.
*
* Project Info: http://www.jfree.org/jfreechart/index.html
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
* [Java is a trademark or registered trademark of Sun Microsystems, Inc.
* in the United States and other countries.]
*
* ------------------
* LevelRenderer.java
* ------------------
* (C) Copyright 2004-2009, by Object Refinery Limited.
*
* Original Author: David Gilbert (for Object Refinery Limited);
* Contributor(s): Peter Kolb (patch 2511330);
*
* Changes
* -------
* 09-Jan-2004 : Version 1 (DG);
* 05-Nov-2004 : Modified drawItem() signature (DG);
* 20-Apr-2005 : Renamed CategoryLabelGenerator
* --> CategoryItemLabelGenerator (DG);
* ------------- JFREECHART 1.0.x ---------------------------------------------
* 23-Jan-2006 : Renamed getMaxItemWidth() --> getMaximumItemWidth() (DG);
* 19-Jun-2007 : Removed deprecated code (DG);
* 20-Jun-2007 : Removed JCommon dependencies (DG);
* 29-Jun-2007 : Simplified entity generation by calling addEntity() (DG);
* 13-May-2008 : Code clean-up (DG);
* 26-Jun-2008 : Added crosshair support (DG);
* 23-Jan-2009 : Set more appropriate default shape in legend (DG);
* 23-Jan-2009 : Added support for seriesVisible flags - see patch
* 2511330 (PK)
*
*/
package org.jfree.chart.renderer.category;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Stroke;
import java.awt.geom.Line2D;
import java.awt.geom.Rectangle2D;
import java.io.Serializable;
import org.jfree.chart.axis.CategoryAxis;
import org.jfree.chart.axis.ValueAxis;
import org.jfree.chart.entity.EntityCollection;
import org.jfree.chart.event.RendererChangeEvent;
import org.jfree.chart.labels.CategoryItemLabelGenerator;
import org.jfree.chart.plot.CategoryPlot;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.chart.plot.PlotRenderingInfo;
import org.jfree.chart.util.HashUtilities;
import org.jfree.chart.util.PublicCloneable;
import org.jfree.chart.util.RectangleEdge;
import org.jfree.data.category.CategoryDataset;
/**
* A {@link CategoryItemRenderer} that draws individual data items as
* horizontal lines, spaced in the same way as bars in a bar chart. The
* example shown here is generated by the
* <code>OverlaidBarChartDemo2.java</code> program included in the JFreeChart
* Demo Collection:
* <br><br>
* <img src="../../../../../images/LevelRendererSample.png"
* alt="LevelRendererSample.png" />
*/
public class LevelRenderer extends AbstractCategoryItemRenderer
        implements Cloneable, PublicCloneable, Serializable {

    /** For serialization. */
    private static final long serialVersionUID = -8204856624355025117L;

    /** The default item margin percentage. */
    public static final double DEFAULT_ITEM_MARGIN = 0.20;

    /** The margin between items within a category. */
    private double itemMargin;

    /** The maximum item width as a percentage of the available space. */
    private double maxItemWidth;

    /**
     * Creates a new renderer with default settings.
     */
    public LevelRenderer() {
        super();
        this.itemMargin = DEFAULT_ITEM_MARGIN;
        this.maxItemWidth = 1.0; // 100 percent, so it will not apply unless
                                 // changed
        setBaseLegendShape(new Rectangle2D.Float(-5.0f, -1.0f, 10.0f, 2.0f));
        // set the outline paint to fully transparent, then the legend shape
        // will just have the same colour as the lines drawn by the renderer
        setBaseOutlinePaint(new Color(0, 0, 0, 0));
    }

    /**
     * Returns the item margin.
     *
     * @return The margin.
     *
     * @see #setItemMargin(double)
     */
    public double getItemMargin() {
        return this.itemMargin;
    }

    /**
     * Sets the item margin and sends a {@link RendererChangeEvent} to all
     * registered listeners.  The value is expressed as a percentage of the
     * available width for plotting all the bars, with the resulting amount to
     * be distributed between all the bars evenly.
     *
     * NOTE(review): the value is not range-checked here, so callers are
     * trusted to pass a sensible fraction (0.0 - 1.0) -- confirm upstream.
     *
     * @param percent  the new margin.
     *
     * @see #getItemMargin()
     */
    public void setItemMargin(double percent) {
        this.itemMargin = percent;
        fireChangeEvent();
    }

    /**
     * Returns the maximum width, as a percentage of the available drawing
     * space.
     *
     * @return The maximum width.
     *
     * @see #setMaximumItemWidth(double)
     */
    public double getMaximumItemWidth() {
        return this.maxItemWidth;
    }

    /**
     * Sets the maximum item width, which is specified as a percentage of the
     * available space for all items, and sends a {@link RendererChangeEvent}
     * to all registered listeners.
     *
     * @param percent  the percent.
     *
     * @see #getMaximumItemWidth()
     */
    public void setMaximumItemWidth(double percent) {
        this.maxItemWidth = percent;
        fireChangeEvent();
    }

    /**
     * Initialises the renderer and returns a state object that will be passed
     * to subsequent calls to the drawItem method.
     * <p>
     * This method gets called once at the start of the process of drawing a
     * chart.
     *
     * @param g2  the graphics device.
     * @param dataArea  the area in which the data is to be plotted.
     * @param plot  the plot.
     * @param dataset  the dataset.
     * @param info  collects chart rendering information for return to caller.
     *
     * @return The renderer state.
     */
    public CategoryItemRendererState initialise(Graphics2D g2,
            Rectangle2D dataArea, CategoryPlot plot, CategoryDataset dataset,
            PlotRenderingInfo info) {
        CategoryItemRendererState state = super.initialise(g2, dataArea, plot,
                dataset, info);
        // precompute the per-item width once so drawItem() can reuse it
        calculateItemWidth(plot, dataArea, dataset, state);
        return state;
    }

    /**
     * Calculates the bar width and stores it in the renderer state.
     *
     * @param plot  the plot.
     * @param dataArea  the data area.
     * @param dataset  the dataset.
     * @param state  the renderer state.
     */
    protected void calculateItemWidth(CategoryPlot plot,
            Rectangle2D dataArea, CategoryDataset dataset,
            CategoryItemRendererState state) {

        CategoryAxis domainAxis = getDomainAxis(plot, dataset);
        if (dataset != null) {
            int columns = dataset.getColumnCount();
            // prefer the count of visible series (>= 0) over the raw row count
            int rows = state.getVisibleSeriesCount() >= 0
                    ? state.getVisibleSeriesCount() : dataset.getRowCount();
            double space = 0.0;
            // items are laid out along the domain axis, so the available
            // space depends on the plot orientation
            PlotOrientation orientation = plot.getOrientation();
            if (orientation == PlotOrientation.HORIZONTAL) {
                space = dataArea.getHeight();
            }
            else if (orientation == PlotOrientation.VERTICAL) {
                space = dataArea.getWidth();
            }
            double maxWidth = space * getMaximumItemWidth();
            double categoryMargin = 0.0;
            double currentItemMargin = 0.0;
            // margins only apply when there is more than one column/row
            if (columns > 1) {
                categoryMargin = domainAxis.getCategoryMargin();
            }
            if (rows > 1) {
                currentItemMargin = getItemMargin();
            }
            // space left over after the axis and category/item margins
            double used = space * (1 - domainAxis.getLowerMargin()
                                     - domainAxis.getUpperMargin()
                                     - categoryMargin - currentItemMargin);
            if ((rows * columns) > 0) {
                state.setBarWidth(Math.min(used / (rows * columns), maxWidth));
            }
            else {
                state.setBarWidth(Math.min(used, maxWidth));
            }
        }
    }

    /**
     * Calculates the coordinate of the first "side" of a bar.  This will be
     * the minimum x-coordinate for a vertical bar, and the minimum
     * y-coordinate for a horizontal bar.
     *
     * @param plot  the plot.
     * @param orientation  the plot orientation.
     * @param dataArea  the data area.
     * @param domainAxis  the domain axis.
     * @param state  the renderer state (has the bar width precalculated).
     * @param row  the row index.
     * @param column  the column index.
     *
     * @return The coordinate.
     */
    protected double calculateBarW0(CategoryPlot plot,
                                    PlotOrientation orientation,
                                    Rectangle2D dataArea,
                                    CategoryAxis domainAxis,
                                    CategoryItemRendererState state,
                                    int row,
                                    int column) {
        // calculate bar width...
        double space = 0.0;
        if (orientation == PlotOrientation.HORIZONTAL) {
            space = dataArea.getHeight();
        }
        else {
            space = dataArea.getWidth();
        }
        double barW0 = domainAxis.getCategoryStart(column, getColumnCount(),
                dataArea, plot.getDomainAxisEdge());
        int seriesCount = state.getVisibleSeriesCount();
        if (seriesCount < 0) {
            seriesCount = getRowCount();
        }
        int categoryCount = getColumnCount();
        if (seriesCount > 1) {
            // multiple series: offset within the category by the series slot
            double seriesGap = space * getItemMargin()
                    / (categoryCount * (seriesCount - 1));
            double seriesW = calculateSeriesWidth(space, domainAxis,
                    categoryCount, seriesCount);
            barW0 = barW0 + row * (seriesW + seriesGap)
                    + (seriesW / 2.0) - (state.getBarWidth() / 2.0);
        }
        else {
            // single series: centre the item on the category
            barW0 = domainAxis.getCategoryMiddle(column, getColumnCount(),
                    dataArea, plot.getDomainAxisEdge()) - state.getBarWidth()
                    / 2.0;
        }
        return barW0;
    }

    /**
     * Draws the bar for a single (series, category) data item.  The "bar" is
     * in fact a line perpendicular to the domain axis, positioned at the data
     * value along the range axis.
     *
     * @param g2  the graphics device.
     * @param state  the renderer state.
     * @param dataArea  the data area.
     * @param plot  the plot.
     * @param domainAxis  the domain axis.
     * @param rangeAxis  the range axis.
     * @param dataset  the dataset.
     * @param row  the row index (zero-based).
     * @param column  the column index (zero-based).
     * @param selected  is the item selected?
     * @param pass  the pass index.
     */
    public void drawItem(Graphics2D g2, CategoryItemRendererState state,
            Rectangle2D dataArea, CategoryPlot plot, CategoryAxis domainAxis,
            ValueAxis rangeAxis, CategoryDataset dataset, int row, int column,
            boolean selected, int pass) {

        // nothing is drawn if the row index is not included in the list with
        // the indices of the visible rows...
        int visibleRow = state.getVisibleSeriesIndex(row);
        if (visibleRow < 0) {
            return;
        }

        // nothing is drawn for null values...
        Number dataValue = dataset.getValue(row, column);
        if (dataValue == null) {
            return;
        }

        double value = dataValue.doubleValue();

        PlotOrientation orientation = plot.getOrientation();
        double barW0 = calculateBarW0(plot, orientation, dataArea, domainAxis,
                state, visibleRow, column);
        RectangleEdge edge = plot.getRangeAxisEdge();
        double barL = rangeAxis.valueToJava2D(value, dataArea, edge);

        // draw the bar...
        Line2D line = null;
        double x = 0.0;
        double y = 0.0;
        // (x, y) is the midpoint of the line and is used for the item label
        if (orientation == PlotOrientation.HORIZONTAL) {
            x = barL;
            y = barW0 + state.getBarWidth() / 2.0;
            line = new Line2D.Double(barL, barW0, barL,
                    barW0 + state.getBarWidth());
        }
        else {
            x = barW0 + state.getBarWidth() / 2.0;
            y = barL;
            line = new Line2D.Double(barW0, barL, barW0 + state.getBarWidth(),
                    barL);
        }
        Stroke itemStroke = getItemStroke(row, column, selected);
        Paint itemPaint = getItemPaint(row, column, selected);
        g2.setStroke(itemStroke);
        g2.setPaint(itemPaint);
        g2.draw(line);

        CategoryItemLabelGenerator generator = getItemLabelGenerator(row,
                column, selected);
        if (generator != null && isItemLabelVisible(row, column, selected)) {
            // negative values get the label drawn on the other side
            drawItemLabel(g2, orientation, dataset, row, column, selected, x,
                    y, (value < 0.0));
        }

        // submit the current data point as a crosshair candidate
        int datasetIndex = plot.indexOf(dataset);
        updateCrosshairValues(state.getCrosshairState(),
                dataset.getRowKey(row), dataset.getColumnKey(column), value,
                datasetIndex, barW0, barL, orientation);

        // collect entity and tool tip information...
        EntityCollection entities = state.getEntityCollection();
        if (entities != null) {
            addEntity(entities, line.getBounds(),dataset, row, column,
                    selected);
        }
    }

    /**
     * Calculates the available space for each series.
     *
     * @param space  the space along the entire axis (in Java2D units).
     * @param axis  the category axis.
     * @param categories  the number of categories.
     * @param series  the number of series.
     *
     * @return The width of one series.
     */
    protected double calculateSeriesWidth(double space, CategoryAxis axis,
                                          int categories, int series) {
        // fraction of the axis left after item and axis margins
        double factor = 1.0 - getItemMargin() - axis.getLowerMargin()
                        - axis.getUpperMargin();
        if (categories > 1) {
            factor = factor - axis.getCategoryMargin();
        }
        return (space * factor) / (categories * series);
    }

    /**
     * Returns the Java2D coordinate for the middle of the specified data item.
     *
     * @param rowKey  the row key.
     * @param columnKey  the column key.
     * @param dataset  the dataset.
     * @param axis  the axis.
     * @param area  the drawing area.
     * @param edge  the edge along which the axis lies.
     *
     * @return The Java2D coordinate.
     *
     * @since 1.0.11
     */
    public double getItemMiddle(Comparable rowKey, Comparable columnKey,
            CategoryDataset dataset, CategoryAxis axis, Rectangle2D area,
            RectangleEdge edge) {
        return axis.getCategorySeriesMiddle(columnKey, rowKey, dataset,
                this.itemMargin, area, edge);
    }

    /**
     * Tests an object for equality with this instance.
     *
     * @param obj  the object (<code>null</code> permitted).
     *
     * @return A boolean.
     */
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof LevelRenderer)) {
            return false;
        }
        LevelRenderer that = (LevelRenderer) obj;
        // plain primitive comparison of the two double fields
        if (this.itemMargin != that.itemMargin) {
            return false;
        }
        if (this.maxItemWidth != that.maxItemWidth) {
            return false;
        }
        return super.equals(obj);
    }

    /**
     * Returns a hash code for this instance.
     *
     * @return A hash code.
     */
    public int hashCode() {
        int hash = super.hashCode();
        hash = HashUtilities.hashCode(hash, this.itemMargin);
        hash = HashUtilities.hashCode(hash, this.maxItemWidth);
        return hash;
    }

}
| lgpl-2.1 |
lbndev/sonarqube | tests/upgrade/projects/struts-1.3.9-diet/core/src/main/java/org/apache/struts/chain/Constants.java | 4748 | /*
* $Id: Constants.java 471754 2006-11-06 14:55:09Z husted $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts.chain;
/**
 * <p>Global constants for the Chain of Responsibility Library.</p>
 *
 * <p>This class is a pure constants holder and is never instantiated.</p>
 */
public final class Constants {

    /**
     * <p>Private constructor: prevents instantiation of this constants-only
     * class.</p>
     */
    private Constants() {
    }

    // -------------------------------------------------- Context Attribute Keys

    /**
     * <p>The default context attribute under which the Action for the current
     * request will be stored.</p>
     */
    public static final String ACTION_KEY = "action";

    /**
     * <p>The default context attribute under which the ActionConfig for the
     * current request will be stored.</p>
     */
    public static final String ACTION_CONFIG_KEY = "actionConfig";

    /**
     * <p>The default context attribute under which the ActionForm for the
     * current request will be stored.</p>
     */
    public static final String ACTION_FORM_KEY = "actionForm";

    /**
     * <p>The default context attribute under which the ActionServet for the
     * current application will be stored.</p>
     */
    public static final String ACTION_SERVLET_KEY = "actionServlet";

    /**
     * <p>The default context attribute under which a boolean flag indicating
     * whether this request has been cancelled will be stored.</p>
     */
    public static final String CANCEL_KEY = "cancel";

    /**
     * <p>The default context attribute under which an Exception will be
     * stored before passing it to an exception handler chain.</p>
     */
    public static final String EXCEPTION_KEY = "exception";

    /**
     * <p>The default context attribute under which the ForwardConfig for the
     * current request will be stored.</p>
     */
    public static final String FORWARD_CONFIG_KEY = "forwardConfig";

    /**
     * <p>The default context attribute under which the include path for the
     * current request will be stored.</p>
     */
    public static final String INCLUDE_KEY = "include";

    /**
     * <p>The default context attribute under which the Locale for the current
     * request will be stored.</p>
     */
    public static final String LOCALE_KEY = "locale";

    /**
     * <p>The default context attribute under which the MessageResources for
     * the current request will be stored.</p>
     */
    public static final String MESSAGE_RESOURCES_KEY = "messageResources";

    /**
     * <p>The default context attribute under which the ModuleConfig for the
     * current request will be stored.</p>
     */
    public static final String MODULE_CONFIG_KEY = "moduleConfig";

    /**
     * <p>The default context attribute key under which a Boolean is stored,
     * indicating the valid state of the current request.  If not present, a
     * value of Boolean.FALSE should be assumed.
     */
    public static final String VALID_KEY = "valid";

    // --------------------------------------------------------- Other Constants

    /**
     * <p>The base part of the context attribute under which a Map containing
     * the Action instances associated with this module are stored.  This value
     * must be suffixed with the module prefix in order to create a unique key
     * per module.</p>
     */
    public static final String ACTIONS_KEY = "actions";

    /**
     * <p>The context attribute under which the Catalog containing our defined
     * command chains has been stored.</p>
     */
    public static final String CATALOG_ATTR = "org.apache.struts.chain.CATALOG";

    /**
     * <p>The request attribute under which the path information is stored for
     * processing during a RequestDispatcher.include() call.</p>
     */
    public static final String INCLUDE_PATH_INFO =
        "javax.servlet.include.path_info";

    /**
     * <p>The request attribute under which the servlet path is stored for
     * processing during a RequestDispatcher.include() call.</p>
     */
    public static final String INCLUDE_SERVLET_PATH =
        "javax.servlet.include.servlet_path";
}
| lgpl-3.0 |
droolsjbpm/jbpm | jbpm-xes/src/main/java/org/jbpm/xes/mapper/LogTypeMapper.java | 4605 | /*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.xes.mapper;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.function.BiFunction;
import org.jbpm.xes.model.ClassifierType;
import org.jbpm.xes.model.ExtensionType;
import org.jbpm.xes.model.GlobalsType;
import org.jbpm.xes.model.LogType;
/**
 * Maps a (process name, process id) pair to the XES log header: version and
 * feature attributes, the standard extensions, four event classifiers, the
 * global trace/event attribute declarations, and the log-level string
 * attributes.
 */
public class LogTypeMapper implements BiFunction<String, String, LogType> {

    @Override
    public LogType apply(String processName, String processId) {
        final LogType result = new LogType();
        result.setXesFeatures("");
        result.setXesVersion("2.0");
        // standard XES extensions referenced by the attributes below
        result.getExtension().addAll(getExtensions());
        // event classifiers, from most to least specific
        result.getClassifier().add(new ClassifierType("ByNodeName", "concept:name", "event"));
        result.getClassifier().add(new ClassifierType("ByNodeId", "jbpm:nodeid", "event"));
        result.getClassifier().add(new ClassifierType("ByNameAndTransition", "concept:name lifecycle:transition", "event"));
        result.getClassifier().add(new ClassifierType("ByResource", "org:resource", "event"));
        result.getGlobal().addAll(getGlobals());
        // log-level string attributes
        result.addStringType("source", "jBPM");
        result.addStringType("lifecycle:model", "standard");
        result.addStringType("concept:name", processName);
        result.addStringType("jbpm:processid", processId);
        return result;
    }

    /** The XES extensions declared in the log header. */
    protected List<ExtensionType> getExtensions() {
        final ExtensionType lifecycle = new ExtensionType("Lifecycle", "lifecycle", "http://www.xes-standard.org/lifecycle.xesext");
        final ExtensionType organizational = new ExtensionType("Organizational", "org", "http://www.xes-standard.org/org.xesext");
        final ExtensionType time = new ExtensionType("Time", "time", "http://www.xes-standard.org/time.xesext");
        final ExtensionType concept = new ExtensionType("Concept", "concept", "http://www.xes-standard.org/concept.xesext");
        return Arrays.asList(lifecycle, organizational, time, concept);
    }

    /** Attribute declarations that are always present on every trace/event. */
    protected List<GlobalsType> getGlobals() {
        // Attributes that are always present
        final GlobalsType traceGlobals = new GlobalsType("trace");
        traceGlobals.addStringType("concept:name", "");
        traceGlobals.addDateType("jbpm:start", new Date());
        traceGlobals.addStringType("jbpm:status", "");
        traceGlobals.addStringType("jbpm:version", "");
        traceGlobals.addStringType("jbpm:description", "");
        traceGlobals.addIntegerType("jbpm:instanceid", 0);
        traceGlobals.addStringType("jbpm:correlationkey", "");
        traceGlobals.addIntegerType("jbpm:logid", 0);

        final GlobalsType eventGlobals = new GlobalsType("event");
        eventGlobals.addDateType("time:timestamp", new Date());
        eventGlobals.addStringType("concept:name", "");
        eventGlobals.addStringType("org:resource", "");
        eventGlobals.addStringType("lifecycle:transition", "");
        eventGlobals.addStringType("jbpm:nodeinstanceid", "");
        eventGlobals.addStringType("jbpm:nodeid", "");
        eventGlobals.addStringType("jbpm:nodetype", "");
        eventGlobals.addIntegerType("jbpm:logid", 0);

        return Arrays.asList(traceGlobals, eventGlobals);
    }
}
| apache-2.0 |
pisfly/logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/config/AppendersPlugin.java | 1872 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core.config;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.config.plugins.PluginElement;
import org.apache.logging.log4j.core.config.plugins.PluginFactory;
/**
* An Appender container.
*/
@Plugin(name = "appenders", category = "Core")
public final class AppendersPlugin {

    /** Utility class: only the static factory method is used. */
    private AppendersPlugin() {
    }

    /**
     * Creates a Map of the Appenders, keyed by appender name.
     *
     * @param appenders An array of Appenders; {@code null} is treated as
     *        empty.  If two appenders share a name, the later one wins,
     *        matching the original put() semantics.
     * @return The Appender Map (never {@code null}).
     */
    @PluginFactory
    public static ConcurrentMap<String, Appender> createAppenders(
            @PluginElement("Appenders") final Appender[] appenders) {
        final ConcurrentMap<String, Appender> map =
                new ConcurrentHashMap<>();
        if (appenders == null) {
            // NOTE(review): guards against a configuration that declares an
            // empty <appenders> element handing this factory a null array --
            // TODO confirm against the plugin builder's element injection
            return map;
        }
        for (final Appender appender : appenders) {
            map.put(appender.getName(), appender);
        }
        return map;
    }
}
| apache-2.0 |
hgschmie/presto | presto-main/src/main/java/io/prestosql/cost/CostComparator.java | 2876 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.cost;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Ordering;
import io.prestosql.Session;
import io.prestosql.sql.analyzer.FeaturesConfig;
import javax.inject.Inject;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
/**
 * Orders plan cost estimates by a single scalar cost: a weighted sum of the
 * CPU, peak-memory and network components.
 */
public class CostComparator
{
    private final double cpuWeight;
    private final double memoryWeight;
    private final double networkWeight;

    /**
     * Reads the three component weights from the feature configuration.
     */
    @Inject
    public CostComparator(FeaturesConfig featuresConfig)
    {
        this(featuresConfig.getCpuCostWeight(), featuresConfig.getMemoryCostWeight(), featuresConfig.getNetworkCostWeight());
    }

    /**
     * Creates a comparator with explicit weights; each must be non-negative.
     */
    @VisibleForTesting
    public CostComparator(double cpuWeight, double memoryWeight, double networkWeight)
    {
        checkArgument(cpuWeight >= 0, "cpuWeight cannot be negative");
        checkArgument(memoryWeight >= 0, "memoryWeight cannot be negative");
        checkArgument(networkWeight >= 0, "networkWeight cannot be negative");
        this.cpuWeight = cpuWeight;
        this.memoryWeight = memoryWeight;
        this.networkWeight = networkWeight;
    }

    /**
     * Returns an {@link Ordering} over cost estimates bound to the given session.
     */
    public Ordering<PlanCostEstimate> forSession(Session session)
    {
        requireNonNull(session, "session is null");
        return Ordering.from((first, second) -> compare(session, first, second));
    }

    /**
     * Compares two fully-known cost estimates by their weighted scalar cost.
     */
    public int compare(Session session, PlanCostEstimate left, PlanCostEstimate right)
    {
        requireNonNull(session, "session is null");
        requireNonNull(left, "left is null");
        requireNonNull(right, "right is null");
        checkArgument(!left.hasUnknownComponents() && !right.hasUnknownComponents(), "cannot compare unknown costs");

        // TODO when one left.getMaxMemory() and right.getMaxMemory() exceeds query memory limit * configurable safety margin, choose the plan with lower memory usage
        return Double.compare(toScalarCost(left), toScalarCost(right));
    }

    // Collapses the three cost components into one weighted number.
    private double toScalarCost(PlanCostEstimate estimate)
    {
        return estimate.getCpuCost() * cpuWeight
                + estimate.getMaxMemory() * memoryWeight
                + estimate.getNetworkCost() * networkWeight;
    }
}
| apache-2.0 |
robin13/elasticsearch | libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/StoreTrustConfigTests.java | 8154 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.common.ssl;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509ExtendedTrustManager;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.security.Principal;
import java.security.cert.X509Certificate;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.nullValue;
public class StoreTrustConfigTests extends ESTestCase {
private static final char[] P12_PASS = "p12-pass".toCharArray();
private static final char[] JKS_PASS = "jks-pass".toCharArray();
private static final String DEFAULT_ALGORITHM = TrustManagerFactory.getDefaultAlgorithm();
public void testBuildTrustConfigFromPKCS12() throws Exception {
assumeFalse("Can't use JKS/PKCS12 keystores in a FIPS JVM", inFipsJvm());
final Path ks = getDataPath("/certs/ca1/ca.p12");
final StoreTrustConfig trustConfig = new StoreTrustConfig(ks, P12_PASS, "PKCS12", DEFAULT_ALGORITHM);
assertThat(trustConfig.getDependentFiles(), Matchers.containsInAnyOrder(ks));
assertCertificateChain(trustConfig, "CN=Test CA 1");
}
public void testBuildTrustConfigFromJKS() throws Exception {
assumeFalse("Can't use JKS/PKCS12 keystores in a FIPS JVM", inFipsJvm());
final Path ks = getDataPath("/certs/ca-all/ca.jks");
final StoreTrustConfig trustConfig = new StoreTrustConfig(ks, JKS_PASS, "jks", DEFAULT_ALGORITHM);
assertThat(trustConfig.getDependentFiles(), Matchers.containsInAnyOrder(ks));
assertCertificateChain(trustConfig, "CN=Test CA 1", "CN=Test CA 2", "CN=Test CA 3");
}
public void testBadKeyStoreFormatFails() throws Exception {
assumeFalse("Can't use JKS/PKCS12 keystores in a FIPS JVM", inFipsJvm());
final Path ks = createTempFile("ca", ".p12");
Files.write(ks, randomByteArrayOfLength(128), StandardOpenOption.APPEND);
final StoreTrustConfig trustConfig = new StoreTrustConfig(ks, new char[0], randomFrom("PKCS12", "jks"), DEFAULT_ALGORITHM);
assertThat(trustConfig.getDependentFiles(), Matchers.containsInAnyOrder(ks));
assertInvalidFileFormat(trustConfig, ks);
}
public void testMissingKeyStoreFailsWithMeaningfulMessage() throws Exception {
assumeFalse("Can't use JKS/PKCS12 keystores in a FIPS JVM", inFipsJvm());
final Path ks = getDataPath("/certs/ca-all/ca.p12").getParent().resolve("keystore.dne");
final StoreTrustConfig trustConfig = new StoreTrustConfig(ks, new char[0], randomFrom("PKCS12", "jks"), DEFAULT_ALGORITHM);
assertThat(trustConfig.getDependentFiles(), Matchers.containsInAnyOrder(ks));
assertFileNotFound(trustConfig, ks);
}
public void testIncorrectPasswordFailsWithMeaningfulMessage() throws Exception {
assumeFalse("Can't use JKS/PKCS12 keystores in a FIPS JVM", inFipsJvm());
final Path ks = getDataPath("/certs/ca1/ca.p12");
final StoreTrustConfig trustConfig = new StoreTrustConfig(ks, new char[0], "PKCS12", DEFAULT_ALGORITHM);
assertThat(trustConfig.getDependentFiles(), Matchers.containsInAnyOrder(ks));
assertPasswordIsIncorrect(trustConfig, ks);
}
/**
 * A keystore that holds no trusted-certificate entries must be rejected with a
 * meaningful message, for both supported store formats.
 */
public void testMissingTrustEntriesFailsWithMeaningfulMessage() throws Exception {
    assumeFalse("Can't use JKS/PKCS12 keystores in a FIPS JVM", inFipsJvm());
    // Randomly exercise one of the two store formats; the fixture in each case
    // contains no trusted-certificate entries (see assertNoCertificateEntries).
    final boolean usePkcs12 = randomBoolean();
    final String type = usePkcs12 ? "PKCS12" : "jks";
    final Path ks = getDataPath(usePkcs12 ? "/certs/cert-all/certs.p12" : "/certs/cert-all/certs.jks");
    final char[] password = usePkcs12 ? P12_PASS : JKS_PASS;
    final StoreTrustConfig trustConfig = new StoreTrustConfig(ks, password, type, DEFAULT_ALGORITHM);
    assertThat(trustConfig.getDependentFiles(), Matchers.containsInAnyOrder(ks));
    assertNoCertificateEntries(trustConfig, ks);
}
/**
 * The trust config must re-read the keystore file every time a trust manager is
 * created: it has to observe replacement, deletion and corruption of the file.
 */
public void testTrustConfigReloadsKeysStoreContents() throws Exception {
    assumeFalse("Can't use JKS/PKCS12 keystores in a FIPS JVM", inFipsJvm());
    final Path ks1 = getDataPath("/certs/ca1/ca.p12");
    final Path ksAll = getDataPath("/certs/ca-all/ca.p12");
    final Path ks = createTempFile("ca", "p12");
    final StoreTrustConfig trustConfig = new StoreTrustConfig(ks, P12_PASS, "PKCS12", DEFAULT_ALGORITHM);
    // Valid single-CA store -> one accepted issuer.
    Files.copy(ks1, ks, StandardCopyOption.REPLACE_EXISTING);
    assertCertificateChain(trustConfig, "CN=Test CA 1");
    // File removed -> file-not-found error.
    Files.delete(ks);
    assertFileNotFound(trustConfig, ks);
    // Garbage content -> invalid-format error.
    Files.write(ks, randomByteArrayOfLength(128), StandardOpenOption.CREATE);
    assertInvalidFileFormat(trustConfig, ks);
    // Valid multi-CA store -> all three issuers visible again.
    Files.copy(ksAll, ks, StandardCopyOption.REPLACE_EXISTING);
    assertCertificateChain(trustConfig, "CN=Test CA 1", "CN=Test CA 2", "CN=Test CA 3");
}
/**
 * Asserts that the trust manager built from {@code trustConfig} accepts exactly
 * the issuers with the given subject DNs (order-insensitive).
 */
private void assertCertificateChain(StoreTrustConfig trustConfig, String... caNames) {
    final X509ExtendedTrustManager trustManager = trustConfig.createTrustManager();
    final X509Certificate[] acceptedIssuers = trustManager.getAcceptedIssuers();
    final Set<String> issuerNames = Stream.of(acceptedIssuers)
        .map(certificate -> certificate.getSubjectDN().getName())
        .collect(Collectors.toSet());
    assertThat(issuerNames, Matchers.containsInAnyOrder(caNames));
}
/**
 * Asserts that creating a trust manager fails because {@code file} is not a
 * readable keystore, with an {@link IOException} as the cause.
 */
private void assertInvalidFileFormat(StoreTrustConfig trustConfig, Path file) {
    final SslConfigException exception = expectThrows(SslConfigException.class, trustConfig::createTrustManager);
    final String message = exception.getMessage();
    assertThat(message, Matchers.containsString("cannot read"));
    assertThat(message, Matchers.containsString("keystore"));
    assertThat(message, Matchers.containsString(file.toAbsolutePath().toString()));
    assertThat(exception.getCause(), Matchers.instanceOf(IOException.class));
}
/**
 * Asserts that creating a trust manager fails because {@code file} does not
 * exist; a missing file is detected up front, so there is no nested cause.
 */
private void assertFileNotFound(StoreTrustConfig trustConfig, Path file) {
    final SslConfigException exception = expectThrows(SslConfigException.class, trustConfig::createTrustManager);
    final String message = exception.getMessage();
    assertThat(message, Matchers.containsString("file does not exist"));
    assertThat(message, Matchers.containsString("keystore"));
    assertThat(message, Matchers.containsString(file.toAbsolutePath().toString()));
    assertThat(exception.getCause(), nullValue());
}
/**
 * Asserts that creating a trust manager fails with a message naming the
 * keystore file and pointing at the password as the problem.
 *
 * <p>Consistency fix: the other assertion helpers in this class use
 * {@code Matchers.}-qualified matchers; this one used the bare static import.
 */
private void assertPasswordIsIncorrect(StoreTrustConfig trustConfig, Path key) {
    final SslConfigException exception = expectThrows(SslConfigException.class, trustConfig::createTrustManager);
    assertThat(exception.getMessage(), Matchers.containsString("keystore"));
    assertThat(exception.getMessage(), Matchers.containsString(key.toAbsolutePath().toString()));
    assertThat(exception.getMessage(), Matchers.containsString("password"));
}
/**
 * Asserts that creating a trust manager fails because the truststore
 * {@code file} contains no trusted-certificate entries.
 */
private void assertNoCertificateEntries(StoreTrustConfig trustConfig, Path file) {
    final SslConfigException exception = expectThrows(SslConfigException.class, trustConfig::createTrustManager);
    final String message = exception.getMessage();
    assertThat(message, Matchers.containsString("does not contain any trusted certificate entries"));
    assertThat(message, Matchers.containsString("truststore"));
    assertThat(message, Matchers.containsString(file.toAbsolutePath().toString()));
}
}
| apache-2.0 |
vineetgarg02/hive | storage-api/src/java/org/apache/hive/common/util/Murmur3.java | 15594 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.common.util;
/**
* Murmur3 is successor to Murmur2 fast non-crytographic hash algorithms.
*
* Murmur3 32 and 128 bit variants.
* 32-bit Java port of https://code.google.com/p/smhasher/source/browse/trunk/MurmurHash3.cpp#94
* 128-bit Java port of https://code.google.com/p/smhasher/source/browse/trunk/MurmurHash3.cpp#255
*
* This is a public domain code with no copyrights.
* From homepage of MurmurHash (https://code.google.com/p/smhasher/),
* "All MurmurHash versions are public domain software, and the author disclaims all copyright
* to their code."
*/
public class Murmur3 {
  // from 64-bit linear congruential generator
  public static final long NULL_HASHCODE = 2862933555777941757L;

  // Constants for 32 bit variant
  private static final int C1_32 = 0xcc9e2d51;
  private static final int C2_32 = 0x1b873593;
  private static final int R1_32 = 15;
  private static final int R2_32 = 13;
  private static final int M_32 = 5;
  private static final int N_32 = 0xe6546b64;

  // Constants for 128 bit variant
  private static final long C1 = 0x87c37b91114253d5L;
  private static final long C2 = 0x4cf5ad432745937fL;
  private static final int R1 = 31;
  private static final int R2 = 27;
  private static final int R3 = 33;
  private static final int M = 5;
  private static final int N1 = 0x52dce729;
  private static final int N2 = 0x38495ab5;

  // Seed used by every convenience overload that does not take an explicit seed.
  public static final int DEFAULT_SEED = 104729;

  /** Murmur3 32-bit variant over two longs, using {@link #DEFAULT_SEED}. */
  public static int hash32(long l0, long l1) {
    return hash32(l0, l1, DEFAULT_SEED);
  }

  /** Murmur3 32-bit variant over one long, using {@link #DEFAULT_SEED}. */
  public static int hash32(long l0) {
    return hash32(l0, DEFAULT_SEED);
  }

  /**
   * Murmur3 32-bit variant.
   */
  public static int hash32(long l0, int seed) {
    int hash = seed;
    // reverseBytes yields little-endian byte order, so this matches hashing the
    // same 8 bytes through the byte[] overload.
    final long r0 = Long.reverseBytes(l0);
    hash = mix32((int) r0, hash);
    hash = mix32((int) (r0 >>> 32), hash);
    return fmix32(Long.BYTES, hash);
  }

  /**
   * Murmur3 32-bit variant.
   */
  public static int hash32(long l0, long l1, int seed) {
    int hash = seed;
    // Little-endian byte order for both words; mixed low half first.
    final long r0 = Long.reverseBytes(l0);
    final long r1 = Long.reverseBytes(l1);
    hash = mix32((int) r0, hash);
    hash = mix32((int) (r0 >>> 32), hash);
    hash = mix32((int) (r1), hash);
    hash = mix32((int) (r1 >>> 32), hash);
    return fmix32(Long.BYTES * 2, hash);
  }

  /**
   * Murmur3 32-bit variant.
   *
   * @param data - input byte array
   * @return - hashcode
   */
  public static int hash32(byte[] data) {
    return hash32(data, 0, data.length, DEFAULT_SEED);
  }

  /**
   * Murmur3 32-bit variant.
   *
   * @param data - input byte array
   * @param length - length of array
   * @return - hashcode
   */
  public static int hash32(byte[] data, int length) {
    return hash32(data, 0, length, DEFAULT_SEED);
  }

  /**
   * Murmur3 32-bit variant.
   *
   * @param data - input byte array
   * @param length - length of array
   * @param seed - seed. (default 0)
   * @return - hashcode
   */
  public static int hash32(byte[] data, int length, int seed) {
    return hash32(data, 0, length, seed);
  }

  /**
   * Murmur3 32-bit variant.
   *
   * @param data - input byte array
   * @param offset - offset of data
   * @param length - length of array
   * @param seed - seed. (default 0)
   * @return - hashcode
   */
  @SuppressFBWarnings(value = {"SF_SWITCH_FALLTHROUGH", "SF_SWITCH_NO_DEFAULT"}, justification = "Expected")
  public static int hash32(byte[] data, int offset, int length, int seed) {
    int hash = seed;
    // Process the input in 4-byte little-endian blocks.
    final int nblocks = length >> 2;
    // body
    for (int i = 0; i < nblocks; i++) {
      int i_4 = i << 2;
      int k = (data[offset + i_4] & 0xff)
          | ((data[offset + i_4 + 1] & 0xff) << 8)
          | ((data[offset + i_4 + 2] & 0xff) << 16)
          | ((data[offset + i_4 + 3] & 0xff) << 24);
      hash = mix32(k, hash);
    }
    // tail: fold the remaining 1-3 bytes; the fall-through is intentional.
    // NOTE(review): unlike the 64/128-bit variants, these tail bytes are NOT
    // masked with 0xff, so bytes >= 0x80 sign-extend into the higher bits of k1.
    // IncrementalHash32.end() does the same, so the two stay consistent; this
    // appears intentional for hash compatibility — confirm before changing.
    int idx = nblocks << 2;
    int k1 = 0;
    switch (length - idx) {
    case 3:
      k1 ^= data[offset + idx + 2] << 16;
      // fall through
    case 2:
      k1 ^= data[offset + idx + 1] << 8;
      // fall through
    case 1:
      k1 ^= data[offset + idx];
      // mix functions
      k1 *= C1_32;
      k1 = Integer.rotateLeft(k1, R1_32);
      k1 *= C2_32;
      hash ^= k1;
    }
    return fmix32(length, hash);
  }

  /** Mixes one 32-bit block into the running hash (32-bit variant body step). */
  private static int mix32(int k, int hash) {
    k *= C1_32;
    k = Integer.rotateLeft(k, R1_32);
    k *= C2_32;
    hash ^= k;
    return Integer.rotateLeft(hash, R2_32) * M_32 + N_32;
  }

  /** Final avalanche for the 32-bit variant; {@code length} is the total input size in bytes. */
  private static int fmix32(int length, int hash) {
    hash ^= length;
    hash ^= (hash >>> 16);
    hash *= 0x85ebca6b;
    hash ^= (hash >>> 13);
    hash *= 0xc2b2ae35;
    hash ^= (hash >>> 16);
    return hash;
  }

  /**
   * Murmur3 64-bit variant. This is essentially MSB 8 bytes of Murmur3 128-bit variant.
   *
   * @param data - input byte array
   * @return - hashcode
   */
  public static long hash64(byte[] data) {
    return hash64(data, 0, data.length, DEFAULT_SEED);
  }

  /** Murmur3 64-bit variant over a single long, using {@link #DEFAULT_SEED}. */
  public static long hash64(long data) {
    long hash = DEFAULT_SEED;
    // Little-endian byte order to match the byte[] overload.
    long k = Long.reverseBytes(data);
    int length = Long.BYTES;
    // mix functions
    k *= C1;
    k = Long.rotateLeft(k, R1);
    k *= C2;
    hash ^= k;
    hash = Long.rotateLeft(hash, R2) * M + N1;
    // finalization
    hash ^= length;
    hash = fmix64(hash);
    return hash;
  }

  /** Murmur3 64-bit variant over a single int, using {@link #DEFAULT_SEED}. */
  public static long hash64(int data) {
    // Byte-swap, then zero-extend to a long ((-1L >>> 32) == 0xFFFFFFFFL masks
    // off the sign extension of the int-to-long widening).
    long k1 = Integer.reverseBytes(data) & (-1L >>> 32);
    int length = Integer.BYTES;
    long hash = DEFAULT_SEED;
    k1 *= C1;
    k1 = Long.rotateLeft(k1, R1);
    k1 *= C2;
    hash ^= k1;
    // finalization
    hash ^= length;
    hash = fmix64(hash);
    return hash;
  }

  /** Murmur3 64-bit variant over a single short, using {@link #DEFAULT_SEED}. */
  public static long hash64(short data) {
    long hash = DEFAULT_SEED;
    long k1 = 0;
    // Assemble the two bytes of the short in swapped (little-endian) order.
    k1 ^= ((long) data & 0xff) << 8;
    k1 ^= ((long)((data & 0xFF00) >> 8) & 0xff);
    k1 *= C1;
    k1 = Long.rotateLeft(k1, R1);
    k1 *= C2;
    hash ^= k1;
    // finalization
    hash ^= Short.BYTES;
    hash = fmix64(hash);
    return hash;
  }

  /** Murmur3 64-bit variant over a byte range, using {@link #DEFAULT_SEED}. */
  public static long hash64(byte[] data, int offset, int length) {
    return hash64(data, offset, length, DEFAULT_SEED);
  }

  /**
   * Murmur3 64-bit variant. This is essentially MSB 8 bytes of Murmur3 128-bit variant.
   *
   * @param data - input byte array
   * @param length - length of array
   * @param seed - seed. (default is 0)
   * @return - hashcode
   */
  @SuppressFBWarnings(value = "SF_SWITCH_NO_DEFAULT", justification = "Expected")
  public static long hash64(byte[] data, int offset, int length, int seed) {
    long hash = seed;
    // Process the input in 8-byte little-endian blocks.
    final int nblocks = length >> 3;
    // body
    for (int i = 0; i < nblocks; i++) {
      final int i8 = i << 3;
      long k = ((long) data[offset + i8] & 0xff)
          | (((long) data[offset + i8 + 1] & 0xff) << 8)
          | (((long) data[offset + i8 + 2] & 0xff) << 16)
          | (((long) data[offset + i8 + 3] & 0xff) << 24)
          | (((long) data[offset + i8 + 4] & 0xff) << 32)
          | (((long) data[offset + i8 + 5] & 0xff) << 40)
          | (((long) data[offset + i8 + 6] & 0xff) << 48)
          | (((long) data[offset + i8 + 7] & 0xff) << 56);
      // mix functions
      k *= C1;
      k = Long.rotateLeft(k, R1);
      k *= C2;
      hash ^= k;
      hash = Long.rotateLeft(hash, R2) * M + N1;
    }
    // tail: fold the remaining 1-7 bytes; the fall-through is intentional.
    long k1 = 0;
    int tailStart = nblocks << 3;
    switch (length - tailStart) {
    case 7:
      k1 ^= ((long) data[offset + tailStart + 6] & 0xff) << 48;
      // fall through
    case 6:
      k1 ^= ((long) data[offset + tailStart + 5] & 0xff) << 40;
      // fall through
    case 5:
      k1 ^= ((long) data[offset + tailStart + 4] & 0xff) << 32;
      // fall through
    case 4:
      k1 ^= ((long) data[offset + tailStart + 3] & 0xff) << 24;
      // fall through
    case 3:
      k1 ^= ((long) data[offset + tailStart + 2] & 0xff) << 16;
      // fall through
    case 2:
      k1 ^= ((long) data[offset + tailStart + 1] & 0xff) << 8;
      // fall through
    case 1:
      k1 ^= ((long) data[offset + tailStart] & 0xff);
      k1 *= C1;
      k1 = Long.rotateLeft(k1, R1);
      k1 *= C2;
      hash ^= k1;
    }
    // finalization
    hash ^= length;
    hash = fmix64(hash);
    return hash;
  }

  /**
   * Murmur3 128-bit variant.
   *
   * @param data - input byte array
   * @return - hashcode (2 longs)
   */
  public static long[] hash128(byte[] data) {
    return hash128(data, 0, data.length, DEFAULT_SEED);
  }

  /**
   * Murmur3 128-bit variant.
   *
   * @param data - input byte array
   * @param offset - the first element of array
   * @param length - length of array
   * @param seed - seed. (default is 0)
   * @return - hashcode (2 longs)
   */
  @SuppressFBWarnings(value = "SF_SWITCH_NO_DEFAULT", justification = "Expected")
  public static long[] hash128(byte[] data, int offset, int length, int seed) {
    // Two 64-bit lanes, both seeded identically, mixed in 16-byte blocks.
    long h1 = seed;
    long h2 = seed;
    final int nblocks = length >> 4;
    // body
    for (int i = 0; i < nblocks; i++) {
      final int i16 = i << 4;
      long k1 = ((long) data[offset + i16] & 0xff)
          | (((long) data[offset + i16 + 1] & 0xff) << 8)
          | (((long) data[offset + i16 + 2] & 0xff) << 16)
          | (((long) data[offset + i16 + 3] & 0xff) << 24)
          | (((long) data[offset + i16 + 4] & 0xff) << 32)
          | (((long) data[offset + i16 + 5] & 0xff) << 40)
          | (((long) data[offset + i16 + 6] & 0xff) << 48)
          | (((long) data[offset + i16 + 7] & 0xff) << 56);
      long k2 = ((long) data[offset + i16 + 8] & 0xff)
          | (((long) data[offset + i16 + 9] & 0xff) << 8)
          | (((long) data[offset + i16 + 10] & 0xff) << 16)
          | (((long) data[offset + i16 + 11] & 0xff) << 24)
          | (((long) data[offset + i16 + 12] & 0xff) << 32)
          | (((long) data[offset + i16 + 13] & 0xff) << 40)
          | (((long) data[offset + i16 + 14] & 0xff) << 48)
          | (((long) data[offset + i16 + 15] & 0xff) << 56);
      // mix functions for k1
      k1 *= C1;
      k1 = Long.rotateLeft(k1, R1);
      k1 *= C2;
      h1 ^= k1;
      h1 = Long.rotateLeft(h1, R2);
      h1 += h2;
      h1 = h1 * M + N1;
      // mix functions for k2
      k2 *= C2;
      k2 = Long.rotateLeft(k2, R3);
      k2 *= C1;
      h2 ^= k2;
      h2 = Long.rotateLeft(h2, R1);
      h2 += h1;
      h2 = h2 * M + N2;
    }
    // tail: fold the remaining 1-15 bytes; the fall-through is intentional
    // (bytes 8-14 feed k2, bytes 0-7 feed k1).
    long k1 = 0;
    long k2 = 0;
    int tailStart = nblocks << 4;
    switch (length - tailStart) {
    case 15:
      k2 ^= (long) (data[offset + tailStart + 14] & 0xff) << 48;
      // fall through
    case 14:
      k2 ^= (long) (data[offset + tailStart + 13] & 0xff) << 40;
      // fall through
    case 13:
      k2 ^= (long) (data[offset + tailStart + 12] & 0xff) << 32;
      // fall through
    case 12:
      k2 ^= (long) (data[offset + tailStart + 11] & 0xff) << 24;
      // fall through
    case 11:
      k2 ^= (long) (data[offset + tailStart + 10] & 0xff) << 16;
      // fall through
    case 10:
      k2 ^= (long) (data[offset + tailStart + 9] & 0xff) << 8;
      // fall through
    case 9:
      k2 ^= (long) (data[offset + tailStart + 8] & 0xff);
      k2 *= C2;
      k2 = Long.rotateLeft(k2, R3);
      k2 *= C1;
      h2 ^= k2;
      // fall through
    case 8:
      k1 ^= (long) (data[offset + tailStart + 7] & 0xff) << 56;
      // fall through
    case 7:
      k1 ^= (long) (data[offset + tailStart + 6] & 0xff) << 48;
      // fall through
    case 6:
      k1 ^= (long) (data[offset + tailStart + 5] & 0xff) << 40;
      // fall through
    case 5:
      k1 ^= (long) (data[offset + tailStart + 4] & 0xff) << 32;
      // fall through
    case 4:
      k1 ^= (long) (data[offset + tailStart + 3] & 0xff) << 24;
      // fall through
    case 3:
      k1 ^= (long) (data[offset + tailStart + 2] & 0xff) << 16;
      // fall through
    case 2:
      k1 ^= (long) (data[offset + tailStart + 1] & 0xff) << 8;
      // fall through
    case 1:
      k1 ^= (long) (data[offset + tailStart] & 0xff);
      k1 *= C1;
      k1 = Long.rotateLeft(k1, R1);
      k1 *= C2;
      h1 ^= k1;
    }
    // finalization: cross-mix the two lanes, avalanche each, cross-mix again.
    h1 ^= length;
    h2 ^= length;
    h1 += h2;
    h2 += h1;
    h1 = fmix64(h1);
    h2 = fmix64(h2);
    h1 += h2;
    h2 += h1;
    return new long[]{h1, h2};
  }

  /** Final avalanche for the 64/128-bit variants. */
  private static long fmix64(long h) {
    h ^= (h >>> 33);
    h *= 0xff51afd7ed558ccdL;
    h ^= (h >>> 33);
    h *= 0xc4ceb9fe1a85ec53L;
    h ^= (h >>> 33);
    return h;
  }

  /**
   * Incremental (streaming) form of the 32-bit variant: feed bytes via
   * {@link #add}, then call {@link #end()}. Produces the same result as
   * {@code hash32(byte[], ...)} over the concatenated input. Not thread-safe.
   */
  public static class IncrementalHash32 {
    // Up to 3 bytes of input that did not yet fill a 4-byte block.
    byte[] tail = new byte[3];
    // Number of valid bytes currently buffered in 'tail' (0-3).
    int tailLen;
    // Total number of bytes consumed so far; folded in by end().
    int totalLen;
    // Running hash state.
    int hash;

    /** Resets the accumulator and seeds the hash. */
    public final void start(int hash) {
      tailLen = totalLen = 0;
      this.hash = hash;
    }

    /** Consumes {@code length} bytes of {@code data} starting at {@code offset}. */
    public final void add(byte[] data, int offset, int length) {
      if (length == 0) return;
      totalLen += length;
      // Not enough to complete a 4-byte block: just buffer and return.
      if (tailLen + length < 4) {
        System.arraycopy(data, offset, tail, tailLen, length);
        tailLen += length;
        return;
      }
      int offset2 = 0;
      if (tailLen > 0) {
        // Complete the buffered partial block with the first bytes of this chunk.
        offset2 = (4 - tailLen);
        int k = -1;
        switch (tailLen) {
        case 1:
          k = orBytes(tail[0], data[offset], data[offset + 1], data[offset + 2]);
          break;
        case 2:
          k = orBytes(tail[0], tail[1], data[offset], data[offset + 1]);
          break;
        case 3:
          k = orBytes(tail[0], tail[1], tail[2], data[offset]);
          break;
        default: throw new AssertionError(tailLen);
        }
        // mix functions
        k *= C1_32;
        k = Integer.rotateLeft(k, R1_32);
        k *= C2_32;
        hash ^= k;
        hash = Integer.rotateLeft(hash, R2_32) * M_32 + N_32;
      }
      // Mix all complete 4-byte blocks of the remainder.
      int length2 = length - offset2;
      offset += offset2;
      final int nblocks = length2 >> 2;
      for (int i = 0; i < nblocks; i++) {
        int i_4 = (i << 2) + offset;
        int k = orBytes(data[i_4], data[i_4 + 1], data[i_4 + 2], data[i_4 + 3]);
        // mix functions
        k *= C1_32;
        k = Integer.rotateLeft(k, R1_32);
        k *= C2_32;
        hash ^= k;
        hash = Integer.rotateLeft(hash, R2_32) * M_32 + N_32;
      }
      // Buffer any trailing 1-3 bytes for the next add()/end() call.
      int consumed = (nblocks << 2);
      tailLen = length2 - consumed;
      if (consumed == length2) return;
      System.arraycopy(data, offset + consumed, tail, 0, tailLen);
    }

    /** Folds in any buffered tail bytes and returns the final 32-bit hash. */
    @SuppressFBWarnings(value = {"SF_SWITCH_FALLTHROUGH", "SF_SWITCH_NO_DEFAULT"}, justification = "Expected")
    public final int end() {
      // Tail bytes are intentionally unmasked, mirroring hash32(byte[],int,int,int).
      int k1 = 0;
      switch (tailLen) {
      case 3:
        k1 ^= tail[2] << 16;
        // fall through
      case 2:
        k1 ^= tail[1] << 8;
        // fall through
      case 1:
        k1 ^= tail[0];
        // mix functions
        k1 *= C1_32;
        k1 = Integer.rotateLeft(k1, R1_32);
        k1 *= C2_32;
        hash ^= k1;
      }
      // finalization
      hash ^= totalLen;
      hash ^= (hash >>> 16);
      hash *= 0x85ebca6b;
      hash ^= (hash >>> 13);
      hash *= 0xc2b2ae35;
      hash ^= (hash >>> 16);
      return hash;
    }
  }

  /** Assembles four bytes into a little-endian int (b1 is the lowest byte). */
  private static int orBytes(byte b1, byte b2, byte b3, byte b4) {
    return (b1 & 0xff) | ((b2 & 0xff) << 8) | ((b3 & 0xff) << 16) | ((b4 & 0xff) << 24);
  }
}
| apache-2.0 |
joshualitt/DataflowJavaSDK | maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java | 11438 | /*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package ${package};
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.api.services.bigquery.model.TableReference;
import com.google.api.services.bigquery.model.TableRow;
import com.google.api.services.bigquery.model.TableSchema;
import ${package}.common.DataflowExampleUtils;
import com.google.cloud.dataflow.sdk.Pipeline;
import com.google.cloud.dataflow.sdk.PipelineResult;
import com.google.cloud.dataflow.sdk.io.BigQueryIO;
import com.google.cloud.dataflow.sdk.io.PubsubIO;
import com.google.cloud.dataflow.sdk.io.TextIO;
import com.google.cloud.dataflow.sdk.options.Default;
import com.google.cloud.dataflow.sdk.options.Description;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.transforms.DoFn;
import com.google.cloud.dataflow.sdk.transforms.ParDo;
import com.google.cloud.dataflow.sdk.transforms.windowing.FixedWindows;
import com.google.cloud.dataflow.sdk.transforms.windowing.Window;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.cloud.dataflow.sdk.values.PCollection;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
/**
* An example that counts words in text, and can run over either unbounded or bounded input
* collections.
*
* <p>This class, {@link WindowedWordCount}, is the last in a series of four successively more
* detailed 'word count' examples. First take a look at {@link MinimalWordCount},
* {@link WordCount}, and {@link DebuggingWordCount}.
*
* <p>Basic concepts, also in the MinimalWordCount, WordCount, and DebuggingWordCount examples:
* Reading text files; counting a PCollection; writing to GCS; executing a Pipeline both locally
* and using the Dataflow service; defining DoFns; creating a custom aggregator;
* user-defined PTransforms; defining PipelineOptions.
*
* <p>New Concepts:
* <pre>
* 1. Unbounded and bounded pipeline input modes
* 2. Adding timestamps to data
* 3. PubSub topics as sources
* 4. Windowing
* 5. Re-using PTransforms over windowed PCollections
* 6. Writing to BigQuery
* </pre>
*
* <p>To execute this pipeline locally, specify general pipeline configuration:
* <pre>{@code
* --project=YOUR_PROJECT_ID
* }
* </pre>
*
* <p>To execute this pipeline using the Dataflow service, specify pipeline configuration:
* <pre>{@code
* --project=YOUR_PROJECT_ID
* --stagingLocation=gs://YOUR_STAGING_DIRECTORY
* --runner=BlockingDataflowPipelineRunner
* }
* </pre>
*
* <p>Optionally specify the input file path via:
* {@code --inputFile=gs://INPUT_PATH},
* which defaults to {@code gs://dataflow-samples/shakespeare/kinglear.txt}.
*
* <p>Specify an output BigQuery dataset and optionally, a table for the output. If you don't
* specify the table, one will be created for you using the job name. If you don't specify the
* dataset, a dataset called {@code dataflow-examples} must already exist in your project.
* {@code --bigQueryDataset=YOUR-DATASET --bigQueryTable=YOUR-NEW-TABLE-NAME}.
*
* <p>Decide whether you want your pipeline to run with 'bounded' (such as files in GCS) or
* 'unbounded' input (such as a PubSub topic). To run with unbounded input, set
* {@code --unbounded=true}. Then, optionally specify the Google Cloud PubSub topic to read from
* via {@code --pubsubTopic=projects/PROJECT_ID/topics/YOUR_TOPIC_NAME}. If the topic does not
* exist, the pipeline will create one for you. It will delete this topic when it terminates.
* The pipeline will automatically launch an auxiliary batch pipeline to populate the given PubSub
* topic with the contents of the {@code --inputFile}, in order to make the example easy to run.
* If you want to use an independently-populated PubSub topic, indicate this by setting
* {@code --inputFile=""}. In that case, the auxiliary pipeline will not be started.
*
* <p>By default, the pipeline will do fixed windowing, on 1-minute windows. You can
* change this interval by setting the {@code --windowSize} parameter, e.g. {@code --windowSize=10}
* for 10-minute windows.
*/
public class WindowedWordCount {
private static final Logger LOG = LoggerFactory.getLogger(WindowedWordCount.class);
static final int WINDOW_SIZE = 1; // Default window duration in minutes
/**
* Concept #2: A DoFn that sets the data element timestamp. This is a silly method, just for
* this example, for the bounded data case.
*
* <p>Imagine that many ghosts of Shakespeare are all typing madly at the same time to recreate
* his masterworks. Each line of the corpus will get a random associated timestamp somewhere in a
* 2-hour period.
*/
static class AddTimestampFn extends DoFn<String, String> {
private static final long RAND_RANGE = 7200000; // 2 hours in ms
@Override
public void processElement(ProcessContext c) {
// Generate a timestamp that falls somewhere in the past two hours.
long randomTimestamp = System.currentTimeMillis()
- (int) (Math.random() * RAND_RANGE);
/**
* Concept #2: Set the data element with that timestamp.
*/
c.outputWithTimestamp(c.element(), new Instant(randomTimestamp));
}
}
/** A DoFn that converts a Word and Count into a BigQuery table row. */
static class FormatAsTableRowFn extends DoFn<KV<String, Long>, TableRow> {
@Override
public void processElement(ProcessContext c) {
TableRow row = new TableRow()
.set("word", c.element().getKey())
.set("count", c.element().getValue())
// include a field for the window timestamp
.set("window_timestamp", c.timestamp().toString());
c.output(row);
}
}
/**
* Helper method that defines the BigQuery schema used for the output.
*/
private static TableSchema getSchema() {
List<TableFieldSchema> fields = new ArrayList<>();
fields.add(new TableFieldSchema().setName("word").setType("STRING"));
fields.add(new TableFieldSchema().setName("count").setType("INTEGER"));
fields.add(new TableFieldSchema().setName("window_timestamp").setType("TIMESTAMP"));
TableSchema schema = new TableSchema().setFields(fields);
return schema;
}
/**
* Concept #6: We'll stream the results to a BigQuery table. The BigQuery output source is one
* that supports both bounded and unbounded data. This is a helper method that creates a
* TableReference from input options, to tell the pipeline where to write its BigQuery results.
*/
private static TableReference getTableReference(Options options) {
TableReference tableRef = new TableReference();
tableRef.setProjectId(options.getProject());
tableRef.setDatasetId(options.getBigQueryDataset());
tableRef.setTableId(options.getBigQueryTable());
return tableRef;
}
/**
* Options supported by {@link WindowedWordCount}.
*
* <p>Inherits standard example configuration options, which allow specification of the BigQuery
* table and the PubSub topic, as well as the {@link WordCount.WordCountOptions} support for
* specification of the input file.
*/
public static interface Options
extends WordCount.WordCountOptions, DataflowExampleUtils.DataflowExampleUtilsOptions {
@Description("Fixed window duration, in minutes")
@Default.Integer(WINDOW_SIZE)
Integer getWindowSize();
void setWindowSize(Integer value);
@Description("Whether to run the pipeline with unbounded input")
boolean isUnbounded();
void setUnbounded(boolean value);
}
public static void main(String[] args) throws IOException {
Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
options.setBigQuerySchema(getSchema());
// DataflowExampleUtils creates the necessary input sources to simplify execution of this
// Pipeline.
DataflowExampleUtils exampleDataflowUtils = new DataflowExampleUtils(options,
options.isUnbounded());
Pipeline pipeline = Pipeline.create(options);
/**
* Concept #1: the Dataflow SDK lets us run the same pipeline with either a bounded or
* unbounded input source.
*/
PCollection<String> input;
if (options.isUnbounded()) {
LOG.info("Reading from PubSub.");
/**
* Concept #3: Read from the PubSub topic. A topic will be created if it wasn't
* specified as an argument. The data elements' timestamps will come from the pubsub
* injection.
*/
input = pipeline
.apply(PubsubIO.Read.topic(options.getPubsubTopic()));
} else {
/** Else, this is a bounded pipeline. Read from the GCS file. */
input = pipeline
.apply(TextIO.Read.from(options.getInputFile()))
// Concept #2: Add an element timestamp, using an artificial time just to show windowing.
// See AddTimestampFn for more detail on this.
.apply(ParDo.of(new AddTimestampFn()));
}
/**
* Concept #4: Window into fixed windows. The fixed window size for this example defaults to 1
* minute (you can change this with a command-line option). See the documentation for more
* information on how fixed windows work, and for information on the other types of windowing
* available (e.g., sliding windows).
*/
PCollection<String> windowedWords = input
.apply(Window.<String>into(
FixedWindows.of(Duration.standardMinutes(options.getWindowSize()))));
/**
* Concept #5: Re-use our existing CountWords transform that does not have knowledge of
* windows over a PCollection containing windowed values.
*/
PCollection<KV<String, Long>> wordCounts = windowedWords.apply(new WordCount.CountWords());
/**
* Concept #6: Format the results for a BigQuery table, then write to BigQuery.
* The BigQuery output source supports both bounded and unbounded data.
*/
wordCounts.apply(ParDo.of(new FormatAsTableRowFn()))
.apply(BigQueryIO.Write.to(getTableReference(options)).withSchema(getSchema()));
PipelineResult result = pipeline.run();
/**
* To mock unbounded input from PubSub, we'll now start an auxiliary 'injector' pipeline that
* runs for a limited time, and publishes to the input PubSub topic.
*
* With an unbounded input source, you will need to explicitly shut down this pipeline when you
* are done with it, so that you do not continue to be charged for the instances. You can do
* this via a ctrl-C from the command line, or from the developer's console UI for Dataflow
* pipelines. The PubSub topic will also be deleted at this time.
*/
exampleDataflowUtils.mockUnboundedSource(options.getInputFile(), result);
}
}
| apache-2.0 |
objectiser/camel | core/camel-management/src/test/java/org/apache/camel/management/ManagedSplitterTest.java | 3025 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.management;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.apache.camel.ServiceStatus;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.junit.Test;
/**
 * Verifies that a Splitter EIP is exposed over JMX with the expected attributes
 * (route id, camel context id, state, expression) and that its definition can be
 * dumped back as XML.
 */
public class ManagedSplitterTest extends ManagementTestSupport {

    @Test
    public void testManageSplitter() throws Exception {
        // JMX tests dont work well on AIX CI servers (hangs them)
        if (isPlatform("aix")) {
            return;
        }
        // "Hello,World" is split on the comma by the ${body} expression -> 2 messages.
        MockEndpoint foo = getMockEndpoint("mock:foo");
        foo.expectedMessageCount(2);
        template.sendBody("direct:start", "Hello,World");
        assertMockEndpointsSatisfied();
        // get the stats for the route
        MBeanServer mbeanServer = getMBeanServer();
        // get the object name for the splitter (registered under its explicit id "mysend")
        ObjectName on = ObjectName.getInstance("org.apache.camel:context=camel-1,type=processors,name=\"mysend\"");
        // should be on route1
        String routeId = (String) mbeanServer.getAttribute(on, "RouteId");
        assertEquals("route1", routeId);
        String camelId = (String) mbeanServer.getAttribute(on, "CamelId");
        assertEquals("camel-1", camelId);
        String state = (String) mbeanServer.getAttribute(on, "State");
        assertEquals(ServiceStatus.Started.name(), state);
        // Fix: the "Expression" attribute was read and asserted twice into two
        // locals ('lan' and 'exp') — copy-paste duplication; a single read suffices.
        String expression = (String) mbeanServer.getAttribute(on, "Expression");
        assertEquals("${body}", expression);
        // The processor definition can be dumped back as XML, including the expression.
        String xml = (String) mbeanServer.invoke(on, "dumpProcessorAsXml", null, null);
        assertTrue(xml.contains("<split"));
        assertTrue(xml.contains("</split>"));
        assertTrue(xml.contains("<simple>${body}</simple>"));
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start")
                    .split(simple("${body}")).id("mysend")
                    .to("mock:foo");
            }
        };
    }
}
| apache-2.0 |
goodwinnk/intellij-community | jps/jps-builders/src/org/jetbrains/jps/incremental/instrumentation/RmiStubsGenerator.java | 13881 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.jps.incremental.instrumentation;
import com.intellij.compiler.instrumentation.InstrumentationClassFinder;
import com.intellij.execution.process.BaseOSProcessHandler;
import com.intellij.execution.process.ProcessAdapter;
import com.intellij.execution.process.ProcessEvent;
import com.intellij.execution.process.ProcessOutputTypes;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.SmartList;
import com.intellij.util.SystemProperties;
import gnu.trove.THashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.ModuleChunk;
import org.jetbrains.jps.ProjectPaths;
import org.jetbrains.jps.incremental.*;
import org.jetbrains.jps.incremental.messages.BuildMessage;
import org.jetbrains.jps.incremental.messages.CompilerMessage;
import org.jetbrains.jps.model.java.JpsJavaExtensionService;
import org.jetbrains.jps.model.java.JpsJavaSdkType;
import org.jetbrains.jps.model.java.compiler.JpsJavaCompilerConfiguration;
import org.jetbrains.jps.model.java.compiler.JpsJavaCompilerOptions;
import org.jetbrains.jps.model.java.compiler.RmicCompilerOptions;
import org.jetbrains.jps.model.library.sdk.JpsSdk;
import org.jetbrains.jps.service.SharedThreadPool;
import java.io.File;
import java.io.IOException;
import java.rmi.Remote;
import java.util.*;
import java.util.concurrent.Future;
/**
 * Build step that generates RMI stub/skeleton/tie classes by invoking the
 * JDK's external {@code rmic} tool over every freshly compiled class that
 * implements {@link java.rmi.Remote}. The generated class files are then
 * registered with the build's {@code OutputConsumer} so incremental builds
 * can track them.
 *
 * @author Eugene Zhuravlev
 */
public class RmiStubsGenerator extends ClassProcessingBuilder {
  // java/rmi/Remote in internal (slash-separated) form, matching the names
  // returned by InstrumentationClassFinder.PseudoClass#getName().
  private static final String REMOTE_INTERFACE_NAME = Remote.class.getName().replace('.', '/');
  private static final File[] EMPTY_FILE_ARRAY = new File[0];
  // Per-build cached flag: whether the rmic compiler is enabled in the project's compiler settings.
  private static final Key<Boolean> IS_ENABLED = Key.create("_rmic_compiler_enabled_");
  public RmiStubsGenerator() {
    super(BuilderCategory.CLASS_INSTRUMENTER);
  }
  @Override
  protected String getProgressMessage() {
    return "Generating RMI stubs...";
  }
  @NotNull
  @Override
  public String getPresentableName() {
    return "rmic";
  }
  /**
   * Caches the "rmic enabled" setting on the compile context once per build,
   * so {@link #isEnabled(CompileContext, ModuleChunk)} does not have to
   * re-read the compiler configuration for every chunk.
   */
  @Override
  public void buildStarted(CompileContext context) {
    super.buildStarted(context);
    final RmicCompilerOptions rmicOptions = getOptions(context);
    // NOTE: IS_EANABLED is the (misspelled) field name declared in RmicCompilerOptions.
    IS_ENABLED.set(context, rmicOptions != null && rmicOptions.IS_EANABLED);
  }
  @Override
  protected boolean isEnabled(CompileContext context, ModuleChunk chunk) {
    return IS_ENABLED.get(context, Boolean.FALSE);
  }
  /**
   * Scans all classes compiled for this chunk, collects those implementing
   * {@link java.rmi.Remote} (grouped by their build target), and delegates to
   * {@link #generateRmiStubs} when at least one remote class was found.
   */
  @Override
  protected ExitCode performBuild(CompileContext context, ModuleChunk chunk, InstrumentationClassFinder finder, OutputConsumer outputConsumer) {
    ExitCode exitCode = ExitCode.NOTHING_DONE;
    if (!outputConsumer.getCompiledClasses().isEmpty()) {
      final Map<ModuleBuildTarget, Collection<ClassItem>> remoteClasses = new THashMap<>();
      for (ModuleBuildTarget target : chunk.getTargets()) {
        for (CompiledClass compiledClass : outputConsumer.getTargetCompiledClasses(target)) {
          try {
            if (isRemote(compiledClass, finder)) {
              Collection<ClassItem> list = remoteClasses.get(target);
              if (list == null) {
                list = new ArrayList<>();
                remoteClasses.put(target, list);
              }
              list.add(new ClassItem(compiledClass));
            }
          }
          catch (IOException e) {
            // Class could not be read/resolved; report and keep scanning the rest.
            context.processMessage(new CompilerMessage(getPresentableName(), e));
          }
        }
      }
      if (!remoteClasses.isEmpty()) {
        exitCode = generateRmiStubs(context, remoteClasses, chunk, outputConsumer);
      }
    }
    return exitCode;
  }
  /**
   * Runs one external {@code rmic} process per build target over that target's
   * remote classes, parses the tool's stdout/stderr for errors, and registers
   * every generated *_Stub/*_Skel/*_Tie class file with the output consumer.
   *
   * NOTE(review): {@code exitCode} is initialized to NOTHING_DONE and never
   * updated to OK even when stubs are successfully generated — confirm whether
   * this is intentional for the incremental-build framework.
   */
  private ExitCode generateRmiStubs(final CompileContext context,
                                    Map<ModuleBuildTarget, Collection<ClassItem>> remoteClasses,
                                    ModuleChunk chunk,
                                    OutputConsumer outputConsumer) {
    ExitCode exitCode = ExitCode.NOTHING_DONE;
    // Build the -classpath argument from the chunk's compilation classpath.
    final Collection<File> classpath = ProjectPaths.getCompilationClasspath(chunk, false);
    final StringBuilder buf = new StringBuilder();
    for (File file : classpath) {
      if (buf.length() > 0) {
        buf.append(File.pathSeparator);
      }
      buf.append(file.getPath());
    }
    final String classpathString = buf.toString();
    final String rmicPath = getPathToRmic(chunk);
    final RmicCompilerOptions options = getOptions(context);
    // Targets whose rmic process actually ran; only their outputs get registered below.
    final List<ModuleBuildTarget> targetsProcessed = new ArrayList<>(remoteClasses.size());
    for (Map.Entry<ModuleBuildTarget, Collection<ClassItem>> entry : remoteClasses.entrySet()) {
      try {
        final ModuleBuildTarget target = entry.getKey();
        final Collection<String> cmdLine = createStartupCommand(
          target, rmicPath, classpathString, options, entry.getValue()
        );
        final Process process = Runtime.getRuntime().exec(ArrayUtil.toStringArray(cmdLine));
        final BaseOSProcessHandler handler = new BaseOSProcessHandler(process, StringUtil.join(cmdLine, " "), null) {
          @NotNull
          @Override
          protected Future<?> executeOnPooledThread(@NotNull Runnable task) {
            // Use the build-process shared pool instead of spawning ad-hoc threads.
            return SharedThreadPool.getInstance().executeOnPooledThread(task);
          }
        };
        // rmic reports diagnostics on both streams; parse each independently.
        final RmicOutputParser stdOutParser = new RmicOutputParser(context, getPresentableName());
        final RmicOutputParser stdErrParser = new RmicOutputParser(context, getPresentableName());
        handler.addProcessListener(new ProcessAdapter() {
          @Override
          public void onTextAvailable(@NotNull ProcessEvent event, @NotNull Key outputType) {
            if (outputType == ProcessOutputTypes.STDOUT) {
              stdOutParser.append(event.getText());
            }
            else if (outputType == ProcessOutputTypes.STDERR) {
              stdErrParser.append(event.getText());
            }
          }
          @Override
          public void processTerminated(@NotNull ProcessEvent event) {
            super.processTerminated(event);
          }
        });
        handler.startNotify();
        // Block until the rmic process finishes before inspecting its result.
        handler.waitFor();
        targetsProcessed.add(target);
        if (stdErrParser.isErrorsReported() || stdOutParser.isErrorsReported()) {
          break;
        }
        else {
          // No textual errors seen; fall back to the process exit value.
          final int exitValue = handler.getProcess().exitValue();
          if (exitValue != 0) {
            context.processMessage(new CompilerMessage(getPresentableName(), BuildMessage.Kind.ERROR, "RMI stub generation failed"));
            break;
          }
        }
      }
      catch (IOException e) {
        context.processMessage(new CompilerMessage(getPresentableName(), e));
        break;
      }
    }
    // registering generated files
    // Cache directory listings so each output directory is listed only once.
    final Map<File, File[]> fsCache = new THashMap<>(FileUtil.FILE_HASHING_STRATEGY);
    for (ModuleBuildTarget target : targetsProcessed) {
      final Collection<ClassItem> items = remoteClasses.get(target);
      for (ClassItem item : items) {
        File[] children = fsCache.get(item.parentDir);
        if (children == null) {
          children = item.parentDir.listFiles();
          if (children == null) {
            children = EMPTY_FILE_ARRAY;
          }
          fsCache.put(item.parentDir, children);
        }
        final Collection<File> files = item.selectGeneratedFiles(children);
        if (!files.isEmpty()) {
          // Map every generated stub back to the source files of the original class.
          final Collection<String> sources = item.compiledClass.getSourceFilesPaths();
          for (File generated : files) {
            try {
              outputConsumer.registerOutputFile(target, generated, sources);
            }
            catch (IOException e) {
              context.processMessage(new CompilerMessage(getPresentableName(), e));
            }
          }
        }
      }
    }
    return exitCode;
  }
  /**
   * Assembles the rmic command line: tool path, options taken from the
   * compiler configuration, classpath, output directory, and the fully
   * qualified names of the classes to process.
   */
  private static Collection<String> createStartupCommand(final ModuleBuildTarget target,
                                                         final String compilerPath,
                                                         final String classpath,
                                                         final RmicCompilerOptions config,
                                                         final Collection<ClassItem> items) {
    final List<String> commandLine = new ArrayList<>();
    commandLine.add(compilerPath);
    if (config.DEBUGGING_INFO) {
      commandLine.add("-g");
    }
    if(config.GENERATE_IIOP_STUBS) {
      commandLine.add("-iiop");
    }
    // User-specified additional options are passed through verbatim, split on whitespace.
    final StringTokenizer tokenizer = new StringTokenizer(config.ADDITIONAL_OPTIONS_STRING, " \t\r\n");
    while(tokenizer.hasMoreTokens()) {
      final String token = tokenizer.nextToken();
      commandLine.add(token);
    }
    commandLine.add("-classpath");
    commandLine.add(classpath);
    commandLine.add("-d");
    final File outputDir = target.getOutputDir();
    assert outputDir != null;
    commandLine.add(outputDir.getPath());
    for (ClassItem item : items) {
      commandLine.add(item.compiledClass.getClassName());
    }
    return commandLine;
  }
  /**
   * Locates the rmic executable: next to the module SDK's java executable when
   * an SDK is configured, otherwise under the JVM's own java.home.
   */
  private static String getPathToRmic(ModuleChunk chunk) {
    final JpsSdk<?> sdk = chunk.representativeTarget().getModule().getSdk(JpsJavaSdkType.INSTANCE);
    if (sdk != null) {
      final String executable = JpsJavaSdkType.getJavaExecutable(sdk);
      if (executable != null) {
        final int idx = FileUtil.toSystemIndependentName(executable).lastIndexOf("/");
        if (idx >= 0) {
          return executable.substring(0, idx) + "/rmic";
        }
      }
    }
    return SystemProperties.getJavaHome() + "/bin/rmic";
  }
  /**
   * Returns true if the compiled class is a concrete class that (directly or
   * transitively through its interfaces) implements java.rmi.Remote.
   * Interfaces themselves are skipped — rmic only processes implementations.
   */
  private static boolean isRemote(CompiledClass compiled, InstrumentationClassFinder finder) throws IOException{
    try {
      final InstrumentationClassFinder.PseudoClass pseudoClass = finder.loadClass(compiled.getClassName());
      if (pseudoClass != null && !pseudoClass.isInterface()) {
        for (InstrumentationClassFinder.PseudoClass anInterface : pseudoClass.getInterfaces()) {
          if (isRemoteInterface(anInterface, REMOTE_INTERFACE_NAME)) {
            return true;
          }
        }
      }
    }
    catch (ClassNotFoundException ignored) {
      // Unresolvable classes are simply treated as non-remote.
    }
    return false;
  }
  // Recursively checks whether the interface or any of its super-interfaces is java.rmi.Remote.
  private static boolean isRemoteInterface(InstrumentationClassFinder.PseudoClass iface, final String remoteInterfaceName)
    throws IOException, ClassNotFoundException {
    if (remoteInterfaceName.equals(iface.getName())) {
      return true;
    }
    for (InstrumentationClassFinder.PseudoClass superIface : iface.getInterfaces()) {
      if (isRemoteInterface(superIface, remoteInterfaceName)) {
        return true;
      }
    }
    return false;
  }
  /**
   * Reads the rmic settings ("Rmic" section) from the project's compiler
   * configuration, or returns null when they are absent or of an unexpected type.
   */
  @Nullable
  private static RmicCompilerOptions getOptions(CompileContext context) {
    final JpsJavaCompilerConfiguration config = JpsJavaExtensionService.getInstance().getCompilerConfiguration(context.getProjectDescriptor().getProject());
    if (config != null) {
      final JpsJavaCompilerOptions options = config.getCompilerOptions("Rmic");
      if (options instanceof RmicCompilerOptions) {
        return (RmicCompilerOptions)options;
      }
    }
    return null;
  }
  /**
   * One remote class scheduled for stub generation: remembers the compiled
   * class, its output directory, and its base file name so the generated
   * companion class files can be found afterwards.
   */
  private static final class ClassItem {
    // File-name suffixes rmic may produce next to the original class file.
    static final String[] GEN_SUFFIXES = {"_Stub.class", "_Skel.class", "_Tie.class"};
    final CompiledClass compiledClass;
    final File parentDir;
    final String baseName;
    ClassItem(CompiledClass compiledClass) {
      this.compiledClass = compiledClass;
      final File outputFile = compiledClass.getOutputFile();
      parentDir = outputFile.getParentFile();
      baseName = StringUtil.trimEnd(outputFile.getName(), ".class");
    }
    /**
     * Selects from the given directory listing the files rmic generated for
     * this class, i.e. those ending with baseName + one of GEN_SUFFIXES.
     */
    @NotNull
    public Collection<File> selectGeneratedFiles(File[] candidates) {
      if (candidates == null || candidates.length == 0) {
        return Collections.emptyList();
      }
      final Collection<File> result = new SmartList<>();
      final String[] suffixes = new String[GEN_SUFFIXES.length];
      for (int i = 0; i < GEN_SUFFIXES.length; i++) {
        suffixes[i] = baseName + GEN_SUFFIXES[i];
      }
      for (File candidate : candidates) {
        final String name = candidate.getName();
        for (String suffix : suffixes) {
          if (name.endsWith(suffix)) {
            result.add(candidate);
            break;
          }
        }
      }
      return result;
    }
  }
  /**
   * Turns rmic's line-oriented output into build messages. A line containing
   * "error" is reported as an ERROR (and remembered via isErrorsReported()),
   * "warning" as a WARNING, anything else as INFO.
   */
  private static class RmicOutputParser extends LineOutputWriter {
    private final CompileContext myContext;
    private final String myCompilerName;
    private boolean myErrorsReported = false;
    private RmicOutputParser(CompileContext context, String name) {
      myContext = context;
      myCompilerName = name;
    }
    private boolean isErrorsReported() {
      return myErrorsReported;
    }
    @Override
    protected void lineAvailable(String line) {
      if (!StringUtil.isEmpty(line)) {
        BuildMessage.Kind kind = BuildMessage.Kind.INFO;
        if (line.contains("error")) {
          kind = BuildMessage.Kind.ERROR;
          myErrorsReported = true;
        }
        else if (line.contains("warning")) {
          kind = BuildMessage.Kind.WARNING;
        }
        myContext.processMessage(new CompilerMessage(myCompilerName, kind, line));
      }
    }
  }
}
| apache-2.0 |
ASU-Capstone/uPortal-Forked | uportal-war/src/main/java/org/jasig/portal/rest/LayoutRESTController.java | 7697 | /**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.rest;
import java.util.ArrayList;
import java.util.List;
import javax.portlet.WindowState;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jasig.portal.IUserPreferencesManager;
import org.jasig.portal.IUserProfile;
import org.jasig.portal.layout.IUserLayoutStore;
import org.jasig.portal.layout.dlm.DistributedUserLayout;
import org.jasig.portal.portlet.dao.IPortletDefinitionDao;
import org.jasig.portal.portlet.om.IPortletDefinition;
import org.jasig.portal.portlet.om.IPortletDefinitionParameter;
import org.jasig.portal.portlet.om.IPortletPreference;
import org.jasig.portal.portlet.om.IPortletWindowId;
import org.jasig.portal.rest.layout.LayoutPortlet;
import org.jasig.portal.rest.layout.TabListOfNodes;
import org.jasig.portal.security.IPerson;
import org.jasig.portal.security.IPersonManager;
import org.jasig.portal.url.IPortalUrlBuilder;
import org.jasig.portal.url.IPortalUrlProvider;
import org.jasig.portal.url.IPortletUrlBuilder;
import org.jasig.portal.url.UrlType;
import org.jasig.portal.user.IUserInstance;
import org.jasig.portal.user.IUserInstanceManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
@Controller
public class LayoutRESTController {

    protected final Log log = LogFactory.getLog(getClass());

    IUserLayoutStore userLayoutStore;

    @Autowired(required = true)
    public void setUserLayoutStore(IUserLayoutStore userLayoutStore) {
        this.userLayoutStore = userLayoutStore;
    }

    IPersonManager personManager;

    @Autowired(required = true)
    public void setPersonManager(IPersonManager personManager) {
        this.personManager = personManager;
    }

    private IPortalUrlProvider urlProvider;

    @Autowired(required = true)
    public void setPortalUrlProvider(IPortalUrlProvider urlProvider) {
        this.urlProvider = urlProvider;
    }

    private IUserInstanceManager userInstanceManager;

    @Autowired(required = true)
    public void setUserInstanceManager(IUserInstanceManager userInstanceManager) {
        this.userInstanceManager = userInstanceManager;
    }

    private IPortletDefinitionDao portletDao;

    @Autowired(required = true)
    public void setPortletDao(IPortletDefinitionDao portletDao) {
        this.portletDao = portletDao;
    }

    /**
     * A REST call to get a json feed of the current users layout.
     *
     * @param request The servlet request. Utilized to get the users instance and eventually their layout
     * @param tab The tab name on which to filter; optional; if not provided, the entire layout is returned.
     *            If the name matches no tab, an empty layout is returned.
     * @return json feed of the layout, or {@code null} if the layout could not be retrieved
     */
    @RequestMapping(value="/layoutDoc", method = RequestMethod.GET)
    public ModelAndView getRESTController(HttpServletRequest request, @RequestParam(value = "tab", required = false) String tab) {
        final IPerson person = personManager.getPerson(request);
        List<LayoutPortlet> portlets = new ArrayList<LayoutPortlet>();
        try {
            final IUserInstance ui = userInstanceManager.getUserInstance(request);
            final IUserPreferencesManager upm = ui.getPreferencesManager();
            final IUserProfile profile = upm.getUserProfile();
            final DistributedUserLayout userLayout = userLayoutStore.getUserLayout(person, profile);
            Document document = userLayout.getLayout();

            // Select the channel nodes to render: either those under the named tab
            // folder, or every channel in the layout when no tab was requested.
            NodeList portletNodes = null;
            if (tab != null) {
                NodeList folders = document.getElementsByTagName("folder");
                for (int i = 0; i < folders.getLength(); i++) {
                    Node node = folders.item(i);
                    // Guard against folders without a "name" attribute (previously an NPE).
                    Node nameAttribute = node.getAttributes().getNamedItem("name");
                    if (nameAttribute != null && tab.equalsIgnoreCase(nameAttribute.getNodeValue())) {
                        TabListOfNodes tabNodes = new TabListOfNodes();
                        tabNodes.addAllChannels(node.getChildNodes());
                        portletNodes = tabNodes;
                        break;
                    }
                }
            } else {
                portletNodes = document.getElementsByTagName("channel");
            }

            // BUG FIX: when a tab name was requested but matched no folder,
            // portletNodes remained null and iterating it threw an NPE that was
            // swallowed by the outer catch, returning null. Now an unknown tab
            // simply yields an empty layout.
            if (portletNodes != null) {
                for (int i = 0; i < portletNodes.getLength(); i++) {
                    try {
                        NamedNodeMap attributes = portletNodes.item(i).getAttributes();
                        IPortletDefinition def = portletDao.getPortletDefinitionByFname(attributes.getNamedItem("fname").getNodeValue());
                        LayoutPortlet portlet = new LayoutPortlet(def);
                        portlet.setNodeId(attributes.getNamedItem("ID").getNodeValue());

                        //get alt max URL
                        String alternativeMaximizedLink = def.getAlternativeMaximizedLink();
                        if (alternativeMaximizedLink != null) {
                            portlet.setUrl(alternativeMaximizedLink);
                            portlet.setAltMaxUrl(true);
                        } else {
                            // get the maximized URL for this portlet
                            final IPortalUrlBuilder portalUrlBuilder = urlProvider.getPortalUrlBuilderByLayoutNode(request, attributes.getNamedItem("ID").getNodeValue(), UrlType.RENDER);
                            final IPortletWindowId targetPortletWindowId = portalUrlBuilder.getTargetPortletWindowId();
                            if (targetPortletWindowId != null) {
                                final IPortletUrlBuilder portletUrlBuilder = portalUrlBuilder.getPortletUrlBuilder(targetPortletWindowId);
                                portletUrlBuilder.setWindowState(WindowState.MAXIMIZED);
                            }
                            portlet.setUrl(portalUrlBuilder.getUrlString());
                            portlet.setAltMaxUrl(false);
                        }
                        portlets.add(portlet);
                    } catch (Exception e) {
                        // Best-effort per portlet: a single broken channel must not break the whole feed.
                        log.warn("Exception construction JSON representation of mobile portlet", e);
                    }
                }
            }

            ModelAndView mv = new ModelAndView();
            mv.addObject("layout", portlets);
            mv.setViewName("json");
            return mv;
        } catch (Exception e) {
            log.error("Error retrieving user layout document", e);
        }
        // Preserved contract: null signals the layout could not be retrieved at all.
        return null;
    }
}
| apache-2.0 |
JMaltat/fragaria-ektorp | org.ektorp/src/main/java/org/ektorp/support/AttachmentsInOrderParser.java | 3090 | package org.ektorp.support;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
/**
 * A document's MIME multipart/related representation produced by CouchDB
 * uses the order of the attachments in the JSON _attachments object as the
 * order of the attachments in the multipart/related. Thus, the order must
 * be preserved in order to parse a CouchDB multipart/related message.
 *
 * This class parses a document and returns the order of the attachments.
 *
 * @author David Venable
 */
public class AttachmentsInOrderParser
{
    /**
     * Parses a CouchDB document in the form of a JsonParser to get the
     * attachments order. It is important that the JsonParser come straight
     * from the source document and not from an object, or the order will
     * be incorrect.
     *
     * @param documentJsonParser a JsonParser positioned at the very root of a JSON CouchDB document
     * @return the attachment names in document order, or null when the document has no _attachments field
     * @throws IOException on underlying read or parse failures
     */
    public static List<String> parseAttachmentNames(JsonParser documentJsonParser) throws IOException
    {
        // Enter the root object, then walk its top-level fields.
        documentJsonParser.nextToken();
        for (JsonToken token = documentJsonParser.nextToken();
             token != JsonToken.END_OBJECT;
             token = documentJsonParser.nextToken())
        {
            if (CouchDbDocument.ATTACHMENTS_NAME.equals(documentJsonParser.getCurrentName()))
            {
                // Advance onto the _attachments object itself before reading its entries.
                documentJsonParser.nextToken();
                return collectAttachmentNames(documentJsonParser);
            }
            if (token == JsonToken.START_OBJECT)
            {
                // Skip over unrelated nested objects without descending into them.
                skipObject(documentJsonParser);
            }
        }
        return null;
    }

    /**
     * Reads the entries of the _attachments object, recording each attachment
     * name in encounter order while skipping the per-attachment metadata.
     */
    private static List<String> collectAttachmentNames(JsonParser jsonParser) throws IOException
    {
        List<String> attachmentNameList = new ArrayList<String>();
        for (JsonToken token = jsonParser.nextToken();
             token != JsonToken.END_OBJECT;
             token = jsonParser.nextToken())
        {
            String attachmentName = jsonParser.getCurrentName();
            jsonParser.nextToken();
            if (jsonParser.getCurrentToken() != JsonToken.START_OBJECT)
            {
                // Each attachment value must itself be a JSON object.
                String message = CouchDbDocument.ATTACHMENTS_NAME + " contains an invalid object.";
                throw new JsonParseException(message, jsonParser.getCurrentLocation());
            }
            skipObject(jsonParser);
            attachmentNameList.add(attachmentName);
        }
        return attachmentNameList;
    }

    /**
     * Consumes the remainder of the current JSON object, recursing through any
     * nested objects, leaving the parser just past the matching END_OBJECT.
     */
    private static void skipObject(JsonParser jsonParser) throws IOException
    {
        for (JsonToken token = jsonParser.nextToken();
             token != JsonToken.END_OBJECT;
             token = jsonParser.nextToken())
        {
            if (token == JsonToken.START_OBJECT)
            {
                skipObject(jsonParser);
            }
        }
    }
}
| apache-2.0 |
EArdeleanu/gateway | transport/http/src/test/java/org/kaazing/gateway/transport/http/security/auth/challenge/TokenHttpChallengeFactoryTest.java | 7386 | /**
* Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.kaazing.gateway.transport.http.security.auth.challenge;
import static org.junit.Assert.assertEquals;
import static org.kaazing.gateway.resource.address.http.HttpResourceAddress.REALM_CHALLENGE_SCHEME;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.lib.legacy.ClassImposteriser;
import org.junit.Before;
import org.junit.Test;
import org.kaazing.gateway.resource.address.ResourceAddress;
import org.kaazing.gateway.transport.http.HttpStatus;
import org.kaazing.gateway.transport.http.bridge.HttpRequestMessage;
import org.kaazing.gateway.transport.http.bridge.HttpResponseMessage;
public class TokenHttpChallengeFactoryTest {

    TokenHttpChallengeFactory factory;
    Mockery context;

    @Before
    public void setUp() throws Exception {
        factory = new TokenHttpChallengeFactory();
        context = new Mockery() {
            {
                setImposteriser(ClassImposteriser.INSTANCE);
            }
        };
    }

    /**
     * Builds an HTTP request whose local address reports the given realm
     * challenge scheme. Extracted because every test previously duplicated
     * this mock/expectation boilerplate verbatim.
     *
     * @param scheme the value returned for REALM_CHALLENGE_SCHEME (may be null)
     */
    private HttpRequestMessage createRequestWithScheme(final String scheme) {
        final HttpRequestMessage request = new HttpRequestMessage();
        final ResourceAddress address = context.mock(ResourceAddress.class);
        request.setLocalAddress(address);
        context.checking(new Expectations() {
            {
                allowing(address).getOption(REALM_CHALLENGE_SCHEME);
                will(returnValue(scheme));
            }
        });
        return request;
    }

    @Test
    public void canBuildASimpleChallenge() throws Exception {
        HttpRequestMessage request = createRequestWithScheme("Token");

        HttpResponseMessage response = factory.createChallenge(request);
        context.assertIsSatisfied();
        assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
        assertEquals("Token", response.getHeader("WWW-Authenticate"));
    }

    @Test
    public void canBuildASimpleChallengeWithParams() throws Exception {
        HttpRequestMessage request = createRequestWithScheme("Token");

        Object[] params = new Object[] { "foo=\"bar\"", "baz=\"quxx\"" };
        HttpResponseMessage response = factory.createChallenge(request, params);
        context.assertIsSatisfied();
        // Consistency fix: this test previously omitted the status assertion its siblings make.
        assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
        assertEquals("Token foo=\"bar\" baz=\"quxx\"", response.getHeader("WWW-Authenticate"));
    }

    @Test
    public void canBuildASimpleChallengeWithNullParams() throws Exception {
        HttpRequestMessage request = createRequestWithScheme("Token");

        Object[] params = null;
        HttpResponseMessage response = factory.createChallenge(request, params);
        context.assertIsSatisfied();
        assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
        assertEquals("Token", response.getHeader("WWW-Authenticate"));
    }

    @Test
    public void canBuildAnApplicationChallenge() throws Exception {
        HttpRequestMessage request = createRequestWithScheme("Application Token");

        HttpResponseMessage response = factory.createChallenge(request);
        context.assertIsSatisfied();
        assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
        assertEquals("Application Token", response.getHeader("WWW-Authenticate"));
    }

    @Test
    public void canBuildAnApplicationChallengeWithParams() throws Exception {
        HttpRequestMessage request = createRequestWithScheme("Application Token");

        Object[] params = new Object[] { "foo=\"bar\"", "baz=\"quxx\"" };
        HttpResponseMessage response = factory.createChallenge(request, params);
        context.assertIsSatisfied();
        assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
        String expected = "Application Token foo=\"bar\" baz=\"quxx\"";
        assertEquals(expected, response.getHeader("WWW-Authenticate"));
    }

    @Test
    public void canBuildAnApplicationChallengeWithNullParams() throws Exception {
        HttpRequestMessage request = createRequestWithScheme("Application Token");

        Object[] params = null;
        HttpResponseMessage response = factory.createChallenge(request, params);
        context.assertIsSatisfied();
        assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
        String expected = "Application Token";
        assertEquals(expected, response.getHeader("WWW-Authenticate"));
    }

    @Test
    public void canBuildAChallengeWhenAuthTypeIsNull() throws Exception {
        // A null configured scheme falls back to the plain "Token" challenge.
        HttpRequestMessage request = createRequestWithScheme(null);

        HttpResponseMessage response = factory.createChallenge(request);
        context.assertIsSatisfied();
        assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
        assertEquals("Token", response.getHeader("WWW-Authenticate"));
    }
}
| apache-2.0 |
spirit03/one | src/oca/java/src/org/opennebula/client/vm/VirtualMachinePool.java | 11017 | /*******************************************************************************
* Copyright 2002-2015, OpenNebula Project (OpenNebula.org), C12G Labs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.opennebula.client.vm;
import java.util.AbstractList;
import java.util.Iterator;
import org.opennebula.client.Client;
import org.opennebula.client.OneResponse;
import org.opennebula.client.Pool;
import org.opennebula.client.PoolElement;
import org.w3c.dom.Node;
/**
* This class represents an OpenNebula VM pool.
* It also offers static XML-RPC call wrappers.
*/
public class VirtualMachinePool extends Pool implements Iterable<VirtualMachine>{
private static final String ELEMENT_NAME = "VM";
private static final String INFO_METHOD = "vmpool.info";
private static final String MONITORING = "vmpool.monitoring";
/**
* Flag for Virtual Machines in any state.
*/
public static final int ALL_VM = -2;
/**
* Flag for Virtual Machines in any state, except for DONE.
*/
public static final int NOT_DONE = -1;
private int filter;
/**
 * Creates a new Virtual Machine pool using the default filter flag,
 * {@link Pool#MINE_GROUP}: the connected user's Virtual Machines plus the
 * ones belonging to his group.
 *
 * @param client XML-RPC Client.
 *
 * @see VirtualMachinePool#VirtualMachinePool(Client, int)
 */
public VirtualMachinePool(Client client)
{
    // Delegate to the general constructor with the default filter.
    this(client, MINE_GROUP);
}
/**
 * Creates a new Virtual Machine pool with an explicit default filter.
 *
 * @param client XML-RPC Client.
 * @param filter Filter flag to use by default in the method
 * {@link VirtualMachinePool#info()}. Possible values:
 * <ul>
 * <li>{@link Pool#ALL}: All Virtual Machines</li>
 * <li>{@link Pool#MINE}: Connected user's Virtual Machines</li>
 * <li>{@link Pool#MINE_GROUP}: Connected user's Virtual Machines, and the ones in
 * his group</li>
 * <li>>= 0: UID User's Virtual Machines</li>
 * </ul>
 */
public VirtualMachinePool(Client client, int filter)
{
    super(ELEMENT_NAME, client, INFO_METHOD);
    this.filter = filter;
}
/**
 * Pool factory callback: wraps an XML {@code <VM>} node into a
 * {@link VirtualMachine} element bound to this pool's client.
 */
@Override
public PoolElement factory(Node node)
{
    return new VirtualMachine(node, client);
}
/**
 * Retrieves all or part of the Virtual Machines in the pool, excluding the
 * ones in the DONE state.
 *
 * @param client XML-RPC Client.
 * @param filter Filter flag to use. Possible values:
 * <ul>
 * <li>{@link Pool#ALL}: All Virtual Machines</li>
 * <li>{@link Pool#MINE}: Connected user's Virtual Machines</li>
 * <li>{@link Pool#MINE_GROUP}: Connected user's Virtual Machines, and the ones in
 * his group</li>
 * <li>>= 0: UID User's Virtual Machines</li>
 * </ul>
 * @return If successful the message contains the string
 * with the information returned by OpenNebula.
 */
public static OneResponse info(Client client, int filter)
{
    // No id range restriction (-1, -1); skip VMs in the DONE state.
    return info(client, filter, -1, -1, NOT_DONE);
}
/**
 * Retrieves all the Virtual Machines in the pool, regardless of owner,
 * excluding the ones in the DONE state.
 *
 * @param client XML-RPC Client.
 * @return If successful the message contains the string
 * with the information returned by OpenNebula.
 */
public static OneResponse infoAll(Client client)
{
    return info(client, ALL, -1, -1, NOT_DONE);
}
/**
 * Retrieves all the connected user's own Virtual Machines, excluding the
 * ones in the DONE state.
 *
 * @param client XML-RPC Client.
 * @return If successful the message contains the string
 * with the information returned by OpenNebula.
 */
public static OneResponse infoMine(Client client)
{
    return info(client, MINE, -1, -1, NOT_DONE);
}
/**
 * Retrieves all the connected user's Virtual Machines and the ones belonging
 * to his group, excluding the ones in the DONE state.
 *
 * @param client XML-RPC Client.
 * @return If successful the message contains the string
 * with the information returned by OpenNebula.
 */
public static OneResponse infoGroup(Client client)
{
    return info(client, MINE_GROUP, -1, -1, NOT_DONE);
}
/**
 * Retrieves all or part of the Virtual Machines in the pool. The
 * Virtual Machines to retrieve can be also filtered by Id, specifying the
 * first and last Id to include; and by state.
 *
 * @param client XML-RPC Client.
 * @param filter Filter flag to use. Possible values:
 * <ul>
 * <li>{@link Pool#ALL}: All Virtual Machines</li>
 * <li>{@link Pool#MINE}: Connected user's Virtual Machines</li>
 * <li>{@link Pool#MINE_GROUP}: Connected user's Virtual Machines, and the ones in
 * his group</li>
 * <li>>= 0: UID User's Virtual Machines</li>
 * </ul>
 * @param startId Lowest Id to retrieve; -1 for no lower bound
 * @param endId Biggest Id to retrieve; -1 for no upper bound
 * @param state Numeric state of the Virtual Machines wanted, or one
 * of {@link VirtualMachinePool#ALL_VM} or
 * {@link VirtualMachinePool#NOT_DONE}
 * @return If successful the message contains the string
 * with the information returned by OpenNebula.
 */
public static OneResponse info(Client client, int filter,
        int startId, int endId, int state)
{
    return client.call(INFO_METHOD, filter, startId, endId, state);
}
/**
 * Retrieves the monitoring data for all or part of the Virtual
 * Machines in the pool.
 *
 * @param client XML-RPC Client.
 * @param filter Filter flag to use. Possible values:
 * <ul>
 * <li>{@link Pool#ALL}: All Virtual Machines</li>
 * <li>{@link Pool#MINE}: Connected user's Virtual Machines</li>
 * <li>{@link Pool#MINE_GROUP}: Connected user's Virtual Machines, and
 * the ones in his group</li>
 * <li>>= 0: UID User's Virtual Machines</li>
 * </ul>
 * @return If successful the message contains the string
 * with the information returned by OpenNebula.
 */
public static OneResponse monitoring(Client client, int filter)
{
    return client.call(MONITORING, filter);
}
/**
 * Loads the xml representation of all or part of the
 * Virtual Machines in the pool, using the filter set in the constructor,
 * and caches the result in this pool instance.
 *
 * @see VirtualMachinePool#info(Client, int)
 *
 * @return If successful the message contains the string
 * with the information returned by OpenNebula.
 */
public OneResponse info()
{
    OneResponse poolResponse = info(client, filter);
    // cache the returned pool xml in this object
    processInfo(poolResponse);
    return poolResponse;
}
/**
 * Loads the xml representation of every Virtual Machine in the pool and
 * caches the result in this pool instance.
 *
 * @return If successful the message contains the string
 * with the information returned by OpenNebula.
 */
public OneResponse infoAll()
{
    OneResponse allResponse = infoAll(client);
    processInfo(allResponse);
    return allResponse;
}
/**
 * Loads the xml representation of the connected user's Virtual Machines
 * and caches the result in this pool instance.
 *
 * @return If successful the message contains the string
 * with the information returned by OpenNebula.
 */
public OneResponse infoMine()
{
    OneResponse mineResponse = infoMine(client);
    processInfo(mineResponse);
    return mineResponse;
}
/**
 * Loads the xml representation of the connected user's Virtual Machines,
 * plus the ones in the user's group, and caches the result in this pool
 * instance.
 *
 * @return If successful the message contains the string
 * with the information returned by OpenNebula.
 */
public OneResponse infoGroup()
{
    OneResponse groupResponse = infoGroup(client);
    processInfo(groupResponse);
    return groupResponse;
}
/**
 * Retrieves all or part of the Virtual Machines in the pool, filtered by
 * ownership, an Id range and state, and caches the result in this pool
 * instance.
 *
 * @param filter Filter flag to use. Possible values:
 * <ul>
 * <li>{@link Pool#ALL}: All Virtual Machines</li>
 * <li>{@link Pool#MINE}: Connected user's Virtual Machines</li>
 * <li>{@link Pool#MINE_GROUP}: Connected user's Virtual Machines, and the ones in
 * his group</li>
 * <li>&gt;= 0: UID User's Virtual Machines</li>
 * </ul>
 * @param startId Lowest Id to retrieve
 * @param endId Biggest Id to retrieve
 * @param state Numeric state of the Virtual Machines wanted
 * @return If successful the message contains the string
 * with the information returned by OpenNebula.
 */
public OneResponse info(int filter, int startId, int endId, int state)
{
    OneResponse filteredResponse = info(client, filter, startId, endId, state);
    processInfo(filteredResponse);
    return filteredResponse;
}
/**
 * Retrieves the monitoring data for all or part of the Virtual
 * Machines in the pool, using this pool's client connection.
 *
 * @param filter Filter flag to use. Possible values:
 * <ul>
 * <li>{@link Pool#ALL}: All Virtual Machines</li>
 * <li>{@link Pool#MINE}: Connected user's Virtual Machines</li>
 * <li>{@link Pool#MINE_GROUP}: Connected user's Virtual Machines, and
 * the ones in his group</li>
 * <li>&gt;= 0: UID User's Virtual Machines</li>
 * </ul>
 * @return If successful the message contains the string
 * with the information returned by OpenNebula.
 */
public OneResponse monitoring(int filter)
{
    OneResponse monitoringResponse = monitoring(client, filter);
    return monitoringResponse;
}
/**
 * Returns an iterator over the Virtual Machines currently cached in this
 * pool. The pool must have been loaded (e.g. via {@link #info()}) for the
 * iterator to see any elements.
 *
 * @return an iterator backed by a read-only list view of the pool.
 */
public Iterator<VirtualMachine> iterator()
{
    // Adapt the underlying DOM-style item()/getLength() access to the
    // List API so we can reuse AbstractList's iterator implementation.
    AbstractList<VirtualMachine> ab = new AbstractList<VirtualMachine>()
    {
        public int size()
        {
            return getLength();
        }
        public VirtualMachine get(int index)
        {
            return (VirtualMachine) item(index);
        }
    };
    return ab.iterator();
}
/**
 * Returns the Virtual Machine with the given Id from the pool. If it is not found,
 * then returns null. The method {@link #info()} must be called before.
 *
 * @param id of the Virtual Machine to retrieve
 * @return The Virtual Machine with the given Id, or null if it was not found.
 */
public VirtualMachine getById(int id)
{
    return (VirtualMachine) super.getById(id);
}
}
| apache-2.0 |
robin13/elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/index/HiddenIndexIT.java | 11275 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.InvalidIndexTemplateException;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
/**
 * Integration tests for the {@code index.hidden} setting: hidden indices are
 * skipped by wildcard expansion (unless the indices options include hidden),
 * are excluded from global ("*") templates, and support hidden aliases.
 */
public class HiddenIndexIT extends ESIntegTestCase {

    /**
     * A hidden index is excluded from wildcard searches by default, reachable
     * by its concrete name, matched by wildcards when hidden expansion is
     * requested, matched implicitly by dot-prefixed patterns, and becomes
     * wildcard-visible again once index.hidden is set to false.
     */
    public void testHiddenIndexSearch() {
        assertAcked(client().admin().indices().prepareCreate("hidden-index")
            .setSettings(Settings.builder().put("index.hidden", true).build())
            .get());
        client().prepareIndex("hidden-index").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();

        // default not visible to wildcard expansion
        SearchResponse searchResponse =
            client().prepareSearch(randomFrom("*", "_all", "h*", "*index")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get();
        boolean matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex()));
        assertFalse(matchedHidden);

        // direct access allowed
        searchResponse = client().prepareSearch("hidden-index").setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get();
        matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex()));
        assertTrue(matchedHidden);

        // with indices option to include hidden
        searchResponse = client().prepareSearch(randomFrom("*", "_all", "h*", "*index"))
            .setSize(1000)
            .setQuery(QueryBuilders.matchAllQuery())
            .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN)
            .get();
        matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex()));
        assertTrue(matchedHidden);

        // implicit based on use of pattern starting with . and a wildcard
        assertAcked(client().admin().indices().prepareCreate(".hidden-index")
            .setSettings(Settings.builder().put("index.hidden", true).build())
            .get());
        client().prepareIndex(".hidden-index").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();
        searchResponse = client().prepareSearch(randomFrom(".*", ".hidden-*"))
            .setSize(1000)
            .setQuery(QueryBuilders.matchAllQuery())
            .get();
        matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> ".hidden-index".equals(hit.getIndex()));
        assertTrue(matchedHidden);

        // make index not hidden
        assertAcked(client().admin().indices().prepareUpdateSettings("hidden-index")
            .setSettings(Settings.builder().put("index.hidden", false).build())
            .get());
        searchResponse =
            client().prepareSearch(randomFrom("*", "_all", "h*", "*index")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get();
        matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex()));
        assertTrue(matchedHidden);
    }

    /**
     * Templates with a catch-all ("*") pattern must not be applied to a hidden
     * index, while templates whose patterns explicitly match the index name
     * still are — so only "bar" and "baz" end up in the mapping, not "foo".
     */
    @SuppressWarnings("unchecked") // mapping source is exposed as untyped Map<String, Object>
    public void testGlobalTemplatesDoNotApply() {
        assertAcked(client().admin().indices().preparePutTemplate("a_global_template").setPatterns(List.of("*"))
            .setMapping("foo", "type=text").get());
        assertAcked(client().admin().indices().preparePutTemplate("not_global_template").setPatterns(List.of("a*"))
            .setMapping("bar", "type=text").get());
        assertAcked(client().admin().indices().preparePutTemplate("specific_template").setPatterns(List.of("a_hidden_index"))
            .setMapping("baz", "type=text").get());
        assertAcked(client().admin().indices().preparePutTemplate("unused_template").setPatterns(List.of("not_used"))
            .setMapping("foobar", "type=text").get());

        assertAcked(client().admin().indices().prepareCreate("a_hidden_index")
            .setSettings(Settings.builder().put("index.hidden", true).build()).get());

        GetMappingsResponse mappingsResponse = client().admin().indices().prepareGetMappings("a_hidden_index").get();
        assertThat(mappingsResponse.mappings().size(), is(1));
        MappingMetadata mappingMetadata = mappingsResponse.mappings().get("a_hidden_index");
        assertNotNull(mappingMetadata);

        Map<String, Object> propertiesMap = (Map<String, Object>) mappingMetadata.getSourceAsMap().get("properties");
        assertNotNull(propertiesMap);
        assertThat(propertiesMap.size(), is(2));

        Map<String, Object> barMap = (Map<String, Object>) propertiesMap.get("bar");
        assertNotNull(barMap);
        assertThat(barMap.get("type"), is("text"));

        Map<String, Object> bazMap = (Map<String, Object>) propertiesMap.get("baz");
        assertNotNull(bazMap);
        assertThat(bazMap.get("type"), is("text"));
    }

    /**
     * A global ("*") template may not set index.hidden at all — the put
     * request must be rejected with an InvalidIndexTemplateException.
     */
    public void testGlobalTemplateCannotMakeIndexHidden() {
        InvalidIndexTemplateException invalidIndexTemplateException = expectThrows(InvalidIndexTemplateException.class,
            () -> client().admin().indices().preparePutTemplate("a_global_template")
                .setPatterns(List.of("*"))
                .setSettings(Settings.builder().put("index.hidden", randomBoolean()).build())
                .get());
        assertThat(invalidIndexTemplateException.getMessage(), containsString("global templates may not specify the setting index.hidden"));
    }

    /**
     * A non-global template may set index.hidden, and indices created from it
     * inherit the setting.
     */
    public void testNonGlobalTemplateCanMakeIndexHidden() {
        assertAcked(client().admin().indices().preparePutTemplate("a_global_template")
            .setPatterns(List.of("my_hidden_pattern*"))
            .setMapping("foo", "type=text")
            .setSettings(Settings.builder().put("index.hidden", true).build())
            .get());
        assertAcked(client().admin().indices().prepareCreate("my_hidden_pattern1").get());
        GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("my_hidden_pattern1").get();
        assertThat(getSettingsResponse.getSetting("my_hidden_pattern1", "index.hidden"), is("true"));
    }

    /**
     * Visible aliases on a hidden index are returned by name and by wildcard;
     * hidden aliases are returned by name and by wildcard only when the
     * indices options include hidden indices; dot-prefixed hidden aliases are
     * matched by dot-prefixed patterns.
     */
    public void testAliasesForHiddenIndices() {
        final String hiddenIndex = "hidden-index";
        final String visibleAlias = "alias-visible";
        final String hiddenAlias = "alias-hidden";
        final String dotHiddenAlias = ".alias-hidden";

        assertAcked(client().admin().indices().prepareCreate(hiddenIndex)
            .setSettings(Settings.builder().put("index.hidden", true).build())
            .get());

        assertAcked(admin().indices().prepareAliases()
            .addAliasAction(IndicesAliasesRequest.AliasActions.add().index(hiddenIndex).alias(visibleAlias)));

        // The index should be returned here when queried by name or by wildcard because the alias is visible
        final GetAliasesRequestBuilder req = client().admin().indices().prepareGetAliases(visibleAlias);
        GetAliasesResponse response = req.get();
        assertThat(response.getAliases().get(hiddenIndex), hasSize(1));
        assertThat(response.getAliases().get(hiddenIndex).get(0).alias(), equalTo(visibleAlias));
        assertThat(response.getAliases().get(hiddenIndex).get(0).isHidden(), nullValue());

        response = client().admin().indices().prepareGetAliases("alias*").get();
        assertThat(response.getAliases().get(hiddenIndex), hasSize(1));
        assertThat(response.getAliases().get(hiddenIndex).get(0).alias(), equalTo(visibleAlias));
        assertThat(response.getAliases().get(hiddenIndex).get(0).isHidden(), nullValue());

        // Now try with a hidden alias
        assertAcked(admin().indices().prepareAliases()
            .addAliasAction(IndicesAliasesRequest.AliasActions.remove().index(hiddenIndex).alias(visibleAlias))
            .addAliasAction(IndicesAliasesRequest.AliasActions.add().index(hiddenIndex).alias(hiddenAlias).isHidden(true)));

        // Querying by name directly should get the right result
        response = client().admin().indices().prepareGetAliases(hiddenAlias).get();
        assertThat(response.getAliases().get(hiddenIndex), hasSize(1));
        assertThat(response.getAliases().get(hiddenIndex).get(0).alias(), equalTo(hiddenAlias));
        assertThat(response.getAliases().get(hiddenIndex).get(0).isHidden(), equalTo(true));

        // querying by wildcard should get the right result because the indices options include hidden by default
        response = client().admin().indices().prepareGetAliases("alias*").get();
        assertThat(response.getAliases().get(hiddenIndex), hasSize(1));
        assertThat(response.getAliases().get(hiddenIndex).get(0).alias(), equalTo(hiddenAlias));
        assertThat(response.getAliases().get(hiddenIndex).get(0).isHidden(), equalTo(true));

        // But we should get no results if we specify indices options that don't include hidden
        response = client().admin().indices().prepareGetAliases("alias*")
            .setIndicesOptions(IndicesOptions.strictExpandOpen()).get();
        assertThat(response.getAliases().get(hiddenIndex), nullValue());

        // Now try with a hidden alias that starts with a dot
        assertAcked(admin().indices().prepareAliases()
            .addAliasAction(IndicesAliasesRequest.AliasActions.remove().index(hiddenIndex).alias(hiddenAlias))
            .addAliasAction(IndicesAliasesRequest.AliasActions.add().index(hiddenIndex).alias(dotHiddenAlias).isHidden(true)));

        // Check that querying by dot-prefixed pattern returns the alias
        response = client().admin().indices().prepareGetAliases(".alias*").get();
        assertThat(response.getAliases().get(hiddenIndex), hasSize(1));
        assertThat(response.getAliases().get(hiddenIndex).get(0).alias(), equalTo(dotHiddenAlias));
        assertThat(response.getAliases().get(hiddenIndex).get(0).isHidden(), equalTo(true));
    }
}
| apache-2.0 |
marktriggs/nyu-sakai-10.4 | kernel/kernel-impl/src/main/java/org/sakaiproject/site/impl/BaseSite.java | 48387 | /**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/trunk/sakai/admin-tools/su/src/java/org/sakaiproject/tool/su/SuTool.java $
* $Id: SuTool.java 5970 2006-02-15 03:07:19Z ggolden@umich.edu $
***********************************************************************************
*
* Copyright (c) 2004, 2005, 2006, 2007, 2008 Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.site.impl;
import java.util.Calendar;
import java.util.Collection;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import java.util.Vector;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.authz.api.AuthzGroup;
import org.sakaiproject.authz.api.AuthzGroupService;
import org.sakaiproject.authz.api.GroupNotDefinedException;
import org.sakaiproject.authz.api.Member;
import org.sakaiproject.authz.api.Role;
import org.sakaiproject.authz.api.RoleAlreadyDefinedException;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.entity.api.Entity;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.entity.api.ResourcePropertiesEdit;
import org.sakaiproject.site.api.Group;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SitePage;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.time.api.Time;
import org.sakaiproject.time.api.TimeService;
import org.sakaiproject.tool.api.Session;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.user.api.User;
import org.sakaiproject.user.api.UserDirectoryService;
import org.sakaiproject.user.api.UserNotDefinedException;
import org.sakaiproject.util.BaseResourceProperties;
import org.sakaiproject.util.BaseResourcePropertiesEdit;
import org.sakaiproject.util.StringUtil;
import org.sakaiproject.util.Validator;
import org.sakaiproject.util.Web;
import org.sakaiproject.util.Xml;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* <p>
* BaseSite is a base implementation of the Site API Site.
* </p>
*/
public class BaseSite implements Site
{
/** Our log (commons). */
private static Log M_log = LogFactory.getLog(BaseSite.class);
/** A fixed class serian number. */
private static final long serialVersionUID = 1L;
/** The event code for this edit. */
protected String m_event = null;
/** Active flag. */
protected boolean m_active = false;
/** List of groups deleted in this edit pass. */
protected Collection m_deletedGroups = new Vector();
/** The site id. */
protected String m_id = null;
/** The site title. */
protected String m_title = null;
/** The site short description. */
protected String m_shortDescription = null;
/** The HTML-safe version of the short description */
protected String m_htmlShortDescription = null;
/** The site description. */
protected String m_description = null;
/** The HTML-safe version of the description */
protected String m_htmlDescription = null;
/** Track whether this description has been loaded. */
protected boolean m_descriptionLoaded = false;
/** Track whether this site has been fully loaded. */
protected boolean m_fullyLoaded = false;
/** The name of the role given to users who join a joinable site. */
protected String m_joinerRole = null;
/** Is this site joinable. */
protected boolean m_joinable = false;
/** Published or not. */
protected boolean m_published = false;
/** The icon url. */
protected String m_icon = null;
/** The site info url. */
protected String m_info = null;
/** The properties. */
protected ResourcePropertiesEdit m_properties = null;
/** The list of site pages for this site. */
protected ResourceVector m_pages = null;
/** Set true while the pages have not yet been read in for a site. */
protected boolean m_pagesLazy = false;
/** The skin to use for this site. */
protected String m_skin = null;
/** The pubView flag. */
protected boolean m_pubView = false;
/** The site type. */
protected String m_type = null;
/** The created user id. */
protected String m_createdUserId = null;
/** The last modified user id. */
protected String m_lastModifiedUserId = null;
/** The time created. */
protected Time m_createdTime = null;
/** The time last modified. */
protected Time m_lastModifiedTime = null;
/** The list of site groups for this site. */
protected ResourceVector m_groups = null;
/** Set true while the groups have not yet been read in for a site. */
protected boolean m_groupsLazy = false;
/** The azg from the AuthzGroupService that is my AuthzGroup impl. */
protected AuthzGroup m_azg = null;
private AuthzGroupService authzGroupService;
/**
* Set to true if we have changed our azg, so it need to be written back on
* save.
*/
protected boolean m_azgChanged = false;
/**
* Set to true to use the site's page order, or false to let a toolOrder
* override the page order.
*/
protected boolean m_customPageOrdered = false;
private BaseSiteService siteService;
private SessionManager sessionManager;
private UserDirectoryService userDirectoryService;
/** Softly deleted data */
protected boolean m_isSoftlyDeleted = false;
protected Date m_softlyDeletedDate = null;
/**
 * Construct a new, empty site with the given id.
 *
 * @param siteService
 *        The site service (resolved from the ComponentManager if null).
 * @param id
 *        The site id.
 */
public BaseSite(BaseSiteService siteService, String id)
{
    // the sessionManager/userDirectoryService fields are still null here;
    // setupServices resolves them from the ComponentManager
    setupServices(siteService, sessionManager, userDirectoryService);
    m_id = id;

    // setup for properties
    m_properties = new BaseResourcePropertiesEdit();

    // set up the page list
    m_pages = new ResourceVector();

    // set up the groups collection
    m_groups = new ResourceVector();

    // if the id is not null (a new site, rather than a reconstruction)
    // add the automatic (live) properties
    if (m_id != null) {
        siteService.addLiveProperties(this);
    }
}
/**
 * Construct from another Site as an exact copy (ids included).
 *
 * @param siteService
 *        The site service (resolved from the ComponentManager if null).
 * @param other
 *        The other site to copy values from.
 */
public BaseSite(BaseSiteService siteService, Site other)
{
    // delegate with exact == true: copy ids as well
    this(siteService, other, true);
}
/**
 * Construct from another Site.
 *
 * @param siteService
 *        The site service (resolved from the ComponentManager if null).
 * @param other
 *        The other site to copy values from.
 * @param exact
 *        If true, we copy ids - else we generate new ones for site, page
 *        and tools.
 */
public BaseSite(BaseSiteService siteService, Site other, boolean exact)
{
    setupServices(siteService, sessionManager, userDirectoryService);
    BaseSite bOther = (BaseSite) other;
    set(bOther, exact);
}
/**
 * Construct from an existing definition, in xml. Also migrates legacy
 * (pre 1.42) attributes that were stored as properties (pubView, type,
 * short description, creator/modifier ids and times) into fields.
 *
 * @param siteService
 *        The site service (resolved from the ComponentManager if null).
 * @param el
 *        The message in XML in a DOM element.
 * @param timeService
 *        Used to parse the created/modified time attributes (GMT).
 */
public BaseSite(BaseSiteService siteService, Element el, TimeService timeService)
{
    setupServices(siteService, sessionManager, userDirectoryService);

    // setup for properties
    m_properties = new BaseResourcePropertiesEdit();

    // setup for page list
    m_pages = new ResourceVector();

    // setup for the groups list
    m_groups = new ResourceVector();

    m_id = el.getAttribute("id");
    m_title = StringUtils.trimToNull(el.getAttribute("title"));

    // description might be encrypted (stored in "description-enc")
    String tmpDesc = StringUtils.trimToNull(el.getAttribute("description"));
    if (tmpDesc == null)
    {
        tmpDesc = StringUtils.trimToNull(Xml.decodeAttribute(el,
                "description-enc"));
    }
    setDescription(tmpDesc);
    m_descriptionLoaded = true;

    // short description might be encrypted (stored in "short-description-enc")
    String tmpShortDesc = StringUtils.trimToNull(el.getAttribute("short-description"));
    if (tmpShortDesc == null)
    {
        tmpShortDesc = StringUtils.trimToNull(Xml.decodeAttribute(el,
                "short-description-enc"));
    }
    setShortDescription(tmpShortDesc);

    m_joinable = Boolean.valueOf(el.getAttribute("joinable")).booleanValue();
    m_joinerRole = StringUtils.trimToNull(el.getAttribute("joiner-role"));

    String published = StringUtils.trimToNull(el.getAttribute("published"));
    if (published == null)
    {
        // read the old "status" (this file 1.42 and before): 1 = unpublished, 2 = published
        published = StringUtils.trimToNull(el.getAttribute("status"));
        if (published != null)
        {
            published = Boolean.valueOf("2".equals(published)).toString();
        }
    }

    m_published = Boolean.valueOf(published).booleanValue();
    m_icon = StringUtils.trimToNull(el.getAttribute("icon"));
    m_info = StringUtils.trimToNull(el.getAttribute("info"));
    m_skin = StringUtils.trimToNull(el.getAttribute("skin"));
    m_createdUserId = StringUtils.trimToNull(el.getAttribute("created-id"));
    m_lastModifiedUserId = StringUtils.trimToNull(el.getAttribute("modified-id"));

    String time = StringUtils.trimToNull(el.getAttribute("created-time"));
    if (time != null)
    {
        m_createdTime = timeService.newTimeGmt(time);
    }

    time = StringUtils.trimToNull(el.getAttribute("modified-time"));
    if (time != null)
    {
        m_lastModifiedTime = timeService.newTimeGmt(time);
    }

    String customOrder = StringUtils.trimToNull(el.getAttribute("customPageOrdered"));
    if (customOrder == null)
    {
        m_customPageOrdered = false;
    }
    else
    {
        m_customPageOrdered = Boolean.valueOf(customOrder).booleanValue();
    }

    // get pubView setting - but old versions (pre 1.42 of this file) won't
    // have it and will have a property instead
    String pubViewValue = StringUtils.trimToNull(el.getAttribute("pubView"));

    // get the type - but old versions (pre 1.42 of this file) won't have it
    // and will have a property instead
    String typeValue = StringUtils.trimToNull(el.getAttribute("type"));

    // the children (properties and page list)
    NodeList children = el.getChildNodes();
    for (int i = 0; i < children.getLength(); i++)
    {
        Node child = children.item(i);
        if (child.getNodeType() != Node.ELEMENT_NODE) continue;
        Element element = (Element) child;

        // look for properties
        if (element.getTagName().equals("properties"))
        {
            // re-create properties
            m_properties = new BaseResourcePropertiesEdit(element);

            // look for pubview (pre 1.42 of this file) in properties
            if (pubViewValue == null)
            {
                pubViewValue = m_properties.getProperty("CTNG:site-include");
                if (pubViewValue == null)
                {
                    pubViewValue = m_properties.getProperty("site-include");
                }
            }
            m_properties.removeProperty("CTNG:site-include");
            m_properties.removeProperty("site-include");

            // look for type (pre 1.42 of this file) in properties (two
            // possibilities)
            if (typeValue == null)
            {
                typeValue = m_properties.getProperty("SAKAI:site-type");
                if (typeValue == null)
                {
                    typeValue = m_properties.getProperty("CTNG:site-type");
                }
            }
            m_properties.removeProperty("SAKAI:site-type");
            m_properties.removeProperty("CTNG:site-type");

            // look for short description (pre 1.42 of this file) in
            // properties
            if (m_shortDescription == null)
            {
                m_shortDescription = m_properties
                        .getProperty("CTNG:short-description");
                if (m_shortDescription == null)
                {
                    m_shortDescription = m_properties
                            .getProperty("short-description");
                }
            }
            m_properties.removeProperty("CTNG:short-description");
            m_properties.removeProperty("short-description");

            // pull out some properties into fields to convert old (pre
            // 1.42) versions
            if (m_createdUserId == null)
            {
                m_createdUserId = m_properties.getProperty("CHEF:creator");
            }
            if (m_lastModifiedUserId == null)
            {
                m_lastModifiedUserId = m_properties.getProperty("CHEF:modifiedby");
            }
            if (m_createdTime == null)
            {
                try
                {
                    m_createdTime = m_properties.getTimeProperty("DAV:creationdate");
                }
                catch (Exception ignore)
                {
                    // legacy property missing or malformed: leave time null
                }
            }
            if (m_lastModifiedTime == null)
            {
                try
                {
                    m_lastModifiedTime = m_properties
                            .getTimeProperty("DAV:getlastmodified");
                }
                catch (Exception ignore)
                {
                    // legacy property missing or malformed: leave time null
                }
            }
            m_properties.removeProperty("CHEF:creator");
            m_properties.removeProperty("CHEF:modifiedby");
            m_properties.removeProperty("DAV:creationdate");
            m_properties.removeProperty("DAV:getlastmodified");
        }

        // look for the page list
        else if (element.getTagName().equals("pages"))
        {
            NodeList pagesNodes = element.getChildNodes();
            for (int p = 0; p < pagesNodes.getLength(); p++)
            {
                Node pageNode = pagesNodes.item(p);
                if (pageNode.getNodeType() != Node.ELEMENT_NODE) continue;
                Element pageEl = (Element) pageNode;
                if (!pageEl.getTagName().equals("page")) continue;
                BaseSitePage page = new BaseSitePage(siteService,pageEl, this);
                m_pages.add(page);
            }

            // TODO: else if ( "groups")
        }
    }

    // set the pubview, now it's found in either the attribute or the
    // properties
    if (pubViewValue != null)
    {
        m_pubView = Boolean.valueOf(pubViewValue).booleanValue();
    }
    else
    {
        m_pubView = false;
    }

    // set the type, now it's found in either the attribute or the
    // properties
    m_type = typeValue;
}
/**
 * ReConstruct from persisted field values.
 *
 * @param siteService the site service (resolved from the ComponentManager if null)
 * @param id the site id
 * @param title the site title
 * @param type the site type
 * @param shortDesc the short description
 * @param description the full description (treated as fully loaded)
 * @param iconUrl the icon url
 * @param infoUrl the site info url
 * @param skin the skin name
 * @param published published flag
 * @param joinable joinable flag
 * @param pubView public-view flag
 * @param joinRole the role given to users who join
 * @param isSpecial special-site flag (currently unused - see TODO in delegate)
 * @param isUser user-site flag (currently unused - see TODO in delegate)
 * @param createdBy the creating user's id
 * @param createdOn the creation time
 * @param modifiedBy the last modifying user's id
 * @param modifiedOn the last modification time
 * @param customPageOrdered true to use the site's own page order
 * @param isSoftlyDeleted soft-deletion flag
 * @param softlyDeletedDate soft-deletion date
 * @param sessionManager the session manager (resolved from the ComponentManager if null)
 * @param userDirectoryService the user directory service (resolved from the ComponentManager if null)
 */
public BaseSite(BaseSiteService siteService, String id, String title, String type, String shortDesc,
        String description, String iconUrl, String infoUrl, String skin,
        boolean published, boolean joinable, boolean pubView, String joinRole,
        boolean isSpecial, boolean isUser, String createdBy, Time createdOn,
        String modifiedBy, Time modifiedOn, boolean customPageOrdered,
        boolean isSoftlyDeleted, Date softlyDeletedDate, SessionManager sessionManager, UserDirectoryService userDirectoryService)
{
    // Since deferred description loading is the edge case, assume the description is real
    // (descriptionLoaded == true). This could be masked by extending String and using
    // instanceof, or extending BaseSite to mark lazy instances, but it is not clear
    // which is cleanest for now.
    this(siteService, id, title, type, shortDesc, description, iconUrl, infoUrl, skin, published, joinable, pubView, joinRole,
            isSpecial, isUser, createdBy, createdOn, modifiedBy, modifiedOn, customPageOrdered, isSoftlyDeleted, softlyDeletedDate,
            true, sessionManager, userDirectoryService);
}
/**
 * ReConstruct from persisted field values, with explicit control over
 * whether the description has already been loaded (descriptionLoaded ==
 * false marks a lazily-loaded description placeholder). Properties, pages
 * and groups are created empty and marked lazy; they are read from storage
 * on first access.
 */
public BaseSite(BaseSiteService siteService, String id, String title, String type, String shortDesc,
        String description, String iconUrl, String infoUrl, String skin,
        boolean published, boolean joinable, boolean pubView, String joinRole,
        boolean isSpecial, boolean isUser, String createdBy, Time createdOn,
        String modifiedBy, Time modifiedOn, boolean customPageOrdered,
        boolean isSoftlyDeleted, Date softlyDeletedDate, boolean descriptionLoaded, SessionManager sessionManager, UserDirectoryService userDirectoryService)
{
    setupServices(siteService, sessionManager, userDirectoryService);

    // setup for properties
    m_properties = new BaseResourcePropertiesEdit();

    // set up the page list
    m_pages = new ResourceVector();

    // set up the groups collection
    m_groups = new ResourceVector();

    m_id = id;
    m_title = title;
    m_type = type;
    setShortDescription(shortDesc);
    setDescription(description);
    m_descriptionLoaded = descriptionLoaded;
    m_icon = iconUrl;
    m_info = infoUrl;
    m_skin = skin;
    m_published = published;
    m_joinable = joinable;
    m_pubView = pubView;
    m_joinerRole = joinRole;
    // TODO: isSpecial
    // TODO: isUser
    m_createdUserId = createdBy;
    m_lastModifiedUserId = modifiedBy;
    m_createdTime = createdOn;
    m_lastModifiedTime = modifiedOn;
    m_customPageOrdered = customPageOrdered;

    // setup for properties, but mark them lazy since we have not yet
    // established them from data
    ((BaseResourcePropertiesEdit) m_properties).setLazy(true);
    m_pagesLazy = true;
    m_groupsLazy = true;

    // soft site deletions - new sites get defaults
    m_isSoftlyDeleted = isSoftlyDeleted;
    m_softlyDeletedDate = softlyDeletedDate;
}
/**
 * Resolves and stores the collaborating services, falling back to the
 * ComponentManager for any that were not supplied.
 *
 * @param siteService the BSS (may be null)
 * @param sessionManager the SM (may be null)
 * @param userDirectoryService the UDS (may be null)
 * @throws java.lang.IllegalStateException if a required service cannot be resolved
 */
void setupServices(BaseSiteService siteService, SessionManager sessionManager, UserDirectoryService userDirectoryService) {
    // site service: use the supplied instance, else look it up
    this.siteService = siteService;
    if (this.siteService == null) {
        this.siteService = (BaseSiteService) ComponentManager.get(SiteService.class);
    }
    if (this.siteService == null) {
        throw new IllegalStateException("Cannot get the SiteService when constructing BaseSite");
    }
    this.authzGroupService = this.siteService.authzGroupService();

    // session manager: same resolution strategy
    this.sessionManager = sessionManager;
    if (this.sessionManager == null) {
        this.sessionManager = (SessionManager) ComponentManager.get(SessionManager.class);
    }
    if (this.sessionManager == null) {
        throw new IllegalStateException("Cannot get the SessionManager when constructing BaseSite");
    }

    // user directory service: same resolution strategy
    this.userDirectoryService = userDirectoryService;
    if (this.userDirectoryService == null) {
        this.userDirectoryService = (UserDirectoryService) ComponentManager.get(UserDirectoryService.class);
    }
    if (this.userDirectoryService == null) {
        throw new IllegalStateException("Cannot get the UserDirectoryService when constructing BaseSite");
    }
}
/**
 * Set me to be a deep copy of other (all but my id).
 *
 * Note that this no longer triggers lazy loading as of KNL-1011. This should
 * not cause any issues because the getters still trigger fetching by default.
 * If a copy is made of a site that is not fully loaded, it should stay lazy,
 * rather than accidentally triggering fetches.
 *
 * @param other
 *        the other to copy.
 * @param exact
 *        If true, we copy ids - else we generate new ones for site, page
 *        and tools.
 */
protected void set(BaseSite other, boolean exact)
{
    // if exact, set the id, else assume the id was already set
    if (exact)
    {
        m_id = other.m_id;
    }
    // simple scalar fields copy straight across
    m_title = other.m_title;
    m_shortDescription = other.m_shortDescription;
    m_htmlShortDescription = other.m_htmlShortDescription;
    m_description = other.m_description;
    m_htmlDescription = other.m_htmlDescription;
    m_descriptionLoaded = other.m_descriptionLoaded;
    m_joinable = other.m_joinable;
    m_joinerRole = other.m_joinerRole;
    m_published = other.m_published;
    m_icon = other.m_icon;
    m_info = other.m_info;
    m_skin = other.m_skin;
    m_type = other.m_type;
    m_pubView = other.m_pubView;
    m_customPageOrdered = other.m_customPageOrdered;
    // resolve any missing services; log rather than fail since we are mid-copy
    if (this.siteService == null) {
        this.siteService = (BaseSiteService) ComponentManager.get(SiteService.class);
        if (this.siteService == null) {
            M_log.error("Cannot set the SiteService when set from BaseSite");
        }
    }
    sessionManager = other.sessionManager;
    if (this.sessionManager == null) {
        this.sessionManager = (SessionManager) ComponentManager.get(SessionManager.class);
        if (this.sessionManager == null) {
            M_log.error("Cannot set the SessionManager when set from BaseSite");
        }
    }
    userDirectoryService = other.userDirectoryService;
    if (this.userDirectoryService == null) {
        this.userDirectoryService = (UserDirectoryService) ComponentManager.get(UserDirectoryService.class);
        if (this.userDirectoryService == null) {
            M_log.error("Cannot set the UserDirectoryService when set from BaseSite");
        }
    }
    //site copies keep soft site deletion flags
    m_isSoftlyDeleted = other.m_isSoftlyDeleted;
    m_softlyDeletedDate = other.m_softlyDeletedDate;
    if (exact)
    {
        m_createdUserId = other.m_createdUserId;
    }
    else
    {
        // a non-exact copy is a new site, created by the current user
        m_createdUserId = userDirectoryService.getCurrentUser().getId();
    }
    m_lastModifiedUserId = other.m_lastModifiedUserId;
    // times are mutable, so clone rather than share; they may be null for
    // a site that was never saved
    if (other.m_createdTime != null)
        m_createdTime = (Time) other.m_createdTime.clone();
    if (other.m_lastModifiedTime != null)
        m_lastModifiedTime = (Time) other.m_lastModifiedTime.clone();
    // We make sure to avoid triggering fetching by passing false to getProperties
    m_properties = new BaseResourcePropertiesEdit();
    ResourceProperties pOther = other.getProperties(false);
    if (exact)
    {
        m_properties.addAll(pOther);
    }
    else
    {
        // rewrite property values that embed the old site id to use the new id
        // NOTE(review): replaceAll treats the old id as a regex — assumes site
        // ids contain no regex metacharacters; confirm if ids can be arbitrary
        Iterator l = pOther.getPropertyNames();
        while (l.hasNext())
        {
            String pOtherName = (String) l.next();
            m_properties.addProperty(pOtherName, pOther.getProperty(pOtherName)
                    .replaceAll(other.getId(), getId()));
        }
    }
    // preserve the source's laziness so we do not force a fetch on the copy
    ((BaseResourcePropertiesEdit) m_properties)
            .setLazy(((BaseResourceProperties) pOther).isLazy());
    // deep copy the pages, but avoid triggering fetching by passing false to getPages
    m_pages = new ResourceVector();
    for (Iterator iPages = other.getPages(false).iterator(); iPages.hasNext();)
    {
        BaseSitePage page = (BaseSitePage) iPages.next();
        m_pages.add(new BaseSitePage(siteService, page, this, exact));
    }
    m_pagesLazy = other.m_pagesLazy;
    // deep copy the groups, but avoid triggering fetching by passing false to getGroups
    m_groups = new ResourceVector();
    for (Iterator iGroups = other.getGroups(false).iterator(); iGroups.hasNext();)
    {
        Group group = (Group) iGroups.next();
        m_groups.add(new BaseGroup(siteService, group, this, exact));
    }
    m_groupsLazy = other.m_groupsLazy;
    m_fullyLoaded = other.m_fullyLoaded;
}
/**
 * @inheritDoc
 */
public String getId()
{
    // never return null: an unset id reads as the empty string
    return (m_id == null) ? "" : m_id;
}
/**
 * @inheritDoc
 */
public String getUrl()
{
    // a controlling portal stored on the session overrides the default
    // "/site/" path segment
    Session session = sessionManager.getCurrentSession();
    String controllingPortal = (String) session.getAttribute("sakai-controlling-portal");
    String pathSegment = (controllingPortal == null)
            ? "/site/"
            : ("/" + controllingPortal + "/");
    String portalUrl = siteService.serverConfigurationService().getPortalUrl();
    return portalUrl + pathSegment + m_id;
}
/**
 * @inheritDoc
 */
public String getReference()
{
    // delegate to the service to build the full entity reference for this site id
    return siteService.siteReference(m_id);
}

/**
 * @inheritDoc
 */
public String getReference(String rootProperty)
{
    // sites have no alternate reference roots; rootProperty is ignored
    return getReference();
}

/**
 * @inheritDoc
 */
public String getUrl(String rootProperty)
{
    // sites have no alternate URL roots; rootProperty is ignored
    return getUrl();
}

/**
 * @inheritDoc
 */
public ResourceProperties getProperties()
{
    // Default to loading the properties if lazy
    return getProperties(true);
}

/**
 * Access the Site's properties, with control over fetching of lazy collections.
 *
 * The allowFetch flag is typically passed as true, but passed as false for
 * fine-grained control while building copies, etc. This signature is not provided
 * on the Site interface and is only intended for use within the implementation package.
 *
 * @param allowFetch
 *        when true, fetch properties if not loaded;
 *        when false, avoid fetching and return the properties collection as-is
 * @return The Site's properties.
 *
 */
public ResourceProperties getProperties(boolean allowFetch)
{
    // if lazy, resolve unless requested to avoid fetching (as for copy constructor)
    if (allowFetch && ((BaseResourceProperties) m_properties).isLazy())
    {
        siteService.storage().readSiteProperties(
                this, m_properties);
        // mark resolved so storage is only read once
        ((BaseResourcePropertiesEdit) m_properties).setLazy(false);
    }
    return m_properties;
}
/**
 * {@inheritDoc}
 */
public User getCreatedBy()
{
    try
    {
        return userDirectoryService.getUser(m_createdUserId);
    }
    catch (Exception e)
    {
        // an unknown or missing creator id resolves to the anonymous user
        return userDirectoryService.getAnonymousUser();
    }
}

/**
 * {@inheritDoc}
 */
public User getModifiedBy()
{
    try
    {
        return userDirectoryService.getUser(m_lastModifiedUserId);
    }
    catch (Exception e)
    {
        // an unknown or missing modifier id resolves to the anonymous user
        return userDirectoryService.getAnonymousUser();
    }
}
/**
 * {@inheritDoc}
 */
public Time getCreatedTime()
{
    // may be null for a site that has never been saved (set() only clones
    // non-null times from the source site)
    return m_createdTime;
}

/**
 * The created time as a java.util.Date, or null if no created time is set.
 * The null guard avoids the NPE the previous version threw for unsaved sites.
 */
public Date getCreatedDate() {
    if (m_createdTime == null) return null;
    return new Date(m_createdTime.getTime());
}

/**
 * {@inheritDoc}
 */
public Time getModifiedTime()
{
    // may be null for a site that has never been saved
    return m_lastModifiedTime;
}

/**
 * The last-modified time as a java.util.Date, or null if no modified time is set.
 */
public Date getModifiedDate() {
    if (m_lastModifiedTime == null) return null;
    return new Date(m_lastModifiedTime.getTime());
}
/**
 * @inheritDoc
 */
public String getTitle()
{
    // if set here, use the setting
    if (m_title != null) return m_title;
    // if not otherwise set, use the id
    return getId();
}

/**
 * @inheritDoc
 */
public String getShortDescription()
{
    return m_shortDescription;
}

/** HTML escape and store the site's short description. */
protected void escapeShortDescription()
{
    // keeps the cached HTML form in sync whenever the plain form changes
    m_htmlShortDescription = Web.escapeHtml(m_shortDescription);
}

/**
 * @inheritDoc
 */
public String getHtmlShortDescription()
{
    // pre-escaped by escapeShortDescription(); safe to embed in HTML
    return m_htmlShortDescription;
}

/** HTML escape and store the site's full description. */
protected void escapeDescription()
{
    m_htmlDescription = Web.escapeHtml(m_description);
}

/**
 * @inheritDoc
 */
public String getDescription()
{
    // may not be populated until loadAll() runs; see isDescriptionLoaded()
    return m_description;
}

/**
 * @inheritDoc
 */
public String getHtmlDescription()
{
    // pre-escaped by escapeDescription(); safe to embed in HTML
    return m_htmlDescription;
}

/**
 * @inheritDoc
 */
public boolean isJoinable()
{
    return m_joinable;
}

/**
 * @inheritDoc
 */
public String getJoinerRole()
{
    // the role granted to users who join the site themselves
    return m_joinerRole;
}
/**
 * {@inheritDoc}
 */
public boolean isPublished()
{
    return m_published;
}

/**
 * @inheritDoc
 */
public String getSkin()
{
    // may be null, in which case the site uses the default skin
    return m_skin;
}

/**
 * @inheritDoc
 */
public String getIconUrl()
{
    // the raw icon value as stored; see getIconUrlFull() for the resolved form
    return m_icon;
}
/**
 * {@inheritDoc}
 */
public String getIconUrlFull()
{
    // guard against a null icon the same way getInfoUrlFull() guards m_info,
    // rather than passing null through to convertReferenceUrl
    if (m_icon == null) return null;
    return siteService
            .convertReferenceUrl(m_icon);
}
/**
 * @inheritDoc
 */
public String getInfoUrl()
{
    // the raw info value as stored; see getInfoUrlFull() for the resolved form
    return m_info;
}

/**
 * {@inheritDoc}
 */
public String getInfoUrlFull()
{
    // no info URL configured
    if (m_info == null) return null;
    return siteService
            .convertReferenceUrl(m_info);
}
/**
 * {@inheritDoc}
 */
public List getPages()
{
    // Default to loading the pages if lazy
    return getPages(true);
}

/**
 * Access the Site's list of pages, with control over fetching of lazy collections.
 *
 * The allowFetch flag is typically passed as true, but passed as false for
 * fine-grained control while building copies, etc. This signature is not provided
 * on the Site interface and is only intended for use within the implementation package.
 *
 * @param allowFetch
 *        when true, fetch pages if not loaded;
 *        when false, avoid fetching and return the page list as-is
 * @return The Site's list of SitePages.
 *
 */
public List getPages(boolean allowFetch)
{
    // resolve the lazy page list from storage on first allowed access
    if (allowFetch && m_pagesLazy)
    {
        siteService.storage().readSitePages(this,
                m_pages);
        m_pagesLazy = false;
    }
    return m_pages;
}

/**
 * {@inheritDoc}
 */
public Collection getGroups()
{
    // Default to loading the groups if lazy
    return getGroups(true);
}

/**
 * Access the Site's list of groups, with control over fetching of lazy collections.
 *
 * The allowFetch flag is typically passed as true, but passed as false for
 * fine-grained control while building copies, etc. This signature is not provided
 * on the Site interface and is only intended for use within the implementation package.
 *
 * @param allowFetch
 *        when true, fetch groups if not loaded;
 *        when false, avoid fetching and return the group list as-is
 * @return The Site's list of Groups.
 *
 */
public Collection getGroups(boolean allowFetch)
{
    // Avoid fetching if requested (as for copy constructor)
    if (allowFetch && m_groupsLazy)
    {
        siteService.storage().readSiteGroups(
                this, m_groups);
        m_groupsLazy = false;
    }
    return m_groups;
}
/**
 * {@inheritDoc}
 */
public Collection<String> getMembersInGroups(Set<String> groupIds) {
    @SuppressWarnings("unchecked")
    Collection<Group> siteGroups = getGroups();
    // collect the realm references of the requested groups; a null groupIds
    // selects every group in the site
    HashSet<String> selectedGroupRefs = new HashSet<String>(siteGroups.size());
    for (Group group : siteGroups) {
        boolean selected = (groupIds == null) || groupIds.contains(group.getId());
        if (selected) {
            selectedGroupRefs.add(group.getReference());
        }
    }
    return authzGroupService.getAuthzUsersInGroups(selectedGroupRefs);
}
/**
 * {@inheritDoc}
 */
public Collection getGroupsWithMember(String userId)
{
    // any role qualifies
    return getGroupsWithActiveMember(userId, null);
}

/**
 * {@inheritDoc}
 */
public Collection getGroupsWithMemberHasRole(String userId, String role)
{
    return getGroupsWithActiveMember(userId, role);
}

/**
 * Common implementation for the two public group-membership queries above
 * (previously duplicated bodies).
 *
 * @param userId the user whose group memberships are wanted
 * @param role when non-null, only groups where the user's active membership
 *        has this role are returned; when null, any role qualifies
 * @return the site's groups in which the user is an active member (and has
 *         the given role, if one was specified)
 */
private Collection<Group> getGroupsWithActiveMember(String userId, String role)
{
    Collection siteGroups = getGroups();
    ArrayList<String> siteGroupRefs = new ArrayList<String>(siteGroups.size());
    for (Iterator it = siteGroups.iterator(); it.hasNext();)
        siteGroupRefs.add(((Group) it.next()).getReference());
    List groups = authzGroupService.getAuthzUserGroupIds(siteGroupRefs, userId);
    Collection<Group> rv = new Vector<Group>();
    for (Iterator i = groups.iterator(); i.hasNext();)
    {
        Group g = getGroup((String) i.next());
        Member m = (g == null) ? null : g.getMember(userId);
        if ((m != null) && m.isActive()
                && ((role == null) || role.equals(m.getRole().getId())))
            rv.add(g);
    }
    return rv;
}
/**
 * {@inheritDoc}
 */
public boolean hasGroups()
{
    // true when the (possibly lazily-loaded) group collection is non-empty
    return !getGroups().isEmpty();
}
/**
 * {@inheritDoc}
 */
public void loadAll()
{
    // Load up the full description if needed. If we fail to find the site in the database,
    // mark the description as loaded anyway to avoid retrying if multiple calls come in.
    if (!m_descriptionLoaded)
    {
        Site fullSite = siteService.storage().get(getId());
        if (fullSite != null)
        {
            setDescription(fullSite.getDescription());
        }
        m_descriptionLoaded = true;
    }
    // first, pages
    getPages();
    // KNL-259 - Avoiding single-page fetch of properties by way of BaseToolConfiguration constructor
    siteService.storage().readSitePageProperties(this);
    // the bulk read above populated each page's properties; mark them non-lazy
    for (Iterator i = getPages().iterator(); i.hasNext();)
    {
        BaseSitePage page = (BaseSitePage) i.next();
        ((BaseResourcePropertiesEdit) page.m_properties).setLazy(false);
    }
    // next, tools from all pages, all at once
    siteService.storage().readSiteTools(this);
    // get groups, all at once
    getGroups();
    // now all properties
    siteService.storage()
            .readAllSiteProperties(this);
    m_fullyLoaded = true;
}
/**
 * {@inheritDoc}
 */
public List getOrderedPages()
{
    // if we are set to use our custom page order, do so
    if (m_customPageOrdered) return getPages();
    // otherwise order by the configured tool order for this site type
    List order = siteService
            .serverConfigurationService().getToolOrder(getType());
    if (order.isEmpty()) return getPages();
    Map<String, String> pageCategoriesByTool = siteService.serverConfigurationService().getToolToCategoryMap(
            getType());
    // get a copy we can modify without changing the site!
    List pages = new Vector(getPages());
    // find any pages that include the tool type for each tool in the
    // ordering, move them into the newOrder and remove from the old
    List newOrder = new Vector();
    // for each entry in the order
    for (Iterator i = order.iterator(); i.hasNext();)
    {
        String toolId = (String) i.next();
        // find any pages that have this tool
        for (Iterator p = pages.iterator(); p.hasNext();)
        {
            SitePage page = (SitePage) p.next();
            // NOTE(review): clears then re-assigns the category property on the
            // shared page objects as a side effect of ordering
            page.getProperties().removeProperty(SitePage.PAGE_CATEGORY_PROP);
            List tools = page.getTools();
            for (Iterator t = tools.iterator(); t.hasNext();)
            {
                ToolConfiguration tool = (ToolConfiguration) t.next();
                if (tool.getToolId().equals(toolId))
                {
                    // this page has this tool, so move it from the pages to
                    // the newOrder
                    newOrder.add(page);
                    if (pageCategoriesByTool.get(toolId) != null)
                    {
                        page.getProperties().addProperty(SitePage.PAGE_CATEGORY_PROP,
                                pageCategoriesByTool.get(toolId));
                    }
                    // remove through the iterator so the scan stays valid
                    p.remove();
                    break;
                }
            }
        }
    }
    // add any remaining
    newOrder.addAll(pages);
    return newOrder;
}
/**
 * {@inheritDoc}
 */
public SitePage getPage(String id)
{
    // pages are held in a ResourceVector, which supports lookup by id
    return (SitePage) ((ResourceVector) getPages()).getById(id);
}

/**
 * {@inheritDoc}
 */
public ToolConfiguration getTool(String id)
{
    // search the pages
    for (Iterator iPages = getPages().iterator(); iPages.hasNext();)
    {
        SitePage page = (SitePage) iPages.next();
        ToolConfiguration tool = page.getTool(id);
        if (tool != null) return tool;
    }
    // not found on any page
    return null;
}
/**
 * {@inheritDoc}
 */
public Collection getTools(String commonToolId)
{
    // delegate to the array form with a single-element array
    return getTools(new String[] { commonToolId });
}

/**
 * {@inheritDoc}
 */
public ToolConfiguration getToolForCommonId(String commonToolId)
{
    Collection matches = getTools(commonToolId);
    if ((matches == null) || matches.isEmpty()) return null;
    // return the first matching tool
    return (ToolConfiguration) matches.iterator().next();
}

/**
 * {@inheritDoc}
 */
public Collection getTools(String[] toolIds)
{
    List found = new Vector();
    if ((toolIds != null) && (toolIds.length > 0))
    {
        // gather matching tools from every page of the site
        for (Iterator iPages = getPages().iterator(); iPages.hasNext();)
        {
            SitePage page = (SitePage) iPages.next();
            found.addAll(page.getTools(toolIds));
        }
    }
    return found;
}
/**
 * {@inheritDoc}
 */
public Group getGroup(String id)
{
    if (id == null) return null;
    // if this is a reference, starting with a "/", parse it, make sure it's
    // a group, in this site, and pull the id
    if (id.startsWith(Entity.SEPARATOR))
    {
        Reference ref = siteService
                .entityManager().newReference(id);
        if ((SiteService.APPLICATION_ID.equals(ref.getType()))
                && (SiteService.GROUP_SUBTYPE.equals(ref.getSubType()))
                && (m_id.equals(ref.getContainer())))
        {
            return (Group) ((ResourceVector) getGroups()).getById(ref.getId());
        }
        // a reference, but not to a group in this site
        return null;
    }
    // otherwise treat it as a plain group id
    return (Group) ((ResourceVector) getGroups()).getById(id);
}
/**
 * {@inheritDoc}
 */
public String getType()
{
    return m_type;
}

/**
 * @inheritDoc
 */
public boolean isType(Object type)
{
    // a null criteria matches any site
    if (type == null) return true;
    String myType = getType();
    if (type instanceof String[])
    {
        // match against any non-null entry of the array
        for (String candidate : (String[]) type)
        {
            if ((candidate != null) && candidate.equals(myType)) return true;
        }
        return false;
    }
    if (type instanceof Collection)
    {
        return ((Collection) type).contains(myType);
    }
    if (type instanceof String)
    {
        return type.equals(myType);
    }
    // unsupported criteria object
    return false;
}
@Override
public boolean equals(Object obj) {
    // identity based solely on the site id; subclasses do not compare equal
    if (this == obj)
        return true;
    if ((obj == null) || (getClass() != obj.getClass()))
        return false;
    BaseSite that = (BaseSite) obj;
    return (m_id == null) ? (that.m_id == null) : m_id.equals(that.m_id);
}

@Override
public int hashCode() {
    // consistent with equals: based on the site id only
    return 31 + ((m_id == null) ? 0 : m_id.hashCode());
}
/**
 * @inheritDoc
 */
public int compareTo(Object obj)
{
    if (!(obj instanceof Site)) throw new ClassCastException();
    // if the objects are the same, say so
    if (obj == this) return 0;
    Site that = (Site) obj;
    // order primarily by title, breaking ties with the (unique) id
    int byTitle = getTitle().compareTo(that.getTitle());
    return (byTitle != 0) ? byTitle : getId().compareTo(that.getId());
}
/**
 * {@inheritDoc}
 */
public boolean isPubView()
{
    return m_pubView;
}

/**
 * {@inheritDoc}
 */
public Element toXml(Document doc, Stack stack)
{
    Element site = doc.createElement("site");
    // root element if the stack is empty, otherwise child of the stack top
    if (stack.isEmpty())
    {
        doc.appendChild(site);
    }
    else
    {
        ((Element) stack.peek()).appendChild(site);
    }
    site.setAttribute("id", getId());
    if (m_title != null) site.setAttribute("title", m_title);
    // encode the short description
    if (m_shortDescription != null)
        Xml.encodeAttribute(site, "short-description-enc", m_shortDescription);
    // encode the description
    if (m_description != null)
        Xml.encodeAttribute(site, "description-enc", m_description);
    site.setAttribute("joinable", Boolean.valueOf(m_joinable).toString());
    if (m_joinerRole != null) site.setAttribute("joiner-role", m_joinerRole);
    site.setAttribute("published", Boolean.valueOf(m_published).toString());
    if (m_icon != null) site.setAttribute("icon", m_icon);
    if (m_info != null) site.setAttribute("info", m_info);
    if (m_skin != null) site.setAttribute("skin", m_skin);
    site.setAttribute("pubView", Boolean.valueOf(m_pubView).toString());
    site.setAttribute("customPageOrdered", Boolean.valueOf(m_customPageOrdered)
            .toString());
    site.setAttribute("type", m_type);
    site.setAttribute("created-id", m_createdUserId);
    site.setAttribute("modified-id", m_lastModifiedUserId);
    // NOTE(review): assumes created/modified times are non-null — toString()
    // would NPE for a never-saved site; confirm only saved sites are serialized
    site.setAttribute("created-time", m_createdTime.toString());
    site.setAttribute("modified-time", m_lastModifiedTime.toString());
    // properties
    stack.push(site);
    getProperties().toXml(doc, stack);
    stack.pop();
    // site pages
    Element list = doc.createElement("pages");
    site.appendChild(list);
    stack.push(list);
    for (Iterator iPages = getPages().iterator(); iPages.hasNext();)
    {
        BaseSitePage page = (BaseSitePage) iPages.next();
        page.toXml(doc, stack);
    }
    stack.pop();
    // TODO: site groups
    return site;
}
/**
 * {@inheritDoc}
 */
public void setTitle(String title)
{
    // normalize empty/whitespace-only titles to null so getTitle() falls back to the id
    m_title = StringUtils.trimToNull(title);
}
/**
 * {@inheritDoc}
 */
public void setShortDescription(String shortDescription)
{
    // normalize empty input to null, then refresh the cached HTML form
    // (parameter renamed from the misspelled "shortDescripion")
    m_shortDescription = StringUtils.trimToNull(shortDescription);
    escapeShortDescription();
}
/**
 * {@inheritDoc}
 */
public void setDescription(String description)
{
    // normalize empty input to null, then refresh the cached HTML form
    m_description = StringUtils.trimToNull(description);
    escapeDescription();
}

/**
 * {@inheritDoc}
 */
public void setJoinable(boolean joinable)
{
    m_joinable = joinable;
}

/**
 * {@inheritDoc}
 */
public void setJoinerRole(String role)
{
    m_joinerRole = role;
}

/**
 * {@inheritDoc}
 */
public void setPublished(boolean published)
{
    m_published = published;
}

/**
 * {@inheritDoc}
 */
public void setSkin(String skin)
{
    // invalid skin values are silently ignored
    if (Validator.checkSiteSkin(skin)) {
        m_skin = skin;
    }
}

/**
 * {@inheritDoc}
 */
public void setIconUrl(String url)
{
    // empty/whitespace-only URLs are stored as null
    m_icon = StringUtils.trimToNull(url);
}

/**
 * {@inheritDoc}
 */
public void setInfoUrl(String url)
{
    // empty/whitespace-only URLs are stored as null
    m_info = StringUtils.trimToNull(url);
}
/**
 * {@inheritDoc}
 */
public SitePage addPage()
{
    // create a new page and append it to the (resolved) page list
    BaseSitePage page = new BaseSitePage(siteService, this);
    getPages().add(page);
    return page;
}

/**
 * @inheritDoc
 */
public void removePage(SitePage page)
{
    getPages().remove(page);
}

/**
 * Access the event code for this edit.
 *
 * @return The event code for this edit.
 */
protected String getEvent()
{
    return m_event;
}

/**
 * Set the event code for this edit.
 *
 * @param event
 *        The event code for this edit.
 */
protected void setEvent(String event)
{
    m_event = event;
}
/**
 * @inheritDoc
 */
public ResourcePropertiesEdit getPropertiesEdit()
{
    // Reuse getProperties(true) so the lazy-resolution logic lives in one
    // place; it reads the properties from storage (if still lazy) and clears
    // the lazy flag before we hand out the editable collection.
    getProperties(true);
    return m_properties;
}
/**
 * {@inheritDoc}
 */
public void setType(String type)
{
    // invalid type values are silently ignored
    if (Validator.checkSiteType(type)) {
        m_type = type;
    }
}

/**
 * {@inheritDoc}
 */
public void setPubView(boolean pubView)
{
    m_pubView = pubView;
}

/**
 * {@inheritDoc}
 */
public boolean isCustomPageOrdered()
{
    return m_customPageOrdered;
}

/**
 * {@inheritDoc}
 */
public void setCustomPageOrdered(boolean setting)
{
    m_customPageOrdered = setting;
}

/**
 * Enable editing.
 */
protected void activate()
{
    m_active = true;
}

/**
 * @inheritDoc
 */
public boolean isActiveEdit()
{
    return m_active;
}

/**
 * Close the edit object - it cannot be used after this.
 */
protected void closeEdit()
{
    m_active = false;
}
/**
 * @inheritDoc
 */
public void regenerateIds()
{
    // deep copy the pages
    // (exact=false in the page copy constructor generates fresh page/tool ids)
    ResourceVector newPages = new ResourceVector();
    for (Iterator iPages = getPages().iterator(); iPages.hasNext();)
    {
        BaseSitePage page = (BaseSitePage) iPages.next();
        newPages.add(new BaseSitePage(siteService, page, this, false));
    }
    m_pages = newPages;
}

/**
 * {@inheritDoc}
 */
public Group addGroup()
{
    // NOTE(review): adds to m_groups directly (without resolving a lazy group
    // list), unlike addPage() which goes through getPages(); confirm intended
    Group rv = new BaseGroup(siteService, this);
    m_groups.add(rv);
    return rv;
}

/**
 * {@inheritDoc}
 */
public void removeGroup(Group group)
{
    // remove it
    m_groups.remove(group);
    // track so we can clean up related on commit
    m_deletedGroups.add(group);
}
/**
 * Access (find if needed) the azg from the AuthzGroupService that
 * implements my grouping.
 *
 * Lazily resolves and caches the site's backing authz group (realm); if none
 * exists yet, a new one is created (but not saved) from the site template.
 *
 * @return My azg.
 */
protected AuthzGroup getAzg()
{
    if (m_azg == null)
    {
        try
        {
            m_azg = authzGroupService.getAuthzGroup(getReference());
        }
        catch (GroupNotDefinedException e)
        {
            try
            {
                // create the site's azg, but don't store it yet (that
                // happens if save is called)
                // try the site created-by user for the maintain role in the
                // site
                String userId = getCreatedBy().getId();
                if (userId != null)
                {
                    // make sure it's valid
                    try
                    {
                        userDirectoryService.getUser(userId);
                    }
                    catch (UserNotDefinedException e1)
                    {
                        userId = null;
                    }
                }
                // use the current user if needed
                if (userId == null)
                {
                    User user = userDirectoryService.getCurrentUser();
                    userId = user.getId();
                }
                // find the template for the new azg
                String groupAzgTemplate = siteService.siteAzgTemplate(this);
                AuthzGroup template = null;
                try
                {
                    template = authzGroupService.getAuthzGroup(groupAzgTemplate);
                }
                catch (Exception e1)
                {
                    try
                    {
                        // if the template is not defined, try the fall back
                        // template
                        template = authzGroupService.getAuthzGroup("!site.template");
                    }
                    catch (Exception e2)
                    {
                        // no template found at all; newAuthzGroup gets null
                    }
                }
                m_azg = authzGroupService.newAuthzGroup(getReference(), template,
                        userId);
                m_azgChanged = true;
            }
            catch (Exception t)
            {
                // NOTE(review): on failure m_azg stays null; the delegating
                // membership methods below would then NPE — confirm acceptable
                M_log.warn("getAzg: " + t);
            }
        }
    }
    return m_azg;
}
/*
 * AuthzGroup-backed membership API: the methods below delegate to the site's
 * backing authz group (realm) obtained from getAzg(). Mutators set
 * m_azgChanged so the realm changes are persisted when the site is saved.
 */

public void addMember(String userId, String roleId, boolean active, boolean provided)
{
    m_azgChanged = true;
    getAzg().addMember(userId, roleId, active, provided);
}

public Role addRole(String id) throws RoleAlreadyDefinedException
{
    m_azgChanged = true;
    return getAzg().addRole(id);
}

public Role addRole(String id, Role other) throws RoleAlreadyDefinedException
{
    m_azgChanged = true;
    return getAzg().addRole(id, other);
}

// read-only delegations: no m_azgChanged flag needed

public String getMaintainRole()
{
    return getAzg().getMaintainRole();
}

public Member getMember(String userId)
{
    return getAzg().getMember(userId);
}

public Set getMembers()
{
    return getAzg().getMembers();
}

public String getProviderGroupId()
{
    return getAzg().getProviderGroupId();
}

public Role getRole(String id)
{
    return getAzg().getRole(id);
}

public Set getRoles()
{
    return getAzg().getRoles();
}

public Set getRolesIsAllowed(String function)
{
    return getAzg().getRolesIsAllowed(function);
}

public Role getUserRole(String userId)
{
    return getAzg().getUserRole(userId);
}

public Set getUsers()
{
    return getAzg().getUsers();
}

public Set getUsersHasRole(String role)
{
    return getAzg().getUsersHasRole(role);
}

public Set getUsersIsAllowed(String function)
{
    return getAzg().getUsersIsAllowed(function);
}

public boolean hasRole(String userId, String role)
{
    return getAzg().hasRole(userId, role);
}

public boolean isAllowed(String userId, String function)
{
    return getAzg().isAllowed(userId, function);
}

public boolean isEmpty()
{
    return getAzg().isEmpty();
}

public void removeMember(String userId)
{
    m_azgChanged = true;
    getAzg().removeMember(userId);
}

public void removeMembers()
{
    m_azgChanged = true;
    getAzg().removeMembers();
}

public void removeRole(String role)
{
    m_azgChanged = true;
    getAzg().removeRole(role);
}

public void removeRoles()
{
    m_azgChanged = true;
    getAzg().removeRoles();
}

public void setMaintainRole(String role)
{
    m_azgChanged = true;
    getAzg().setMaintainRole(role);
}

public void setProviderGroupId(String id)
{
    m_azgChanged = true;
    getAzg().setProviderGroupId(id);
}

public boolean keepIntersection(AuthzGroup other)
{
    // only mark changed when the intersection actually altered the realm
    boolean changed = getAzg().keepIntersection(other);
    if (changed) m_azgChanged = true;
    return changed;
}
/** @return true when this site has been softly (recoverably) deleted. */
public boolean isSoftlyDeleted() {
    return m_isSoftlyDeleted;
}

/** @return the date the site was softly deleted, or null if it is not. */
public Date getSoftlyDeletedDate() {
    return m_softlyDeletedDate;
}

/**
 * Flag the site softly deleted (stamping the deletion date with the current
 * time) or restore it (clearing the date).
 */
public void setSoftlyDeleted(boolean flag) {
    m_isSoftlyDeleted = flag;
    m_softlyDeletedDate = flag
            ? new java.sql.Date(System.currentTimeMillis())
            : null;
}
/**
 * Check if this Site's description field has been populated.
 * Note that this is intentionally not exposed through the Site interface to keep it
 * within the implementation package. The specifics of other lazy loading are not exposed
 * through the Site interface; the collections are simply empty if not loaded. The
 * SiteService encourages calls to {@link SiteService#getSite(String) getSite} and the
 * Site interface exposes {@link Site#loadAll() loadAll} to ensure all loading.
 *
 * @return true if the description has been loaded for this Site object
 */
public boolean isDescriptionLoaded()
{
    return m_descriptionLoaded;
}

/**
 * Check whether this Site has been fully populated.
 * Note that this is intentionally not exposed through the Site interface to keep it
 * within the implementation package. The specifics of other lazy loading are not exposed
 * through the Site interface; the collections are simply empty if not loaded. The
 * SiteService encourages calls to {@link SiteService#getSite(String) getSite} and the
 * Site interface exposes {@link Site#loadAll() loadAll} to ensure all loading.
 *
 * @return true if the Site object has been fully loaded (by loadAll)
 */
public boolean isFullyLoaded()
{
    return m_fullyLoaded;
}

// Allows the service/storage layer to mark this site fully loaded (or not).
public void setFullyLoaded(boolean flag) {
    m_fullyLoaded = flag;
}
}
| apache-2.0 |
arjun4084346/gobblin | gobblin-modules/gobblin-elasticsearch/src/main/java/org/apache/gobblin/elasticsearch/writer/FutureCallbackHolder.java | 7430 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.elasticsearch.writer;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.math3.util.Pair;
import org.apache.gobblin.writer.GenericWriteResponse;
import org.apache.gobblin.writer.WriteCallback;
import org.apache.gobblin.writer.WriteResponse;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkResponse;
import javax.annotation.Nullable;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
/**
* A class to hold Futures and Callbacks to support Async writes
*/
@Slf4j
public class FutureCallbackHolder {
@Getter
private final ActionListener<BulkResponse> actionListener;
private final BlockingQueue<Pair<WriteResponse, Throwable>> writeResponseQueue = new ArrayBlockingQueue<>(1);
@Getter
private final Future<WriteResponse> future;
private final AtomicBoolean done = new AtomicBoolean(false);
public FutureCallbackHolder(final @Nullable WriteCallback callback,
ExceptionLogger exceptionLogger,
final MalformedDocPolicy malformedDocPolicy) {
this.future = new Future<WriteResponse>() {
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return false;
}
@Override
public boolean isCancelled() {
return false;
}
@Override
public boolean isDone() {
return done.get();
}
@Override
public WriteResponse get()
throws InterruptedException, ExecutionException {
Pair<WriteResponse, Throwable> writeResponseThrowablePair = writeResponseQueue.take();
return getWriteResponseorThrow(writeResponseThrowablePair);
}
@Override
public WriteResponse get(long timeout, TimeUnit unit)
throws InterruptedException, ExecutionException, TimeoutException {
Pair<WriteResponse, Throwable> writeResponseThrowablePair = writeResponseQueue.poll(timeout, unit);
if (writeResponseThrowablePair == null) {
throw new TimeoutException("Timeout exceeded while waiting for future to be done");
} else {
return getWriteResponseorThrow(writeResponseThrowablePair);
}
}
};
this.actionListener = new ActionListener<BulkResponse>() {
@Override
public void onResponse(BulkResponse bulkItemResponses) {
if (bulkItemResponses.hasFailures()) {
boolean logicalErrors = false;
boolean serverErrors = false;
for (BulkItemResponse bulkItemResponse: bulkItemResponses) {
if (bulkItemResponse.isFailed()) {
// check if the failure is permanent (logical) or transient (server)
if (isLogicalError(bulkItemResponse)) {
// check error policy
switch (malformedDocPolicy) {
case IGNORE: {
log.debug("Document id {} was malformed with error {}",
bulkItemResponse.getId(),
bulkItemResponse.getFailureMessage());
break;
}
case WARN: {
log.warn("Document id {} was malformed with error {}",
bulkItemResponse.getId(),
bulkItemResponse.getFailureMessage());
break;
}
default: {
// Pass through
}
}
logicalErrors = true;
} else {
serverErrors = true;
}
}
}
if (serverErrors) {
onFailure(new RuntimeException("Partial failures in the batch: " + bulkItemResponses.buildFailureMessage()));
} else if (logicalErrors) {
// all errors found were logical, throw RuntimeException if policy says to Fail
switch (malformedDocPolicy) {
case FAIL: {
onFailure(new RuntimeException("Partial non-recoverable failures in the batch. To ignore these, set "
+ ElasticsearchWriterConfigurationKeys.ELASTICSEARCH_WRITER_MALFORMED_DOC_POLICY + " to "
+ MalformedDocPolicy.IGNORE.name()));
break;
}
default: {
WriteResponse writeResponse = new GenericWriteResponse<BulkResponse>(bulkItemResponses);
writeResponseQueue.add(new Pair<WriteResponse, Throwable>(writeResponse, null));
if (callback != null) {
callback.onSuccess(writeResponse);
}
}
}
}
} else {
WriteResponse writeResponse = new GenericWriteResponse<BulkResponse>(bulkItemResponses);
writeResponseQueue.add(new Pair<WriteResponse, Throwable>(writeResponse, null));
if (callback != null) {
callback.onSuccess(writeResponse);
}
}
}
private boolean isLogicalError(BulkItemResponse bulkItemResponse) {
String failureMessage = bulkItemResponse.getFailureMessage();
return failureMessage.contains("IllegalArgumentException")
|| failureMessage.contains("illegal_argument_exception")
|| failureMessage.contains("MapperParsingException")
|| failureMessage.contains("mapper_parsing_exception");
}
@Override
public void onFailure(Exception exception) {
    // Record the failure first so any caller blocked on the response
    // queue is released before the listeners run.
    writeResponseQueue.add(new Pair<WriteResponse, Throwable>(null, exception));
    // Optional collaborator: only log when an exception logger was configured.
    if (exceptionLogger != null) {
        exceptionLogger.log(exception);
    }
    // Propagate the failure to the user-supplied callback, if any.
    if (callback != null) {
        callback.onFailure(exception);
    }
}
};
}
/**
 * Unwraps a (response, throwable) pair taken from the response queue:
 * returns the response when present, otherwise rethrows the recorded
 * failure wrapped in an {@link ExecutionException}. Always marks this
 * future as done, even on the exceptional paths.
 */
private WriteResponse getWriteResponseorThrow(Pair<WriteResponse, Throwable> writeResponseThrowablePair)
    throws ExecutionException {
  try {
    WriteResponse response = writeResponseThrowablePair.getFirst();
    if (response != null) {
      return response;
    }
    Throwable failure = writeResponseThrowablePair.getSecond();
    if (failure != null) {
      throw new ExecutionException(failure);
    }
    // Neither side of the pair was populated; this indicates a programming error.
    throw new ExecutionException(new RuntimeException("Could not find non-null WriteResponse pair"));
  } finally {
    done.set(true);
  }
}
}
| apache-2.0 |
masaki-yamakawa/geode | geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/EntryColumnData.java | 1544 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.connectors.jdbc.internal;
import java.util.Collections;
import java.util.List;
/**
 * Holds the column data for a single region entry, split into the columns
 * derived from the entry key and the columns derived from the entry value.
 * Null inputs are normalized to empty lists so callers never observe null.
 */
class EntryColumnData {

  private final List<ColumnData> entryKeyColumnData;
  private final List<ColumnData> entryValueColumnData;

  EntryColumnData(List<ColumnData> entryKeyColumnData, List<ColumnData> entryValueColumnData) {
    this.entryKeyColumnData = orEmpty(entryKeyColumnData);
    this.entryValueColumnData = orEmpty(entryValueColumnData);
  }

  // Normalizes a possibly-null list to an immutable empty list.
  private static List<ColumnData> orEmpty(List<ColumnData> columnData) {
    if (columnData == null) {
      return Collections.emptyList();
    }
    return columnData;
  }

  public List<ColumnData> getEntryKeyColumnData() {
    return entryKeyColumnData;
  }

  public List<ColumnData> getEntryValueColumnData() {
    return entryValueColumnData;
  }
}
| apache-2.0 |
robin13/elasticsearch | server/src/test/java/org/elasticsearch/snapshots/SnapshotUtilsTests.java | 3087 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.snapshots;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.test.ESTestCase;
import java.util.Arrays;
import java.util.List;
import static org.hamcrest.Matchers.containsInAnyOrder;
/**
 * Tests for {@link SnapshotUtils#filterIndices}: wildcard expansion,
 * inclusion/exclusion prefixes, and lenient handling of missing names.
 */
public class SnapshotUtilsTests extends ESTestCase {

    // Fresh array per call so the filter can never observe shared state.
    private static String[] indices() {
        return new String[]{"foo", "bar", "baz"};
    }

    public void testIndexNameFiltering() {
        // Empty filter, "*" and "_all" all select everything.
        assertIndexNameFiltering(indices(), new String[]{}, new String[]{"foo", "bar", "baz"});
        assertIndexNameFiltering(indices(), new String[]{"*"}, new String[]{"foo", "bar", "baz"});
        assertIndexNameFiltering(indices(), new String[]{"_all"}, new String[]{"foo", "bar", "baz"});
        assertIndexNameFiltering(indices(), new String[]{"foo", "bar", "baz"}, new String[]{"foo", "bar", "baz"});
        // Explicit names select exactly the existing matches.
        assertIndexNameFiltering(indices(), new String[]{"foo"}, new String[]{"foo"});
        assertIndexNameFiltering(indices(), new String[]{"baz", "not_available"}, new String[]{"baz"});
        // Exclusions ("-") remove matches; inclusions ("+") add them.
        assertIndexNameFiltering(indices(), new String[]{"ba*", "-bar", "-baz"}, new String[]{});
        assertIndexNameFiltering(indices(), new String[]{"-bar"}, new String[]{"foo", "baz"});
        assertIndexNameFiltering(indices(), new String[]{"-ba*"}, new String[]{"foo"});
        assertIndexNameFiltering(indices(), new String[]{"+ba*"}, new String[]{"bar", "baz"});
        assertIndexNameFiltering(indices(), new String[]{"+bar", "+foo"}, new String[]{"bar", "foo"});
        // Lenient options ignore names that do not resolve.
        assertIndexNameFiltering(indices(), new String[]{"zzz", "bar"}, IndicesOptions.lenientExpandOpen(),
            new String[]{"bar"});
        assertIndexNameFiltering(indices(), new String[]{""}, IndicesOptions.lenientExpandOpen(), new String[]{});
        assertIndexNameFiltering(indices(), new String[]{"foo", "", "ba*"}, IndicesOptions.lenientExpandOpen(),
            new String[]{"foo", "bar", "baz"});
    }

    // Convenience overload defaulting to lenient open-index expansion.
    private void assertIndexNameFiltering(String[] indices, String[] filter, String[] expected) {
        assertIndexNameFiltering(indices, filter, IndicesOptions.lenientExpandOpen(), expected);
    }

    private void assertIndexNameFiltering(String[] indices, String[] filter, IndicesOptions indicesOptions, String[] expected) {
        List<String> indicesList = Arrays.asList(indices);
        List<String> actual = SnapshotUtils.filterIndices(indicesList, filter, indicesOptions);
        assertThat(actual, containsInAnyOrder(expected));
    }
}
| apache-2.0 |
lato333/guacamole-client | guacamole-common/src/main/java/org/apache/guacamole/GuacamoleServerException.java | 2159 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.guacamole;
import org.apache.guacamole.protocol.GuacamoleStatus;
/**
 * A generic exception thrown when part of the Guacamole API encounters
 * an unexpected, internal error. An internal error, if correctable, would
 * require correction on the server side, not the client. Maps to the
 * {@link GuacamoleStatus#SERVER_ERROR} protocol status.
 */
public class GuacamoleServerException extends GuacamoleException {

    /**
     * Creates a new GuacamoleServerException with the given message and cause.
     *
     * @param message A human readable description of the exception that
     *                occurred.
     * @param cause The cause of this exception.
     */
    public GuacamoleServerException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Creates a new GuacamoleServerException with the given message.
     *
     * @param message A human readable description of the exception that
     *                occurred.
     */
    public GuacamoleServerException(String message) {
        super(message);
    }

    /**
     * Creates a new GuacamoleServerException with the given cause.
     *
     * @param cause The cause of this exception.
     */
    public GuacamoleServerException(Throwable cause) {
        super(cause);
    }

    /**
     * {@inheritDoc}
     *
     * @return Always {@link GuacamoleStatus#SERVER_ERROR}.
     */
    @Override
    public GuacamoleStatus getStatus() {
        return GuacamoleStatus.SERVER_ERROR;
    }

}
| apache-2.0 |
robin13/elasticsearch | client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/PerPartitionCategorizationConfig.java | 2922 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
/**
 * Configuration for per-partition categorization: whether it is enabled,
 * and whether categorization should stop for partitions whose status
 * reaches warn ("stop_on_warn").
 */
public class PerPartitionCategorizationConfig implements ToXContentObject {

    public static final ParseField TYPE_FIELD = new ParseField("per_partition_categorization");
    public static final ParseField ENABLED_FIELD = new ParseField("enabled");
    public static final ParseField STOP_ON_WARN = new ParseField("stop_on_warn");

    // Lenient parser: unknown fields are ignored on the client side.
    public static final ConstructingObjectParser<PerPartitionCategorizationConfig, Void> PARSER =
        new ConstructingObjectParser<>(TYPE_FIELD.getPreferredName(), true,
            a -> new PerPartitionCategorizationConfig((boolean) a[0], (Boolean) a[1]));

    static {
        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD);
        PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), STOP_ON_WARN);
    }

    private final boolean enabled;
    private final boolean stopOnWarn;

    /** Creates a config with per-partition categorization disabled. */
    public PerPartitionCategorizationConfig() {
        this(false, null);
    }

    public PerPartitionCategorizationConfig(boolean enabled, Boolean stopOnWarn) {
        this.enabled = enabled;
        // An absent (null) stopOnWarn is treated as false.
        this.stopOnWarn = Boolean.TRUE.equals(stopOnWarn);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(ENABLED_FIELD.getPreferredName(), enabled);
        // stop_on_warn is only meaningful (and only serialized) when enabled.
        if (enabled) {
            builder.field(STOP_ON_WARN.getPreferredName(), stopOnWarn);
        }
        builder.endObject();
        return builder;
    }

    public boolean isEnabled() {
        return enabled;
    }

    public boolean isStopOnWarn() {
        return stopOnWarn;
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if ((other instanceof PerPartitionCategorizationConfig) == false) {
            return false;
        }
        PerPartitionCategorizationConfig that = (PerPartitionCategorizationConfig) other;
        return enabled == that.enabled && stopOnWarn == that.stopOnWarn;
    }

    @Override
    public int hashCode() {
        return Objects.hash(enabled, stopOnWarn);
    }
}
| apache-2.0 |
robin13/elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/session/Payload.java | 651 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.eql.session;
import org.elasticsearch.common.unit.TimeValue;
import java.util.List;
/**
 * Container for final results. Used for completed data, such as Events or Sequences.
 */
public interface Payload {

    /** The kind of data carried by this payload. */
    enum Type {
        EVENT,
        SEQUENCE;
    }

    /** The kind of results (events or sequences) this payload contains. */
    Type resultType();

    /** Whether the underlying execution timed out before completing. */
    boolean timedOut();

    /** How long the execution took to produce these results. */
    TimeValue timeTook();

    /** The result values themselves. */
    List<?> values();
}
| apache-2.0 |
robin13/elasticsearch | libs/nio/src/test/java/org/elasticsearch/nio/BytesChannelContextTests.java | 10905 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.nio;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.util.PageCacheRecycler;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link BytesChannelContext}: reading from the raw channel
 * into the inbound buffer, flushing queued write operations, and the
 * close/failure bookkeeping. All collaborators are mocked; no real sockets
 * are involved.
 */
public class BytesChannelContextTests extends ESTestCase {

    // Consumes bytes from the channel buffer, returning how many were handled.
    private CheckedFunction<InboundChannelBuffer, Integer, IOException> readConsumer;
    private NioSocketChannel channel;
    private SocketChannel rawChannel;
    private BytesChannelContext context;
    private InboundChannelBuffer channelBuffer;
    private NioSelector selector;
    private BiConsumer<Void, Exception> listener;
    // Randomized per test run (20..116 bytes) so buffer math is exercised
    // with varying message sizes.
    private int messageLength;

    @Before
    @SuppressWarnings("unchecked")
    public void init() {
        readConsumer = mock(CheckedFunction.class);

        messageLength = randomInt(96) + 20;
        selector = mock(NioSelector.class);
        listener = mock(BiConsumer.class);
        channel = mock(NioSocketChannel.class);
        rawChannel = mock(SocketChannel.class);
        channelBuffer = InboundChannelBuffer.allocatingInstance();
        TestReadWriteHandler handler = new TestReadWriteHandler(readConsumer);
        when(channel.getRawChannel()).thenReturn(rawChannel);
        context = new BytesChannelContext(channel, selector, mock(Config.Socket.class), mock(Consumer.class), handler, channelBuffer);

        when(selector.isOnCurrentThread()).thenReturn(true);
        // Hand out the same (cleared) scratch buffer for every I/O operation.
        ByteBuffer buffer = ByteBuffer.allocate(1 << 14);
        when(selector.getIoBuffer()).thenAnswer(invocationOnMock -> {
            buffer.clear();
            return buffer;
        });
    }

    // A full message arrives and the handler consumes it in one pass.
    public void testSuccessfulRead() throws IOException {
        byte[] bytes = createMessage(messageLength);

        when(rawChannel.read(any(ByteBuffer.class))).thenAnswer(invocationOnMock -> {
            ByteBuffer buffer = (ByteBuffer) invocationOnMock.getArguments()[0];
            buffer.put(bytes);
            return bytes.length;
        });

        when(readConsumer.apply(channelBuffer)).thenReturn(messageLength, 0);

        assertEquals(messageLength, context.read());

        assertEquals(0, channelBuffer.getIndex());
        assertEquals(PageCacheRecycler.BYTE_PAGE_SIZE - bytes.length, channelBuffer.getCapacity());
        verify(readConsumer, times(1)).apply(channelBuffer);
    }

    // Two messages arrive in one read; the handler is invoked until it
    // reports zero bytes consumed.
    public void testMultipleReadsConsumed() throws IOException {
        byte[] bytes = createMessage(messageLength * 2);

        when(rawChannel.read(any(ByteBuffer.class))).thenAnswer(invocationOnMock -> {
            ByteBuffer buffer = (ByteBuffer) invocationOnMock.getArguments()[0];
            buffer.put(bytes);
            return bytes.length;
        });

        when(readConsumer.apply(channelBuffer)).thenReturn(messageLength, messageLength, 0);

        assertEquals(bytes.length, context.read());

        assertEquals(0, channelBuffer.getIndex());
        assertEquals(PageCacheRecycler.BYTE_PAGE_SIZE - bytes.length, channelBuffer.getCapacity());
        verify(readConsumer, times(2)).apply(channelBuffer);
    }

    // First read leaves an incomplete message buffered; the second read
    // completes it and the consumer drains both halves.
    public void testPartialRead() throws IOException {
        byte[] bytes = createMessage(messageLength);

        when(rawChannel.read(any(ByteBuffer.class))).thenAnswer(invocationOnMock -> {
            ByteBuffer buffer = (ByteBuffer) invocationOnMock.getArguments()[0];
            buffer.put(bytes);
            return bytes.length;
        });


        when(readConsumer.apply(channelBuffer)).thenReturn(0);

        assertEquals(messageLength, context.read());

        assertEquals(bytes.length, channelBuffer.getIndex());
        verify(readConsumer, times(1)).apply(channelBuffer);

        when(readConsumer.apply(channelBuffer)).thenReturn(messageLength * 2, 0);

        assertEquals(messageLength, context.read());

        assertEquals(0, channelBuffer.getIndex());
        assertEquals(PageCacheRecycler.BYTE_PAGE_SIZE - (bytes.length * 2), channelBuffer.getCapacity());
        verify(readConsumer, times(2)).apply(channelBuffer);
    }

    // IOExceptions from the raw channel propagate to the caller unchanged.
    public void testReadThrowsIOException() throws IOException {
        IOException ioException = new IOException();
        when(rawChannel.read(any(ByteBuffer.class))).thenThrow(ioException);

        IOException ex = expectThrows(IOException.class, () -> context.read());
        assertSame(ioException, ex);
    }

    // A read failure also flips the context into the "should close" state.
    public void testReadThrowsIOExceptionMeansReadyForClose() throws IOException {
        when(rawChannel.read(any(ByteBuffer.class))).thenThrow(new IOException());

        assertFalse(context.selectorShouldClose());
        expectThrows(IOException.class, () -> context.read());
        assertTrue(context.selectorShouldClose());
    }

    // A -1 read (EOF) is reported as 0 bytes but marks the channel for close.
    public void testReadLessThanZeroMeansReadyForClose() throws IOException {
        when(rawChannel.read(any(ByteBuffer.class))).thenReturn(-1);

        assertEquals(0, context.read());

        assertTrue(context.selectorShouldClose());
    }

    // A queued flush is fully written, its listener notified, and the
    // context leaves the ready-for-flush state.
    @SuppressWarnings("varargs")
    public void testQueuedWriteIsFlushedInFlushCall() throws Exception {
        assertFalse(context.readyForFlush());

        ByteBuffer[] buffers = {ByteBuffer.allocate(10)};
        FlushReadyWrite flushOperation = mock(FlushReadyWrite.class);
        context.queueWriteOperation(flushOperation);

        assertTrue(context.readyForFlush());

        when(flushOperation.getBuffersToWrite(anyInt())).thenReturn(buffers);
        when(flushOperation.isFullyFlushed()).thenReturn(false, true);
        when(flushOperation.getListener()).thenReturn(listener);
        context.flushChannel();

        ByteBuffer buffer = buffers[0].duplicate();
        buffer.flip();
        verify(rawChannel).write(eq(buffer));
        verify(selector).executeListener(listener, null);
        assertFalse(context.readyForFlush());
    }

    // A flush that does not complete keeps the operation queued and does
    // not notify the listener.
    public void testPartialFlush() throws IOException {
        assertFalse(context.readyForFlush());
        FlushReadyWrite flushOperation = mock(FlushReadyWrite.class);
        context.queueWriteOperation(flushOperation);
        assertTrue(context.readyForFlush());

        when(flushOperation.isFullyFlushed()).thenReturn(false);
        when(flushOperation.getBuffersToWrite(anyInt())).thenReturn(new ByteBuffer[] {ByteBuffer.allocate(3)});
        context.flushChannel();

        verify(listener, times(0)).accept(null, null);
        assertTrue(context.readyForFlush());
    }

    // Two queued writes flushed across two flushChannel calls: listeners
    // fire in order as each operation completes.
    @SuppressWarnings("unchecked")
    public void testMultipleWritesPartialFlushes() throws IOException {
        assertFalse(context.readyForFlush());

        BiConsumer<Void, Exception> listener2 = mock(BiConsumer.class);
        FlushReadyWrite flushOperation1 = mock(FlushReadyWrite.class);
        FlushReadyWrite flushOperation2 = mock(FlushReadyWrite.class);
        when(flushOperation1.getBuffersToWrite(anyInt())).thenReturn(new ByteBuffer[] {ByteBuffer.allocate(3)});
        when(flushOperation2.getBuffersToWrite(anyInt())).thenReturn(new ByteBuffer[] {ByteBuffer.allocate(3)});
        when(flushOperation1.getListener()).thenReturn(listener);
        when(flushOperation2.getListener()).thenReturn(listener2);
        context.queueWriteOperation(flushOperation1);
        context.queueWriteOperation(flushOperation2);

        assertTrue(context.readyForFlush());

        when(flushOperation1.isFullyFlushed()).thenReturn(false, true);
        when(flushOperation2.isFullyFlushed()).thenReturn(false);
        context.flushChannel();

        verify(selector).executeListener(listener, null);
        verify(listener2, times(0)).accept(null, null);
        assertTrue(context.readyForFlush());

        when(flushOperation2.isFullyFlushed()).thenReturn(false, true);

        context.flushChannel();

        verify(selector).executeListener(listener2, null);
        assertFalse(context.readyForFlush());
    }

    // A write failure notifies the listener via the failed-listener path
    // and drops the operation from the queue.
    public void testWhenIOExceptionThrownListenerIsCalled() throws IOException {
        assertFalse(context.readyForFlush());

        ByteBuffer[] buffers = {ByteBuffer.allocate(10)};
        FlushReadyWrite flushOperation = mock(FlushReadyWrite.class);
        context.queueWriteOperation(flushOperation);

        assertTrue(context.readyForFlush());

        IOException exception = new IOException();
        when(flushOperation.getBuffersToWrite(anyInt())).thenReturn(buffers);
        when(rawChannel.write(any(ByteBuffer.class))).thenThrow(exception);
        when(flushOperation.getListener()).thenReturn(listener);
        expectThrows(IOException.class, () -> context.flushChannel());

        verify(selector).executeFailedListener(listener, exception);
        assertFalse(context.readyForFlush());
    }

    // A write failure also flips the context into the "should close" state.
    public void testWriteIOExceptionMeansChannelReadyToClose() throws IOException {
        ByteBuffer[] buffers = {ByteBuffer.allocate(10)};
        FlushReadyWrite flushOperation = mock(FlushReadyWrite.class);
        context.queueWriteOperation(flushOperation);

        IOException exception = new IOException();
        when(flushOperation.getBuffersToWrite(anyInt())).thenReturn(buffers);
        when(rawChannel.write(any(ByteBuffer.class))).thenThrow(exception);

        assertFalse(context.selectorShouldClose());
        expectThrows(IOException.class, () -> context.flushChannel());
        assertTrue(context.selectorShouldClose());
    }

    // closeChannel() delegates the actual close to the selector's queue.
    public void testInitiateCloseSchedulesCloseWithSelector() {
        context.closeChannel();
        verify(selector).queueChannelClose(channel);
    }

    // Builds a message of the requested length filled with random bytes.
    private static byte[] createMessage(int length) {
        byte[] bytes = new byte[length];
        for (int i = 0; i < length; ++i) {
            bytes[i] = randomByte();
        }
        return bytes;
    }

    // Write handler that delegates read consumption to the mocked function.
    private static class TestReadWriteHandler extends BytesWriteHandler {

        private final CheckedFunction<InboundChannelBuffer, Integer, IOException> fn;

        private TestReadWriteHandler(CheckedFunction<InboundChannelBuffer, Integer, IOException> fn) {
            this.fn = fn;
        }

        @Override
        public int consumeReads(InboundChannelBuffer channelBuffer) throws IOException {
            return fn.apply(channelBuffer);
        }
    }
}
| apache-2.0 |
grpbwl/closure-compiler | src/com/google/javascript/jscomp/FunctionToBlockMutator.java | 17013 | /*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.javascript.jscomp.FunctionArgumentInjector.THIS_MARKER;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.javascript.jscomp.MakeDeclaredNamesUnique.InlineRenamer;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
* A class to transform the body of a function into a generic block suitable
* for inlining.
*
* @author johnlenz@google.com (John Lenz)
*/
class FunctionToBlockMutator {
private AbstractCompiler compiler;
private Supplier<String> safeNameIdSupplier;
FunctionToBlockMutator(
AbstractCompiler compiler, Supplier<String> safeNameIdSupplier) {
this.compiler = compiler;
this.safeNameIdSupplier = safeNameIdSupplier;
}
  /**
   * Clones and rewrites a function body so it can be injected as a plain
   * statement block at a call site.
   *
   * @param fnName The name to use when preparing human readable names.
   * @param fnNode The function to prepare.
   * @param callNode The call node that will be replaced.
   * @param resultName Function results should be assigned to this name.
   * @param needsDefaultResult Whether the result value must be set.
   * @param isCallInLoop Whether the function body must be prepared to be
   *   injected into the body of a loop.
   * @return A clone of the function body mutated to be suitable for injection
   *   as a statement into another code block.
   */
  Node mutate(String fnName, Node fnNode, Node callNode,
      String resultName, boolean needsDefaultResult, boolean isCallInLoop) {
    // Work on a clone so the original function definition is untouched.
    Node newFnNode = fnNode.cloneTree();
    // Now that parameter names have been replaced, make sure all the local
    // names are unique, to allow functions to be inlined multiple times
    // without causing conflicts.
    makeLocalNamesUnique(newFnNode, isCallInLoop);

    // Function declarations must be rewritten as function expressions as
    // they will be within a block and normalization prevents function
    // declarations within block as browser implementations vary.
    rewriteFunctionDeclarations(newFnNode.getLastChild());

    // TODO(johnlenz): Mark NAME nodes constant for parameters that are not
    // modified.
    Set<String> namesToAlias =
        FunctionArgumentInjector.findModifiedParameters(newFnNode);
    LinkedHashMap<String, Node> args =
        FunctionArgumentInjector.getFunctionCallParameterMap(
            newFnNode, callNode, this.safeNameIdSupplier);
    boolean hasArgs = !args.isEmpty();
    if (hasArgs) {
      FunctionArgumentInjector.maybeAddTempsForCallArguments(
          newFnNode, args, namesToAlias, compiler.getCodingConvention());
    }

    Node newBlock = NodeUtil.getFunctionBody(newFnNode);
    // Make the newBlock insertable .
    newBlock.detachFromParent();

    if (hasArgs) {
      Node inlineResult = aliasAndInlineArguments(newBlock,
          args, namesToAlias);
      Preconditions.checkState(newBlock == inlineResult);
    }

    //
    // For calls inlined into loops, VAR declarations are not reinitialized to
    // undefined as they would have been if the function were called, so ensure
    // that they are properly initialized.
    //
    if (isCallInLoop) {
      fixUnitializedVarDeclarations(newBlock);
    }

    // Finally, convert 'return' statements to assignments/breaks so the body
    // can live inside an ordinary block.
    String labelName = getLabelNameForFunction(fnName);
    Node injectableBlock = replaceReturns(
        newBlock, resultName, labelName, needsDefaultResult);
    Preconditions.checkState(injectableBlock != null);

    return injectableBlock;
  }
/**
* @param n The node to inspect
*/
private static void rewriteFunctionDeclarations(Node n) {
if (n.isFunction()) {
if (NodeUtil.isFunctionDeclaration(n)) {
// Rewrite: function f() {} ==> var f = function() {}
Node fnNameNode = n.getFirstChild();
Node name = IR.name(fnNameNode.getString()).srcref(fnNameNode);
Node var = IR.var(name).srcref(n);
fnNameNode.setString("");
// Add the VAR, remove the FUNCTION
n.getParent().replaceChild(n, var);
// readd the function as a function expression
name.addChildToFront(n);
}
return;
}
for (Node c = n.getFirstChild(), next; c != null; c = next) {
next = c.getNext(); // We may rewrite "c"
rewriteFunctionDeclarations(c);
}
}
/**
* For all VAR node with uninitialized declarations, set
* the values to be "undefined".
*/
private static void fixUnitializedVarDeclarations(Node n) {
// Inner loop structure must already have logic to initialize its
// variables. In particular FOR-IN structures must not be modified.
if (NodeUtil.isLoopStructure(n)) {
return;
}
// For all VARs
if (n.isVar()) {
Node name = n.getFirstChild();
// It isn't initialized.
if (!name.hasChildren()) {
Node srcLocation = name;
name.addChildToBack(NodeUtil.newUndefinedNode(srcLocation));
}
return;
}
for (Node c = n.getFirstChild(); c != null; c = c.getNext()) {
fixUnitializedVarDeclarations(c);
}
}
/**
* Fix-up all local names to be unique for this subtree.
* @param fnNode A mutable instance of the function to be inlined.
*/
private void makeLocalNamesUnique(Node fnNode, boolean isCallInLoop) {
Supplier<String> idSupplier = compiler.getUniqueNameIdSupplier();
// Make variable names unique to this instance.
NodeTraversal.traverse(
compiler, fnNode, new MakeDeclaredNamesUnique(
new InlineRenamer(
compiler.getCodingConvention(),
idSupplier,
"inline_",
isCallInLoop)));
// Make label names unique to this instance.
new RenameLabels(compiler, new LabelNameSupplier(idSupplier), false)
.process(null, fnNode);
}
  /**
   * Supplies label names of the form "JSCompiler_inline_label_<id>", where
   * the id comes from the wrapped unique-id supplier.
   */
  static class LabelNameSupplier implements Supplier<String> {
    // Source of the unique numeric suffixes.
    final Supplier<String> idSupplier;

    LabelNameSupplier(Supplier<String> idSupplier) {
        this.idSupplier = idSupplier;
    }

    @Override
    public String get() {
        return "JSCompiler_inline_label_" + idSupplier.get();
    }
  }
/**
* Create a unique label name.
*/
private String getLabelNameForFunction(String fnName){
String name = (fnName == null || fnName.isEmpty()) ? "anon" : fnName;
return "JSCompiler_inline_label_" + name + "_" + safeNameIdSupplier.get();
}
  /**
   * Create a unique "this" name, used when the call's "this" value must be
   * captured in a local alias before inlining.
   */
  private String getUniqueThisName() {
    return "JSCompiler_inline_this_" + safeNameIdSupplier.get();
  }
  /**
   * Inlines the arguments within the node tree using the given argument map,
   * replacing "unsafe" names with local aliases.
   *
   * The aliases for unsafe names require new VAR declarations, so this
   * function cannot be used for direct CALL node replacement, as VAR nodes
   * cannot be created there.
   *
   * @param fnTemplateRoot the (detached) function body to inline into
   * @param argMap parameter name to call-argument expression, in declaration order
   * @param namesToAlias parameter names that must be captured in local aliases
   * @return The node or its replacement.
   */
  private Node aliasAndInlineArguments(
      Node fnTemplateRoot, LinkedHashMap<String, Node> argMap,
      Set<String> namesToAlias) {

    if (namesToAlias == null || namesToAlias.isEmpty()) {
      // There are no names to alias, just inline the arguments directly.
      Node result = FunctionArgumentInjector.inject(
          compiler, fnTemplateRoot, null, argMap);
      Preconditions.checkState(result == fnTemplateRoot);
      return result;
    } else {
      // Create local alias of names that can not be safely
      // used directly.

      // An arg map that will be updated to contain the
      // safe aliases.
      Map<String, Node> newArgMap = new HashMap<>(argMap);

      // Declare the alias in the same order as they
      // are declared.
      List<Node> newVars = new LinkedList<>();
      // NOTE: argMap is a linked map so we get the parameters in the
      // order that they were declared.
      for (Entry<String, Node> entry : argMap.entrySet()) {
        String name = entry.getKey();
        if (namesToAlias.contains(name)) {
          if (name.equals(THIS_MARKER)) {
            boolean referencesThis = NodeUtil.referencesThis(fnTemplateRoot);
            // Update "this", this is only necessary if "this" is referenced
            // and the value of "this" is not Token.THIS, or the value of "this"
            // has side effects.

            Node value = entry.getValue();
            if (!value.isThis()
                && (referencesThis
                    || NodeUtil.mayHaveSideEffects(value, compiler))) {
              String newName = getUniqueThisName();
              Node newValue = entry.getValue().cloneTree();
              Node newNode = NodeUtil.newVarNode(newName, newValue)
                  .copyInformationFromForTree(newValue);
              newVars.add(0, newNode);
              // Remove the parameter from the list to replace.
              newArgMap.put(THIS_MARKER,
                  IR.name(newName)
                      .srcrefTree(newValue));
            }
          } else {
            Node newValue = entry.getValue().cloneTree();
            Node newNode = NodeUtil.newVarNode(name, newValue)
                .copyInformationFromForTree(newValue);
            newVars.add(0, newNode);
            // Remove the parameter from the list to replace.
            newArgMap.remove(name);
          }
        }
      }

      // Inline the arguments.
      Node result = FunctionArgumentInjector.inject(
          compiler, fnTemplateRoot, null, newArgMap);
      Preconditions.checkState(result == fnTemplateRoot);

      // Now that the names have been replaced, add the new aliases for
      // the old names.
      for (Node n : newVars) {
        fnTemplateRoot.addChildToFront(n);
      }

      return result;
    }
  }
  /**
   * Convert returns to assignments and breaks, as needed.
   * For example, with a labelName of 'foo':
   *   {
   *     return a;
   *   }
   * becomes:
   *   foo: {
   *     a;
   *     break foo;
   *   }
   * or
   *   foo: {
   *     resultName = a;
   *     break foo;
   *   }
   *
   * @param block the function body block to rewrite (modified in place)
   * @param resultName name to assign return values to, or null if unused
   * @param labelName label to break to for non-trailing returns
   * @param resultMustBeSet Whether the result must always be set to a value.
   * @return The node containing the transformed block, this may be different
   *   than the passed in node 'block'.
   */
  private static Node replaceReturns(
      Node block, String resultName, String labelName,
      boolean resultMustBeSet) {
    Preconditions.checkNotNull(block);
    Preconditions.checkNotNull(labelName);

    Node root = block;

    boolean hasReturnAtExit = false;
    int returnCount = NodeUtil.getNodeTypeReferenceCount(
        block, Token.RETURN, new NodeUtil.MatchShallowStatement());
    if (returnCount > 0) {
      hasReturnAtExit = hasReturnAtExit(block);
      // TODO(johnlenz): Simpler not to special case this,
      // and let it be optimized later.
      if (hasReturnAtExit) {
        // The trailing return needs no break; rewrite it in place.
        convertLastReturnToStatement(block, resultName);
        returnCount--;
      }

      if (returnCount > 0) {
        // A label and breaks are needed.

        // Add the breaks
        replaceReturnWithBreak(block, null, resultName, labelName);

        // Add label
        Node name = IR.labelName(labelName).srcref(block);
        Node label = IR.label(name, block).srcref(block);

        Node newRoot = IR.block().srcref(block);
        newRoot.addChildrenToBack(label);


        // The label is now the root.
        root = newRoot;
      }
    }

    // If there wasn't an return at the end of the function block, and we need
    // a result, add one to the block.
    if (resultMustBeSet && !hasReturnAtExit && resultName != null) {
      addDummyAssignment(block, resultName);
    }

    return root;
  }
/**********************************************************************
* Functions following here are general node transformation functions
**********************************************************************/
/**
* Example:
* a = (void) 0;
*/
private static void addDummyAssignment(Node node, String resultName) {
Preconditions.checkArgument(node.isBlock());
// A result is needed create a dummy value.
Node srcLocation = node;
Node retVal = NodeUtil.newUndefinedNode(srcLocation);
Node resultNode = createAssignStatementNode(resultName, retVal);
resultNode.copyInformationFromForTree(node);
node.addChildrenToBack(resultNode);
}
/**
 * Rewrites the trailing 'return' of the block in place.
 * "return foo()" becomes "foo()" (or "resultName = foo()" when a result
 * name is supplied); a bare "return" is removed (or becomes
 * "resultName = void 0").
 *
 * @param block the statement block whose last child must be a RETURN
 * @param resultName the name receiving the returned value, or null
 */
private static void convertLastReturnToStatement(
    Node block, String resultName) {
  Node returnNode = block.getLastChild();
  Preconditions.checkArgument(returnNode.isReturn());

  Node replacement = getReplacementReturnStatement(returnNode, resultName);
  if (replacement == null) {
    // A bare "return" with no result consumer: nothing to keep.
    block.removeChild(returnNode);
  } else {
    replacement.copyInformationFromForTree(returnNode);
    block.replaceChild(returnNode, replacement);
  }
}
/**
 * Builds the statement "name = expression;", i.e. an EXPR_RESULT node
 * wrapping an ASSIGN of the expression to the given name.
 *
 * @param name the assignment target
 * @param expression the right-hand side; must be detached from any tree
 * @return the new statement node
 */
private static Node createAssignStatementNode(String name, Node expression) {
  Node target = IR.name(name);
  return NodeUtil.newExpr(IR.assign(target, expression));
}
/**
 * Computes the statement that should stand in for a 'return'.
 * If the result is needed (resultName != null):
 *   "return foo()" becomes "resultName = foo()"
 *   "return" becomes "resultName = void 0".
 * Otherwise:
 *   "return foo()" becomes "foo()"
 *   for "return", null is returned (there is nothing to keep).
 *
 * @param node the RETURN node (left attached to its parent)
 * @param resultName the name receiving the returned value, or null
 * @return the replacement statement, or null when none is needed
 */
private static Node getReplacementReturnStatement(
    Node node, String resultName) {
  // Clone rather than detach: the RETURN is still attached to its parent.
  Node returnedValue = node.hasChildren()
      ? node.getFirstChild().cloneTree()
      : null;

  if (resultName == null) {
    // No result wanted: keep the expression for its side effects, if any.
    return (returnedValue == null) ? null : NodeUtil.newExpr(returnedValue);
  }

  if (returnedValue == null) {
    // A result is required but the return carried none; use "void 0".
    returnedValue = NodeUtil.newUndefinedNode(node);
  }
  // Create a "resultName = returnedValue;" statement.
  return createAssignStatementNode(resultName, returnedValue);
}
/**
 * @param block the statement block to inspect
 * @return Whether the last statement of the given block is a RETURN.
 */
private static boolean hasReturnAtExit(Node block) {
  return block.getLastChild().isReturn();
}
/**
 * Recursively replaces every 'return' under {@code current} with an
 * optional result assignment followed by a labeled break, so the subtree
 * can live inside a labeled block instead of a function body.
 * "return foo()" becomes "{foo(); break label;}" or
 * "{resultName = foo(); break label;}"
 * "return" becomes "{break label;}" or "{resultName = void 0; break label;}".
 *
 * @param current the node being visited; may be replaced in its parent
 * @param parent the parent of {@code current}; must be a statement block
 *     whenever {@code current} is a RETURN
 * @param resultName the name receiving the returned value, or null
 * @param labelName the label targeted by the synthesized break
 * @return the node now occupying {@code current}'s position: the new BREAK
 *     node when a replacement happened, otherwise {@code current} itself
 */
private static Node replaceReturnWithBreak(Node current, Node parent,
    String resultName, String labelName) {
  if (current.isFunction()
      || current.isExprResult()) {
    // Don't recurse into functions definitions, and expressions can't
    // contain RETURN nodes.
    return current;
  }

  if (current.isReturn()) {
    Preconditions.checkState(NodeUtil.isStatementBlock(parent));

    Node resultNode = getReplacementReturnStatement(current, resultName);
    Node breakNode = IR.breakNode(IR.labelName(labelName));

    // Replace the node in parent, and reset current to the first new child.
    breakNode.copyInformationFromForTree(current);
    parent.replaceChild(current, breakNode);
    if (resultNode != null) {
      resultNode.copyInformationFromForTree(current);
      // The assignment must execute before the break it precedes.
      parent.addChildBefore(resultNode, breakNode);
    }
    current = breakNode;
  } else {
    for (Node c = current.getFirstChild(); c != null; c = c.getNext()) {
      // c may be replaced.
      // Reassign c so iteration continues from the replacement node when
      // the child was swapped out (e.g. RETURN -> BREAK) by the recursion.
      c = replaceReturnWithBreak(c, current, resultName, labelName);
    }
  }

  return current;
}
}
| apache-2.0 |
mtjandra/izpack | izpack-installer/src/main/java/com/izforge/izpack/installer/web/WebAccessor.java | 11016 | /*
* IzPack - Copyright 2001-2008 Julien Ponge, All Rights Reserved.
*
* http://izpack.org/
* http://izpack.codehaus.org/
*
* Copyright 2002 Johannes Lehtinen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.izforge.izpack.installer.web;
import javax.swing.*;
import java.awt.*;
import java.io.InputStream;
import java.net.*;
import java.util.Locale;
/**
 * Dialogs for password authentication and firewall specification, when needed, during web
 * installation.
 *
 * Not thread-safe: all dialog interaction is expected to happen from a single
 * caller thread, while a short-lived background thread opens the connection.
 *
 * @author Chadwick McHenry
 * @author <a href="vralev@redhat.com">Vladimir Ralev</a>
 * @version 1.0
 */
public class WebAccessor
{
    /** Background thread that opens the URL connection. */
    private Thread openerThread = null;

    /** Stream produced by the opener thread; null until a connection succeeds. */
    private InputStream iStream = null;

    /** Failure reported by the opener thread, if any. */
    private Exception exception = null;

    private Object soloCancelOption = null;

    /** Parent component used to anchor the dialogs; may be null. */
    private Component parent = null;

    private JDialog dialog = null;

    /** Set by the opener thread when the failure looks like a proxy problem. */
    private boolean tryProxy = false;

    // Lazily-built password dialog widgets (see getPasswordPanel()).
    private JPanel passwordPanel = null;
    private JLabel promptLabel;
    private JTextField nameField;
    private JPasswordField passField;

    // Lazily-built proxy dialog widgets (see getProxyPanel()).
    private JPanel proxyPanel = null;
    private JLabel errorLabel;
    private JTextField hostField;
    private JTextField portField;

    /** External form of the most recently requested URL. */
    private String url;

    /** Content length reported by the last HTTP connection, or -1 if unknown. */
    private int contentLength = -1;

    /**
     * Not yet Implemented: placeholder for headless installs.
     *
     * @throws UnsupportedOperationException always
     */
    public WebAccessor()
    {
        // the class should probably be rearranged to do this.
        throw new UnsupportedOperationException();
    }

    /**
     * Create a WebAccessor that prompts for proxies and passwords using a JDialog.
     *
     * @param parent determines the frame in which the dialog is displayed; if the parentComponent
     *               has no Frame, a default Frame is used
     */
    public WebAccessor(Component parent)
    {
        this.parent = parent;
        Locale locale = null;
        if (parent != null)
        {
            // BUGFIX: the return value was previously discarded, leaving
            // locale null; capture it so UIManager.get() can honor it.
            locale = parent.getLocale();
        }
        soloCancelOption = UIManager.get("OptionPane.cancelButtonText", locale);// TODO:
        // i18n?
        // Install the dialog-based authenticator for any password-protected URL.
        Authenticator.setDefault(new MyDialogAuthenticator());
    }

    /**
     * Opens a URL connection and returns it's InputStream for the specified URL.
     * On connection failures that look proxy-related, prompts the user for a
     * proxy host/port and retries until success or cancel.
     *
     * @param url the url to open the stream to.
     * @return an input stream ready to read, or null on failure
     */
    public InputStream openInputStream(URL url)
    {
        setUrl(url.toExternalForm());
        OPEN_URL:
        while (true)
        {
            // BUGFIX: clear state left over from a previous attempt. Without
            // this, a stale exception makes the wait loop below exit
            // immediately on retries after the proxy dialog, racing the
            // freshly started opener thread.
            iStream = null;
            exception = null;
            startOpening(url); // this starts a thread

            Thread.yield();

            // Wait a bit (28 * 200ms ~ 5.6s) to see if the stream comes up
            int retry = 28;
            while (exception == null && iStream == null && retry > 0)
            {
                try
                {
                    Thread.sleep(200);
                    retry--;
                }
                catch (Exception e)
                {
                    System.out.println("In openInputStream: " + e);
                }
            }

            /* Try to find a proxy if that failed */

            // success!
            if (iStream != null)
            {
                break;
            }

            // an exception we don't expect setting a proxy to fix
            if (!tryProxy)
            {
                break;
            }

            // else (exception != null)
            // show proxy dialog until valid values or cancel
            JPanel panel = getProxyPanel();
            errorLabel.setText("Unable to connect: " + exception.getMessage());
            while (true)
            {
                int result = JOptionPane.showConfirmDialog(parent, panel, "Proxy Configuration",
                        JOptionPane.OK_CANCEL_OPTION, JOptionPane.QUESTION_MESSAGE);
                if (result != JOptionPane.OK_OPTION) // canceled
                {
                    break OPEN_URL;
                }

                String host = null;
                String port = null;

                try
                {
                    // Resolve eagerly so a bad host is reported immediately.
                    InetAddress addr = InetAddress.getByName(hostField.getText());
                    host = addr.getHostName();
                }
                catch (Exception x)
                {
                    errorLabel.setText("Unable to resolve Host");
                    Toolkit.getDefaultToolkit().beep();
                }

                try
                {
                    if (host != null)
                    {
                        // Validates the port is numeric before using it.
                        port = Integer.valueOf(portField.getText()).toString();
                    }
                }
                catch (NumberFormatException x)
                {
                    errorLabel.setText("Invalid Port");
                    Toolkit.getDefaultToolkit().beep();
                }

                if (host != null && port != null)
                {
                    // System.err.println ("Setting http proxy: "+ host
                    // +":"+ port);
                    // NOTE(review): modern JVMs prefer "http.proxyHost" /
                    // "http.proxyPort"; the unprefixed names are legacy.
                    System.getProperties().put("proxySet", "true");
                    System.getProperties().put("proxyHost", host);
                    System.getProperties().put("proxyPort", port);
                    break;
                }
            }
        }

        if (iStream == null)
        {
            // Give up: stop the opener thread if it is still trying.
            openerThread.interrupt();
        }
        return iStream;
    }

    /**
     * Starts a background thread that opens the connection and publishes its
     * result into {@link #iStream} or {@link #exception}.
     */
    private void startOpening(final URL url)
    {
        final WebAccessor webAccessor = this;
        openerThread = new Thread()
        {
            public void run()
            {
                iStream = null;
                try
                {
                    tryProxy = false;
                    URLConnection connection = url.openConnection();

                    if (connection instanceof HttpURLConnection)
                    {
                        HttpURLConnection htc = (HttpURLConnection) connection;
                        contentLength = htc.getContentLength();
                    }

                    //InputStream iii = echoSocket.getInputStream();
                    InputStream inputStream = connection.getInputStream();
                    iStream = new LoggedInputStream(inputStream, webAccessor); // just to make
                }
                catch (ConnectException x)
                { // could be an incorrect proxy
                    tryProxy = true;
                    exception = x;
                }
                catch (Exception x)
                {
                    // Exceptions that get here are considered cancels or
                    // missing
                    // pages, eg 401 if user finally cancels auth
                    exception = x;
                }
                finally
                {
                    // if dialog is in use, allow it to become visible /before/
                    // closing
                    // it, else on /fast/ connectinos, it may open later and
                    // hang!
                    if (dialog != null)
                    {
                        Thread.yield();
                        dialog.setVisible(false);
                    }
                }
            }
        };
        openerThread.start();
    }

    /**
     * Only to be called after an initial error has indicated a connection problem.
     * Builds the proxy host/port panel on first use and revalidates it.
     */
    private JPanel getProxyPanel()
    {
        if (proxyPanel == null)
        {
            proxyPanel = new JPanel(new BorderLayout(5, 5));

            errorLabel = new JLabel();

            JPanel fields = new JPanel(new GridLayout(2, 2));
            // Pre-fill from any proxy already configured on the JVM.
            String hostDefaultValue = (String) System.getProperties().get("proxyHost");
            String portDefaultValue = (String) System.getProperties().get("proxyPort");
            hostField = new JTextField(hostDefaultValue != null ? hostDefaultValue : "");
            portField = new JTextField(portDefaultValue != null ? portDefaultValue : "");
            JLabel host = new JLabel("Host: "); // TODO: i18n
            JLabel port = new JLabel("Port: "); // TODO: i18n
            fields.add(host);
            fields.add(hostField);
            fields.add(port);
            fields.add(portField);

            JLabel exampleLabel = new JLabel("e.g. host=\"gatekeeper.example.com\" port=\"80\"");

            proxyPanel.add(errorLabel, BorderLayout.NORTH);
            proxyPanel.add(fields, BorderLayout.CENTER);
            proxyPanel.add(exampleLabel, BorderLayout.SOUTH);
        }
        proxyPanel.validate();

        return proxyPanel;
    }

    /**
     * Builds the name/password panel on first use; always clears the password
     * field before returning it.
     */
    private JPanel getPasswordPanel()
    {
        if (passwordPanel == null)
        {
            passwordPanel = new JPanel(new BorderLayout(5, 5));

            promptLabel = new JLabel();

            JPanel fields = new JPanel(new GridLayout(2, 2));
            nameField = new JTextField();
            passField = new JPasswordField();
            JLabel name = new JLabel("Name: "); // TODO: i18n
            JLabel pass = new JLabel("Password: "); // TODO: i18n
            fields.add(name);
            fields.add(nameField);
            fields.add(pass);
            fields.add(passField);

            passwordPanel.add(promptLabel, BorderLayout.NORTH);
            passwordPanel.add(fields, BorderLayout.CENTER);
        }
        passField.setText("");

        return passwordPanel;
    }

    /**
     * Authenticates via dialog when needed.
     */
    private class MyDialogAuthenticator extends Authenticator
    {
        public PasswordAuthentication getPasswordAuthentication()
        {
            // TODO: i18n
            JPanel panel = getPasswordPanel();
            String prompt = getRequestingPrompt();
            InetAddress addr = getRequestingSite();
            if (addr != null)
            {
                prompt += " (" + addr.getHostName() + ")";
            }
            promptLabel.setText(prompt);
            int result = JOptionPane.showConfirmDialog(parent, panel, "Enter Password", JOptionPane.OK_CANCEL_OPTION,
                    JOptionPane.QUESTION_MESSAGE);
            if (result != JOptionPane.OK_OPTION)
            {
                return null;
            }

            return new PasswordAuthentication(nameField.getText(), passField.getPassword());
        }
    }

    public String getUrl()
    {
        return url;
    }

    public void setUrl(String url)
    {
        this.url = url;
    }

    public int getContentLength()
    {
        return contentLength;
    }
}
| apache-2.0 |