gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.auditmanager.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/auditmanager-2017-07-25/ListNotifications" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListNotificationsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The returned list of notifications. */
    private java.util.List<Notification> notifications;

    /** The pagination token that's used to fetch the next set of results. */
    private String nextToken;

    /**
     * Returns the list of notifications.
     *
     * @return The returned list of notifications.
     */
    public java.util.List<Notification> getNotifications() {
        return notifications;
    }

    /**
     * Replaces the current list of notifications with a copy of the supplied collection.
     *
     * @param notifications
     *        The returned list of notifications.
     */
    public void setNotifications(java.util.Collection<Notification> notifications) {
        this.notifications = (notifications == null) ? null : new java.util.ArrayList<Notification>(notifications);
    }

    /**
     * Appends the supplied values to the notifications list.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setNotifications(java.util.Collection)} or {@link #withNotifications(java.util.Collection)} if you want
     * to override the existing values.
     * </p>
     *
     * @param notifications
     *        The returned list of notifications.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListNotificationsResult withNotifications(Notification... notifications) {
        if (this.notifications == null) {
            this.notifications = new java.util.ArrayList<Notification>(notifications.length);
        }
        java.util.Collections.addAll(this.notifications, notifications);
        return this;
    }

    /**
     * Replaces the notifications list and returns this object for call chaining.
     *
     * @param notifications
     *        The returned list of notifications.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListNotificationsResult withNotifications(java.util.Collection<Notification> notifications) {
        setNotifications(notifications);
        return this;
    }

    /**
     * Sets the pagination token that's used to fetch the next set of results.
     *
     * @param nextToken
     *        The pagination token that's used to fetch the next set of results.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token that's used to fetch the next set of results.
     *
     * @return The pagination token that's used to fetch the next set of results.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the pagination token and returns this object for call chaining.
     *
     * @param nextToken
     *        The pagination token that's used to fetch the next set of results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListNotificationsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getNotifications() != null) {
            sb.append("Notifications: ").append(getNotifications()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is also false for null, matching the original explicit null check
        if (!(obj instanceof ListNotificationsResult)) {
            return false;
        }
        ListNotificationsResult other = (ListNotificationsResult) obj;
        return java.util.Objects.equals(this.getNotifications(), other.getNotifications())
                && java.util.Objects.equals(this.getNextToken(), other.getNextToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash performs the same 31-based accumulation (null -> 0) as the
        // generated prime-multiplier code, so hash values are unchanged.
        return java.util.Objects.hash(getNotifications(), getNextToken());
    }

    @Override
    public ListNotificationsResult clone() {
        try {
            return (ListNotificationsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright 2013 gitblit.com.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gitblit.git;
import static org.eclipse.jgit.transport.BasePackPushConnection.CAPABILITY_SIDE_BAND_64K;
import groovy.lang.Binding;
import groovy.util.GroovyScriptEngine;
import java.io.File;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.eclipse.jgit.lib.BatchRefUpdate;
import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.transport.PostReceiveHook;
import org.eclipse.jgit.transport.PreReceiveHook;
import org.eclipse.jgit.transport.ReceiveCommand;
import org.eclipse.jgit.transport.ReceiveCommand.Result;
import org.eclipse.jgit.transport.ReceivePack;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.gitblit.Constants;
import com.gitblit.Constants.AccessRestrictionType;
import com.gitblit.IStoredSettings;
import com.gitblit.Keys;
import com.gitblit.client.Translation;
import com.gitblit.manager.IGitblit;
import com.gitblit.models.RepositoryModel;
import com.gitblit.models.UserModel;
import com.gitblit.tickets.BranchTicketService;
import com.gitblit.utils.ArrayUtils;
import com.gitblit.utils.ClientLogger;
import com.gitblit.utils.CommitCache;
import com.gitblit.utils.JGitUtils;
import com.gitblit.utils.RefLogUtils;
import com.gitblit.utils.StringUtils;
/**
* GitblitReceivePack processes receive commands. It also executes Groovy pre-
* and post- receive hooks.
*
* The general execution flow is:
* <ol>
* <li>onPreReceive()</li>
* <li>executeCommands()</li>
* <li>onPostReceive()</li>
* </ol>
*
* @author Android Open Source Project
* @author James Moger
*
*/
public class GitblitReceivePack extends ReceivePack implements PreReceiveHook, PostReceiveHook {

    private static final Logger LOGGER = LoggerFactory.getLogger(GitblitReceivePack.class);

    /** Model of the repository being pushed to. */
    protected final RepositoryModel repository;

    /** The authenticated user performing the push. */
    protected final UserModel user;

    /** Folder containing the Groovy hook scripts. */
    protected final File groovyDir;

    /** Base URL of this Gitblit instance; exposed to hook scripts as the "url" binding. */
    protected String gitblitUrl;

    /** Groovy engine used to run hook scripts; remains null if construction failed. */
    protected GroovyScriptEngine gse;

    /** Server settings. */
    protected final IStoredSettings settings;

    /** Gitblit manager facade. */
    protected final IGitblit gitblit;
/**
 * Creates a receive pack for the specified repository and user, applies the
 * user's ref-level permissions, and registers this instance as both the
 * pre- and post-receive hook.
 *
 * @param gitblit the Gitblit manager facade
 * @param db the JGit repository receiving the push
 * @param repository the Gitblit repository model
 * @param user the authenticated pushing user
 */
public GitblitReceivePack(
        IGitblit gitblit,
        Repository db,
        RepositoryModel repository,
        UserModel user) {

    super(db);
    this.settings = gitblit.getSettings();
    this.gitblit = gitblit;
    this.repository = repository;
    this.user = user;
    this.groovyDir = gitblit.getHooksFolder();
    try {
        // set Grape root for Groovy script dependency resolution
        File grapeRoot = gitblit.getGrapesFolder();
        grapeRoot.mkdirs();
        System.setProperty("grape.root", grapeRoot.getAbsolutePath());
        this.gse = new GroovyScriptEngine(groovyDir.getAbsolutePath());
    } catch (IOException e) {
        // BUGFIX: this exception was silently swallowed, making a broken hooks
        // folder undiagnosable.  this.gse remains null on this path.
        LOGGER.error("Failed to create Groovy script engine for " + groovyDir, e);
    }

    // set advanced ref permissions
    setAllowCreates(user.canCreateRef(repository));
    setAllowDeletes(user.canDeleteRef(repository));
    setAllowNonFastForwards(user.canRewindRef(repository));

    // setup pre and post receive hook
    setPreReceiveHook(this);
    setPostReceiveHook(this);
}
/**
 * Instrumentation point where the incoming push event has been parsed,
 * validated, objects created BUT refs have not been updated. You might
 * use this to enforce a branch-write permissions model.
 *
 * @param rp the receive pack processing this push
 * @param commands the ref update commands of the push
 */
@Override
public void onPreReceive(ReceivePack rp, Collection<ReceiveCommand> commands) {

    if (repository.isMirror) {
        // repository is a mirror
        for (ReceiveCommand cmd : commands) {
            sendRejection(cmd, "Gitblit does not allow pushes to \"{0}\" because it is a mirror!", repository.name);
        }
        return;
    }

    if (repository.isFrozen) {
        // repository is frozen/readonly
        for (ReceiveCommand cmd : commands) {
            sendRejection(cmd, "Gitblit does not allow pushes to \"{0}\" because it is frozen!", repository.name);
        }
        return;
    }

    if (!repository.isBare) {
        // repository has a working copy
        for (ReceiveCommand cmd : commands) {
            sendRejection(cmd, "Gitblit does not allow pushes to \"{0}\" because it has a working copy!", repository.name);
        }
        return;
    }

    if (!user.canPush(repository)) {
        // user does not have push permissions
        for (ReceiveCommand cmd : commands) {
            sendRejection(cmd, "User \"{0}\" does not have push permissions for \"{1}\"!", user.username, repository.name);
        }
        return;
    }

    if (repository.accessRestriction.atLeast(AccessRestrictionType.PUSH) && repository.verifyCommitter) {
        // enforce committer verification
        if (StringUtils.isEmpty(user.emailAddress)) {
            // reject the push because the pushing account does not have an email address
            for (ReceiveCommand cmd : commands) {
                sendRejection(cmd, "Sorry, the account \"{0}\" does not have an email address set for committer verification!", user.username);
            }
            return;
        }

        // Optionally enforce that the committer of the first parent chain
        // match the account being used to push the commits.
        //
        // This requires all merge commits are executed with the "--no-ff"
        // option to force a merge commit even if fast-forward is possible.
        // This ensures that the chain of first parents has the commit
        // identity of the merging user.
        //
        // BUGFIX: the original tracked "boolean allRejected = false" but only
        // ever applied "allRejected &= true" (a no-op), so allRejected could
        // never become true and the all-rejected early abort below was dead
        // code.  Count rejected commands instead.
        int rejected = 0;
        for (ReceiveCommand cmd : commands) {
            String firstParent = null;
            try {
                List<RevCommit> commits = JGitUtils.getRevLog(rp.getRepository(), cmd.getOldId().name(), cmd.getNewId().name());
                for (RevCommit commit : commits) {
                    if (firstParent != null) {
                        if (!commit.getName().equals(firstParent)) {
                            // ignore: commit is right-descendant of a merge
                            continue;
                        }
                    }
                    // update expected next commit id
                    if (commit.getParentCount() == 0) {
                        firstParent = null;
                    } else {
                        firstParent = commit.getParents()[0].getId().getName();
                    }

                    PersonIdent committer = commit.getCommitterIdent();
                    if (!user.is(committer.getName(), committer.getEmailAddress())) {
                        // verification failed
                        String reason = MessageFormat.format("{0} by {1} <{2}> was not committed by {3} ({4}) <{5}>",
                                commit.getId().name(), committer.getName(), StringUtils.isEmpty(committer.getEmailAddress()) ? "?" : committer.getEmailAddress(), user.getDisplayName(), user.username, user.emailAddress);
                        LOGGER.warn(reason);
                        cmd.setResult(Result.REJECTED_OTHER_REASON, reason);
                        rejected++;
                        break;
                    }
                }
            } catch (Exception e) {
                LOGGER.error("Failed to verify commits were made by pushing user", e);
            }
        }

        if (rejected == commands.size()) {
            // all ref updates rejected, abort
            return;
        }
    }

    for (ReceiveCommand cmd : commands) {
        String ref = cmd.getRefName();
        if (ref.startsWith(Constants.R_HEADS)) {
            switch (cmd.getType()) {
            case UPDATE_NONFASTFORWARD:
            case DELETE:
                // reset branch commit cache on REWIND and DELETE
                CommitCache.instance().clear(repository.name, ref);
                break;
            default:
                break;
            }
        } else if (ref.equals(BranchTicketService.BRANCH)) {
            // ensure pushing user is an administrator OR an owner
            // i.e. prevent ticket tampering
            boolean permitted = user.canAdmin() || repository.isOwner(user.username);
            if (!permitted) {
                sendRejection(cmd, "{0} is not permitted to push to {1}", user.username, ref);
            }
        } else if (ref.startsWith(Constants.R_FOR)) {
            // prevent accidental push to refs/for
            sendRejection(cmd, "{0} is not configured to receive patchsets", repository.name);
        }
    }

    // run any pre-receive Groovy hook scripts (inherited + repository-specific)
    Set<String> scripts = new LinkedHashSet<String>();
    scripts.addAll(gitblit.getPreReceiveScriptsInherited(repository));
    if (!ArrayUtils.isEmpty(repository.preReceiveScripts)) {
        scripts.addAll(repository.preReceiveScripts);
    }
    runGroovy(commands, scripts);

    // log all commands that have been rejected so far
    for (ReceiveCommand cmd : commands) {
        if (!Result.NOT_ATTEMPTED.equals(cmd.getResult())) {
            LOGGER.warn(MessageFormat.format("{0} {1} because \"{2}\"", cmd.getNewId()
                    .getName(), cmd.getResult(), cmd.getMessage()));
        }
    }
}
/**
 * Instrumentation point where the incoming push has been applied to the
 * repository. This is the point where we would trigger a Jenkins build
 * or send an email.
 *
 * @param rp the receive pack processing this push
 * @param commands the ref update commands that were executed
 */
@Override
public void onPostReceive(ReceivePack rp, Collection<ReceiveCommand> commands) {
    if (commands.size() == 0) {
        LOGGER.debug("skipping post-receive hooks, no refs created, updated, or removed");
        return;
    }

    // log ref changes
    for (ReceiveCommand cmd : commands) {
        if (Result.OK.equals(cmd.getResult())) {
            // add some logging for important ref changes
            switch (cmd.getType()) {
            case DELETE:
                LOGGER.info(MessageFormat.format("{0} DELETED {1} in {2} ({3})", user.username, cmd.getRefName(), repository.name, cmd.getOldId().name()));
                break;
            case CREATE:
                LOGGER.info(MessageFormat.format("{0} CREATED {1} in {2}", user.username, cmd.getRefName(), repository.name));
                break;
            case UPDATE:
                LOGGER.info(MessageFormat.format("{0} UPDATED {1} in {2} (from {3} to {4})", user.username, cmd.getRefName(), repository.name, cmd.getOldId().name(), cmd.getNewId().name()));
                break;
            case UPDATE_NONFASTFORWARD:
                LOGGER.info(MessageFormat.format("{0} UPDATED NON-FAST-FORWARD {1} in {2} (from {3} to {4})", user.username, cmd.getRefName(), repository.name, cmd.getOldId().name(), cmd.getNewId().name()));
                break;
            default:
                break;
            }
        }
    }

    if (repository.useIncrementalPushTags) {
        // tag each pushed branch tip
        // fall back to the push identity's email address when the account has none set
        String emailAddress = user.emailAddress == null ? rp.getRefLogIdent().getEmailAddress() : user.emailAddress;
        PersonIdent userIdent = new PersonIdent(user.getDisplayName(), emailAddress);

        for (ReceiveCommand cmd : commands) {
            if (!cmd.getRefName().startsWith(Constants.R_HEADS)) {
                // only tag branch ref changes
                continue;
            }

            if (!ReceiveCommand.Type.DELETE.equals(cmd.getType())
                    && ReceiveCommand.Result.OK.equals(cmd.getResult())) {
                String objectId = cmd.getNewId().getName();
                String branch = cmd.getRefName().substring(Constants.R_HEADS.length());
                // get translation based on the server's locale setting
                String template = Translation.get("gb.incrementalPushTagMessage");
                String msg = MessageFormat.format(template, branch);
                String prefix;
                if (StringUtils.isEmpty(repository.incrementalPushTagPrefix)) {
                    // no per-repository tag prefix configured, use the server default
                    prefix = settings.getString(Keys.git.defaultIncrementalPushTagPrefix, "r");
                } else {
                    prefix = repository.incrementalPushTagPrefix;
                }
                JGitUtils.createIncrementalRevisionTag(
                        rp.getRepository(),
                        objectId,
                        userIdent,
                        prefix,
                        "0",
                        msg);
            }
        }
    }

    // update push log
    try {
        RefLogUtils.updateRefLog(user, rp.getRepository(), commands);
        LOGGER.debug(MessageFormat.format("{0} push log updated", repository.name));
    } catch (Exception e) {
        LOGGER.error(MessageFormat.format("Failed to update {0} pushlog", repository.name), e);
    }

    // check for updates pushed to the BranchTicketService branch
    // if the BranchTicketService is active it will reindex, as appropriate
    for (ReceiveCommand cmd : commands) {
        if (Result.OK.equals(cmd.getResult())
                && BranchTicketService.BRANCH.equals(cmd.getRefName())) {
            rp.getRepository().fireEvent(new ReceiveCommandEvent(repository, cmd));
        }
    }

    // run Groovy hook scripts
    Set<String> scripts = new LinkedHashSet<String>();
    scripts.addAll(gitblit.getPostReceiveScriptsInherited(repository));
    if (!ArrayUtils.isEmpty(repository.postReceiveScripts)) {
        scripts.addAll(repository.postReceiveScripts);
    }
    runGroovy(commands, scripts);
}
/**
 * Execute commands to update references.
 *
 * Filters the received commands to those not yet attempted, applies them as
 * a single JGit {@link BatchRefUpdate}, and reports progress over the
 * side-band channel when the client advertised that capability.  Commands
 * the batch could not attempt due to an I/O failure are rejected with a
 * lock error.
 */
@Override
protected void executeCommands() {
    List<ReceiveCommand> toApply = filterCommands(Result.NOT_ATTEMPTED);
    if (toApply.isEmpty()) {
        return;
    }

    ProgressMonitor updating = NullProgressMonitor.INSTANCE;
    boolean sideBand = isCapabilityEnabled(CAPABILITY_SIDE_BAND_64K);
    if (sideBand) {
        // stream progress to the client, delaying the first message by 250ms
        SideBandProgressMonitor pm = new SideBandProgressMonitor(msgOut);
        pm.setDelayStart(250, TimeUnit.MILLISECONDS);
        updating = pm;
    }

    BatchRefUpdate batch = getRepository().getRefDatabase().newBatchUpdate();
    batch.setAllowNonFastForwards(isAllowNonFastForwards());
    batch.setRefLogIdent(getRefLogIdent());
    batch.setRefLogMessage("push", true);

    for (ReceiveCommand cmd : toApply) {
        if (Result.NOT_ATTEMPTED != cmd.getResult()) {
            // Already rejected by the core receive process.
            continue;
        }
        batch.addCommand(cmd);
    }

    if (!batch.getCommands().isEmpty()) {
        try {
            batch.execute(getRevWalk(), updating);
        } catch (IOException err) {
            for (ReceiveCommand cmd : toApply) {
                if (cmd.getResult() == Result.NOT_ATTEMPTED) {
                    // the batch failed before this command was attempted
                    sendRejection(cmd, "lock error: {0}", err.getMessage());
                }
            }
        }
    }
}
/** Sets the base URL of this Gitblit instance, exposed to Groovy hook scripts as "url". */
protected void setGitblitUrl(String url) {
    gitblitUrl = url;
}
/**
 * Rejects a ref update command with a formatted reason and logs the rejection.
 *
 * @param cmd the command to reject
 * @param why the reason; treated as a MessageFormat pattern when arguments are given
 * @param objects optional MessageFormat arguments
 */
protected void sendRejection(final ReceiveCommand cmd, final String why, Object... objects) {
    final String reason = ArrayUtils.isEmpty(objects) ? why : MessageFormat.format(why, objects);
    cmd.setResult(Result.REJECTED_OTHER_REASON, reason);
    LOGGER.error(reason + " (" + user.username + ")");
}
/** Sends a highlighted header message to the pushing client. */
protected void sendHeader(String msg, Object... objects) {
    sendInfo("--> ", msg, objects);
}

/** Sends an informational message to the pushing client. */
protected void sendInfo(String msg, Object... objects) {
    sendInfo(" ", msg, objects);
}
/**
 * Sends a prefixed informational message to the pushing client and logs it.
 *
 * @param prefix prefix prepended to the client-visible message
 * @param msg the message; treated as a MessageFormat pattern when arguments are given
 * @param objects optional MessageFormat arguments
 */
protected void sendInfo(String prefix, String msg, Object... objects) {
    final String text = ArrayUtils.isEmpty(objects) ? msg : MessageFormat.format(msg, objects);
    super.sendMessage(prefix + text);
    if (!StringUtils.isEmpty(msg)) {
        LOGGER.info(text + " (" + user.username + ")");
    }
}
/**
 * Sends an error message to the pushing client and logs it.
 *
 * @param msg the message; treated as a MessageFormat pattern when arguments are given
 * @param objects optional MessageFormat arguments
 */
protected void sendError(String msg, Object... objects) {
    final String text = ArrayUtils.isEmpty(objects) ? msg : MessageFormat.format(msg, objects);
    super.sendError(text);
    if (!StringUtils.isEmpty(msg)) {
        LOGGER.error(text + " (" + user.username + ")");
    }
}
/**
 * Runs the specified Groovy hook scripts against this push.
 *
 * Scripts are resolved relative to the hooks folder and may be specified
 * with or without the .groovy extension.  A script that returns
 * Boolean.FALSE aborts execution of the remaining scripts.
 *
 * @param commands the ref update commands of this push
 * @param scripts names of the Groovy scripts to execute, may be null or empty
 */
private void runGroovy(Collection<ReceiveCommand> commands, Set<String> scripts) {
    if (scripts == null || scripts.size() == 0) {
        // no Groovy scripts to execute
        return;
    }
    if (gse == null) {
        // BUGFIX: the script engine failed to initialize in the constructor;
        // the original code would have thrown an NPE on gse.run() below.
        LOGGER.error("Groovy script engine is unavailable, skipping hook scripts!");
        return;
    }

    Binding binding = new Binding();
    binding.setVariable("gitblit", gitblit);
    binding.setVariable("repository", repository);
    binding.setVariable("receivePack", this);
    binding.setVariable("user", user);
    binding.setVariable("commands", commands);
    binding.setVariable("url", gitblitUrl);
    binding.setVariable("logger", LOGGER);
    binding.setVariable("clientLogger", new ClientLogger(this));

    for (String script : scripts) {
        if (StringUtils.isEmpty(script)) {
            continue;
        }
        // allow script to be specified without .groovy extension
        // this is easier to read in the settings
        File file = new File(groovyDir, script);
        if (!file.exists() && !script.toLowerCase().endsWith(".groovy")) {
            file = new File(groovyDir, script + ".groovy");
            if (file.exists()) {
                script = file.getName();
            }
        }
        try {
            Object result = gse.run(script, binding);
            if (result instanceof Boolean) {
                if (!((Boolean) result)) {
                    LOGGER.error(MessageFormat.format(
                            "Groovy script {0} has failed! Hook scripts aborted.", script));
                    break;
                }
            }
        } catch (Exception e) {
            LOGGER.error(
                    MessageFormat.format("Failed to execute Groovy script {0}", script), e);
        }
    }
}
}
| |
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.varianceswap;
import java.util.Arrays;
import com.opengamma.analytics.financial.interestrate.InstrumentDerivative;
import com.opengamma.analytics.financial.interestrate.InstrumentDerivativeVisitor;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.Currency;
/**
 * A variance swap is a forward contract on the realised variance of a generic underlying. This could be a single equity price, the value of an equity index,
 * an FX rate or <b>any</b> other financial metric on which a variance swap contract is based.
 * <p>
 * The floating leg of a variance swap is the realized variance and is calculated using the second moment of log returns of the underlying asset.
 * <p>
 * Because variance is additive in time, the value of a variance swap can be decomposed at any point in time between realized and implied variance as
 * _varNotional * Z(t,T) * [ t/T * RealizedVol(0,t)^2 + (T-t)/T * ImpliedVol(t,T)^2 - volStrike^2 ]
 */
public class VarianceSwap implements InstrumentDerivative {
    /** The time in years to the start of variance observations; negative if observations have begun */
    private final double _timeToObsStart;
    /** The time in years to the end of variance observations; negative if observations have finished */
    private final double _timeToObsEnd;
    /** The time in years to settlement; negative if the swap has expired */
    private final double _timeToSettlement;
    /** The variance strike. volStrike ^ 2 */
    private final double _varStrike;
    /** The variance notional. 0.5 * _volNotional / _volStrike */
    private final double _varNotional;
    /** The currency of cash settlement */
    private final Currency _currency;
    /** The annualization factor */
    private final double _annualizationFactor; // typically 252 with daily observations
    /** The number of observations expected as of trade inception */
    private final int _nObsExpected;
    /** The number of expected observations missed because of a market disruption */
    private final int _nObsDisrupted;
    /** The observations of the underlying spot (defensively copied by the constructors) */
    private final double[] _observations;
    /** The weights applied to observation returns; length is observations.length - 1 */
    private final double[] _observationWeights;
/**
 * @param timeToObsStart Time of first observation. Negative if observations have begun.
 * @param timeToObsEnd Time of final observation. Negative if observations have finished.
 * @param timeToSettlement Time of cash settlement. If negative, the swap has expired.
 * @param varStrike Fair value of Variance struck at trade date
 * @param varNotional Trade pays the difference between realized and strike variance multiplied by this
 * @param currency Currency of cash settlement, not null
 * @param annualizationFactor Number of business days per year
 * @param nObsExpected Number of observations expected as of trade inception
 * @param nObsDisrupted Number of expected observations that did not occur because of a market disruption
 * @param observations Array of observations of the underlying spot, not null
 * @param observationWeights Array of weights to give observation returns, not null; length must be observations.length - 1
 */
public VarianceSwap(final double timeToObsStart, final double timeToObsEnd, final double timeToSettlement, final double varStrike, final double varNotional, final Currency currency,
    final double annualizationFactor, final int nObsExpected, final int nObsDisrupted, final double[] observations, final double[] observationWeights) {
  ArgumentChecker.isTrue(varStrike >= 0, "Require varStrike >= 0");
  ArgumentChecker.isTrue(varNotional > 0, "Require varNotional > 0");
  ArgumentChecker.notNull(currency, "currency");
  ArgumentChecker.isTrue(annualizationFactor > 0, "Require annualizationFactor > 0");
  ArgumentChecker.isTrue(nObsExpected > 0, "Encountered a VarianceSwap with 0 expected observations");
  ArgumentChecker.notNull(observations, "observations");
  ArgumentChecker.notNull(observationWeights, "observationWeights");
  // BUGFIX: message typo, was "great than"
  ArgumentChecker.isTrue(nObsExpected >= observations.length, "Number of observations ({}) greater than expected ({})", observations.length, nObsExpected);
  _timeToObsStart = timeToObsStart;
  _timeToObsEnd = timeToObsEnd;
  _timeToSettlement = timeToSettlement;
  _varStrike = varStrike;
  _varNotional = varNotional;
  _currency = currency;
  _annualizationFactor = annualizationFactor;
  _nObsExpected = nObsExpected;
  _nObsDisrupted = nObsDisrupted;
  // defensive copies: callers keep no handle on the stored arrays
  _observations = observations.clone();
  _observationWeights = observationWeights.clone();
  if (_observationWeights.length > 1) {
    // NOTE(review): a single weight with observations.length != 2 escapes this
    // check; the condition arguably should be "> 0" -- confirm before tightening.
    final int nWeights = _observationWeights.length;
    final int nObs = _observations.length;
    ArgumentChecker.isTrue(nWeights + 1 == nObs, "If provided, observationWeights must be of length one less than observations, as they weight returns log(obs[i]/obs[i-1])."
        + " Found {} weights and {} observations.", nWeights, nObs);
  }
}
/**
 * Copy constructor; array state is deep-copied so the copies do not share mutable data.
 *
 * @param other the VarianceSwap to copy, not null
 */
public VarianceSwap(final VarianceSwap other) {
  ArgumentChecker.notNull(other, "variance swap to copy");
  _timeToObsStart = other._timeToObsStart;
  _timeToObsEnd = other._timeToObsEnd;
  _timeToSettlement = other._timeToSettlement;
  _varStrike = other._varStrike;
  _varNotional = other._varNotional;
  _currency = other._currency;
  _annualizationFactor = other._annualizationFactor;
  _nObsExpected = other._nObsExpected;
  _nObsDisrupted = other._nObsDisrupted;
  // clone() on a double[] is a full-length copy, equivalent to Arrays.copyOf
  _observations = other._observations.clone();
  _observationWeights = other._observationWeights.clone();
}
/**
 * Gets the time in years to the start of variance observations.
 *
 * @return the timeToObsStart
 */
public double getTimeToObsStart() {
  return _timeToObsStart;
}

/**
 * Gets the time in years to the end of variance observations.
 *
 * @return the timeToObsEnd
 */
public double getTimeToObsEnd() {
  return _timeToObsEnd;
}

/**
 * Gets the time in years to cash settlement.
 *
 * @return the timeToSettlement
 */
public double getTimeToSettlement() {
  return _timeToSettlement;
}

/**
 * Gets the number of observations expected as of trade inception.
 *
 * @return the nObsExpected
 */
public int getObsExpected() {
  return _nObsExpected;
}

/**
 * Gets the number of observations missed because of a market disruption.
 *
 * @return the nObsDisrupted
 */
public int getObsDisrupted() {
  return _nObsDisrupted;
}

/**
 * Gets the settlement currency.
 *
 * @return the currency
 */
public Currency getCurrency() {
  return _currency;
}

/**
 * Gets the variance strike (volStrike squared).
 *
 * @return the varStrike
 */
public double getVarStrike() {
  return _varStrike;
}

/**
 * Gets the variance notional.
 *
 * @return the varNotional
 */
public double getVarNotional() {
  return _varNotional;
}

/**
 * Gets the volatility strike, the square root of the variance strike.
 *
 * @return the volStrike
 */
public double getVolStrike() {
  return Math.sqrt(_varStrike);
}

/**
 * Gets the volatility notional, 2 * varNotional * volStrike.
 *
 * @return the volNotional
 */
public double getVolNotional() {
  return 2 * _varNotional * Math.sqrt(_varStrike);
}

/**
 * Gets the annualization factor.
 *
 * @return the annualizationFactor
 */
public double getAnnualizationFactor() {
  return _annualizationFactor;
}

/**
 * Gets the observations.
 * NOTE(review): this returns the internal array, so callers can mutate state;
 * consider returning a clone -- confirm no caller relies on identity first.
 *
 * @return the observations
 */
public double[] getObservations() {
  return _observations;
}

/**
 * Gets the observationWeights.
 * NOTE(review): this returns the internal array, so callers can mutate state.
 *
 * @return the observationWeights
 */
public final double[] getObservationWeights() {
  return _observationWeights;
}
/**
 * Computes a hash over all state fields.  Consistent with {@link #equals(Object)}.
 */
@Override
public int hashCode() {
  final int prime = 31;
  int result = 1;
  long temp;
  temp = Double.doubleToLongBits(_annualizationFactor);
  result = prime * result + (int) (temp ^ (temp >>> 32));
  result = prime * result + ((_currency == null) ? 0 : _currency.hashCode());
  result = prime * result + _nObsDisrupted;
  result = prime * result + _nObsExpected;
  result = prime * result + ((_observations == null) ? 0 : Arrays.hashCode(_observations));
  // BUGFIX: was Arrays.hashCode(_observations) -- a copy-paste error that left
  // the weights out of the hash even though equals() compares them.
  result = prime * result + ((_observationWeights == null) ? 0 : Arrays.hashCode(_observationWeights));
  temp = Double.doubleToLongBits(_timeToObsEnd);
  result = prime * result + (int) (temp ^ (temp >>> 32));
  temp = Double.doubleToLongBits(_timeToObsStart);
  result = prime * result + (int) (temp ^ (temp >>> 32));
  temp = Double.doubleToLongBits(_timeToSettlement);
  result = prime * result + (int) (temp ^ (temp >>> 32));
  temp = Double.doubleToLongBits(_varNotional);
  result = prime * result + (int) (temp ^ (temp >>> 32));
  temp = Double.doubleToLongBits(_varStrike);
  result = prime * result + (int) (temp ^ (temp >>> 32));
  return result;
}
/**
 * Field-by-field equality; doubles are compared via their raw long bits and
 * arrays element-wise, exactly as before.
 */
@Override
public boolean equals(final Object obj) {
  if (this == obj) {
    return true;
  }
  // also rejects null
  if (!(obj instanceof VarianceSwap)) {
    return false;
  }
  final VarianceSwap other = (VarianceSwap) obj;
  if (_currency == null ? other._currency != null : !_currency.equals(other._currency)) {
    return false;
  }
  return Double.doubleToLongBits(_annualizationFactor) == Double.doubleToLongBits(other._annualizationFactor)
      && _nObsDisrupted == other._nObsDisrupted
      && _nObsExpected == other._nObsExpected
      && Arrays.equals(_observationWeights, other._observationWeights)
      && Arrays.equals(_observations, other._observations)
      && Double.doubleToLongBits(_timeToObsEnd) == Double.doubleToLongBits(other._timeToObsEnd)
      && Double.doubleToLongBits(_timeToObsStart) == Double.doubleToLongBits(other._timeToObsStart)
      && Double.doubleToLongBits(_timeToSettlement) == Double.doubleToLongBits(other._timeToSettlement)
      && Double.doubleToLongBits(_varNotional) == Double.doubleToLongBits(other._varNotional)
      && Double.doubleToLongBits(_varStrike) == Double.doubleToLongBits(other._varStrike);
}
/**
 * Accepts a visitor carrying extra data and dispatches to
 * {@code visitVarianceSwap(this, data)}.
 *
 * @param visitor the visitor, not null
 * @param data visitor-specific data
 * @return the visitor's result
 */
@Override
public <S, T> T accept(final InstrumentDerivativeVisitor<S, T> visitor, final S data) {
  ArgumentChecker.notNull(visitor, "visitor");
  return visitor.visitVarianceSwap(this, data);
}

/**
 * Accepts a visitor and dispatches to {@code visitVarianceSwap(this)}.
 *
 * @param visitor the visitor, not null
 * @return the visitor's result
 */
@Override
public <T> T accept(final InstrumentDerivativeVisitor<?, T> visitor) {
  ArgumentChecker.notNull(visitor, "visitor");
  return visitor.visitVarianceSwap(this);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections.set;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.commons.collections.BulkTest;
/**
* Abstract test class for {@link SortedSet} methods and contracts.
* <p/>
* To use, subclass and override the {@link #makeEmptySet()}
* method. You may have to override other protected methods if your
* set is not modifiable, or if your set restricts what kinds of
* elements may be added; see {@link AbstractTestCollection} for more details.
*
* @author Stephen Colebourne
* @author Dieter Wimberger
* @version $Revision: 480461 $ $Date: 2006-11-29 03:14:27 -0500 (Wed, 29 Nov 2006) $
* @since Commons Collections 3.0
*/
public abstract class AbstractTestSortedSet extends AbstractTestSet {
    /**
     * JUnit constructor.
     *
     * @param name name for test
     */
    public AbstractTestSortedSet( String name ) {
        super(name);
    }
    //-----------------------------------------------------------------------
    /**
     * Verification extension, will check the order of elements,
     * the sets should already be verified equal.
     */
    public void verify() {
        super.verify();
        // Check that iterator returns elements in order and first() and last()
        // are consistent
        Iterator colliter = collection.iterator();
        Iterator confiter = confirmed.iterator();
        Object first = null;
        Object last = null;
        while( colliter.hasNext() ) {
            if( first == null ) {
                // remember the first element; 'last' tracks the most recent one
                first = colliter.next();
                last = first;
            } else {
                last = colliter.next();
            }
            // The confirmed fixture is a TreeSet (sorted), so any mismatch here
            // means the tested set iterates out of sorted order.
            assertEquals("Element appears to be out of order.", last, confiter.next());
        }
        if( collection.size() > 0 ) {
            // first/last observed during iteration must agree with the
            // SortedSet first()/last() contract methods.
            assertEquals("Incorrect element returned by first().", first,
                ( (SortedSet) collection ).first());
            assertEquals("Incorrect element returned by last().", last,
                ( (SortedSet) collection ).last());
        }
    }
    //-----------------------------------------------------------------------
    /**
     * Overridden because SortedSets don't allow null elements (normally).
     *
     * @return false
     */
    public boolean isNullSupported() {
        return false;
    }
    //-----------------------------------------------------------------------
    /**
     * Returns an empty {@link TreeSet} for use in modification testing.
     *
     * @return a confirmed empty collection
     */
    public Collection makeConfirmedCollection() {
        return new TreeSet();
    }
    //-----------------------------------------------------------------------
    /**
     * Return the {@link AbstractTestCollection#confirmed} fixture, but cast as a
     * SortedSet.
     */
    public SortedSet getConfirmedSortedSet() {
        return (SortedSet) confirmed;
    }
    //-----------------------------------------------------------------------
    /**
     * Override to return comparable objects.
     * Produces the odd Integers 1, 3, ..., 59 (mutually comparable and
     * disjoint from {@link #getOtherNonNullElements()}).
     */
    public Object[] getFullNonNullElements() {
        Object[] elements = new Object[30];
        for( int i = 0; i < 30; i++ ) {
            elements[i] = new Integer(i + i + 1);
        }
        return elements;
    }
    /**
     * Override to return comparable objects.
     * Produces the even Integers 2, 4, ..., 60.
     */
    public Object[] getOtherNonNullElements() {
        Object[] elements = new Object[30];
        for( int i = 0; i < 30; i++ ) {
            elements[i] = new Integer(i + i + 2);
        }
        return elements;
    }
    //-----------------------------------------------------------------------
    /**
     * Bulk test {@link SortedSet#subSet(Object, Object)}. This method runs through all of
     * the tests in {@link AbstractTestSortedSet}.
     * After modification operations, {@link #verify()} is invoked to ensure
     * that the set and the other collection views are still valid.
     *
     * @return a {@link AbstractTestSet} instance for testing a subset.
     */
    public BulkTest bulkTestSortedSetSubSet() {
        // test the middle third of the element range
        int length = getFullElements().length;
        int lobound = length / 3;
        int hibound = lobound * 2;
        return new TestSortedSetSubSet(lobound, hibound);
    }
    /**
     * Bulk test {@link SortedSet#headSet(Object)}. This method runs through all of
     * the tests in {@link AbstractTestSortedSet}.
     * After modification operations, {@link #verify()} is invoked to ensure
     * that the set and the other collection views are still valid.
     *
     * @return a {@link AbstractTestSet} instance for testing a headset.
     */
    public BulkTest bulkTestSortedSetHeadSet() {
        // headset below the two-thirds point of the element range
        int length = getFullElements().length;
        int lobound = length / 3;
        int hibound = lobound * 2;
        return new TestSortedSetSubSet(hibound, true);
    }
    /**
     * Bulk test {@link SortedSet#tailSet(Object)}. This method runs through all of
     * the tests in {@link AbstractTestSortedSet}.
     * After modification operations, {@link #verify()} is invoked to ensure
     * that the set and the other collection views are still valid.
     *
     * @return a {@link AbstractTestSet} instance for testing a tailset.
     */
    public BulkTest bulkTestSortedSetTailSet() {
        // tailset from the one-third point of the element range
        int length = getFullElements().length;
        int lobound = length / 3;
        return new TestSortedSetSubSet(lobound, false);
    }
    /**
     * Inner test harness that reruns the full sorted-set test suite against a
     * subSet/headSet/tailSet view of the enclosing test's set.
     */
    public class TestSortedSetSubSet extends AbstractTestSortedSet {
        // Which view is under test: TYPE_SUBSET, TYPE_HEADSET or TYPE_TAILSET.
        private int m_Type;
        // Index into the outer full-elements array of the low bound (tailSet/subSet).
        private int m_LowBound;
        // Index into the outer full-elements array of the high bound (headSet/subSet).
        private int m_HighBound;
        // Elements expected to lie within the tested view.
        private Object[] m_FullElements;
        // Alternative elements used for modification tests within the view.
        private Object[] m_OtherElements;
        /**
         * Constructs a head-set or tail-set test.
         *
         * @param bound index of the bounding element in the outer full-elements array
         * @param head true for headSet (exclusive upper bound), false for tailSet (inclusive lower bound)
         */
        public TestSortedSetSubSet( int bound, boolean head ) {
            super("TestSortedSetSubSet");
            if( head ) {
                //System.out.println("HEADSET");
                m_Type = TYPE_HEADSET;
                m_HighBound = bound;
                // full elements are the first 'bound' outer elements
                m_FullElements = new Object[bound];
                System.arraycopy(AbstractTestSortedSet.this.getFullElements(), 0, m_FullElements, 0, bound);
                // one fewer 'other' element so all of them stay inside the view's range
                m_OtherElements = new Object[bound - 1];
                System.arraycopy(//src src_pos dst dst_pos length
                    AbstractTestSortedSet.this.getOtherElements(), 0, m_OtherElements, 0, bound - 1);
                //System.out.println(new TreeSet(Arrays.asList(m_FullElements)));
                //System.out.println(new TreeSet(Arrays.asList(m_OtherElements)));
            } else {
                //System.out.println("TAILSET");
                m_Type = TYPE_TAILSET;
                m_LowBound = bound;
                Object[] allelements = AbstractTestSortedSet.this.getFullElements();
                //System.out.println("bound = "+bound +"::length="+allelements.length);
                // full elements are everything from 'bound' to the end
                m_FullElements = new Object[allelements.length - bound];
                System.arraycopy(allelements, bound, m_FullElements, 0, allelements.length - bound);
                m_OtherElements = new Object[allelements.length - bound - 1];
                System.arraycopy(//src src_pos dst dst_pos length
                    AbstractTestSortedSet.this.getOtherElements(), bound, m_OtherElements, 0, allelements.length - bound - 1);
                //System.out.println(new TreeSet(Arrays.asList(m_FullElements)));
                //System.out.println(new TreeSet(Arrays.asList(m_OtherElements)));
                //resetFull();
                //System.out.println(collection);
                //System.out.println(confirmed);
            }
        } //type
        /**
         * Constructs a subSet test over the half-open index range [lobound, hibound).
         *
         * @param lobound index of the inclusive low bound element
         * @param hibound index of the exclusive high bound element
         */
        public TestSortedSetSubSet( int lobound, int hibound ) {
            super("TestSortedSetSubSet");
            //System.out.println("SUBSET");
            m_Type = TYPE_SUBSET;
            m_LowBound = lobound;
            m_HighBound = hibound;
            int length = hibound - lobound;
            //System.out.println("Low=" + lobound + "::High=" + hibound + "::Length=" + length);
            m_FullElements = new Object[length];
            System.arraycopy(AbstractTestSortedSet.this.getFullElements(), lobound, m_FullElements, 0, length);
            m_OtherElements = new Object[length - 1];
            System.arraycopy(//src src_pos dst dst_pos length
                AbstractTestSortedSet.this.getOtherElements(), lobound, m_OtherElements, 0, length - 1);
            //System.out.println(new TreeSet(Arrays.asList(m_FullElements)));
            //System.out.println(new TreeSet(Arrays.asList(m_OtherElements)));
        }
        // Capability queries delegate to the enclosing test so the view is
        // exercised under the same constraints as the backing set.
        public boolean isNullSupported() {
            return AbstractTestSortedSet.this.isNullSupported();
        }
        public boolean isAddSupported() {
            return AbstractTestSortedSet.this.isAddSupported();
        }
        public boolean isRemoveSupported() {
            return AbstractTestSortedSet.this.isRemoveSupported();
        }
        public boolean isFailFastSupported() {
            return AbstractTestSortedSet.this.isFailFastSupported();
        }
        public Object[] getFullElements() {
            return m_FullElements;
        }
        public Object[] getOtherElements() {
            return m_OtherElements;
        }
        /**
         * Carves the configured view (subSet/headSet/tailSet) out of the given set,
         * using bound elements taken from the OUTER test's full-elements array.
         */
        private SortedSet getSubSet( SortedSet set ) {
            Object[] elements = AbstractTestSortedSet.this.getFullElements();
            switch( m_Type ) {
            case TYPE_SUBSET:
                return set.subSet(elements[m_LowBound], elements[m_HighBound]);
            case TYPE_HEADSET:
                return set.headSet(elements[m_HighBound]);
            case TYPE_TAILSET:
                return set.tailSet(elements[m_LowBound]);
            default:
                return null;
            }
        }
        public Set makeEmptySet() {
            SortedSet s = (SortedSet) AbstractTestSortedSet.this.makeEmptySet();
            return getSubSet(s);
        }
        public Set makeFullSet() {
            SortedSet s = (SortedSet) AbstractTestSortedSet.this.makeFullCollection();
            return getSubSet(s);
        }
        // Subset views need not be serializable even when the backing set is.
        public boolean isTestSerialization() {
            return false;
        }
        public BulkTest bulkTestSortedSetSubSet() {
            return null; // prevent infinite recursion
        }
        public BulkTest bulkTestSortedSetHeadSet() {
            return null; // prevent infinite recursion
        }
        public BulkTest bulkTestSortedSetTailSet() {
            return null; // prevent infinite recursion
        }
        static final int TYPE_SUBSET = 0;
        static final int TYPE_TAILSET = 1;
        static final int TYPE_HEADSET = 2;
    }
}
| |
/*
* RawRatioDataModel.java
*
* Created Jun 29, 2011
*
* Copyright 2006-2015 James F. Bowring and www.Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.Tripoli.dataModels;
import Jama.Matrix;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import org.earthtime.Tripoli.dataModels.collectorModels.AbstractCollectorModel;
import org.earthtime.Tripoli.dataModels.collectorModels.IonCounterCollectorModel;
import org.earthtime.Tripoli.fitFunctions.AbstractFunctionOfX;
import org.earthtime.Tripoli.fitFunctions.AbstractOverDispersionLMAlgorithmInterface;
import org.earthtime.Tripoli.fitFunctions.ConstantFitFunctionWithCovS;
import org.earthtime.Tripoli.fitFunctions.LevenbergMarquardGeneralSolverWithCovS;
import org.earthtime.Tripoli.fitFunctions.LevenbergMarquardGeneralSolverWithCovS.AbstractOverDispersionLMAlgorithm;
import org.earthtime.Tripoli.fitFunctions.LevenbergMarquardGeneralSolverWithVecV;
import org.earthtime.Tripoli.fitFunctions.LevenbergMarquardGeneralSolverWithVecV.AbstractOverDispersionLMVecAlgorithm;
import org.earthtime.Tripoli.fitFunctions.MeanFitFunction;
import org.earthtime.UPb_Redux.utilities.comparators.IntuitiveStringComparator;
import org.earthtime.dataDictionaries.FitFunctionTypeEnum;
import org.earthtime.dataDictionaries.IsotopeNames;
import org.earthtime.dataDictionaries.RawRatioNames;
import org.earthtime.statistics.NonParametricStats;
import org.earthtime.utilities.jamaHelpers.MatrixRemover;
/**
*
* @author James F. Bowring
*/
public class RawRatioDataModel //
implements Serializable, Comparable<RawRatioDataModel>, DataModelInterface, DataModelFitFunctionInterface {
    // Class variables
    private static final long serialVersionUID = 3111511502335804607L;
    /**
     * When true, uncertainty propagation and fit-function generation use the
     * full-covariance solver (LevenbergMarquardGeneralSolverWithCovS); when
     * false, the vector-variance solver (LevenbergMarquardGeneralSolverWithVecV).
     */
    private boolean USING_FULL_PROPAGATION;
    private final RawRatioNames rawRatioName;
    // numerator isotope of the ratio
    private final DataModelInterface topIsotope;
    // denominator isotope of the ratio
    private final DataModelInterface botIsotope;
    // raw top/bot corrected-intensity ratios (plotting only per dec 2012 note)
    private double[] ratios;
    // log(top) - log(bot) per acquisition; basis for uncertainty propagation
    private double[] logRatios;
    // per-acquisition fractionation factors derived from logRatios and standardValue
    private double[] alphas;
    private double[] fitFunctionLogValues;
    private double[] correctedRatios;
    // residuals of logRatios about the down-hole fit function (active points only)
    private double[] logDifferencesFromWeightedMean;
    private boolean usedForFractionationCorrections;
    // oct 2014
    private boolean usedForCommonLeadCorrections;
    // reference ratio value for the standard; used by calculateAlpha
    private double standardValue;
    // per-acquisition inclusion flags; kept in sync with the isotope models
    private boolean[] dataActiveMap;
    private long COLLECTOR_DATA_FREQUENCY_MILLISECS;
    // values calculated about the residuals from the fitted down hole fractionation curve
    private double meanOfResidualsFromFittedFractionation;
    private double stdErrOfmeanOfResidualsFromFittedFractionation;
    private double meanOfCorrectedRatios;
    private double stdDevOfCorrectedRatios;
    private double stdErrOfMeanCorrectedRatios;
    // used for individual intercept fractionation
    private Map<String, AbstractFunctionOfX> logRatioFitFunctionsNoOD;
    private Map<String, AbstractFunctionOfX> logRatioFitFunctionsWithOD;
    private AbstractFunctionOfX downHoleFitFunction;
    /**
     * True when the over-dispersion (OD) fit-function variants are preferred
     * (set true in the constructor) — NOTE(review): confirm against selection logic.
     */
    protected boolean overDispersionSelected;
    private FitFunctionTypeEnum selectedFitFunctionType;
    private boolean overDispersionSelectedDownHole;
    // true when either isotope is below detection; ratio arrays stay zero-filled
    private boolean belowDetection;
    // full covariance of log-ratios, computed once with an all-true active map
    private Matrix SlogRatioX_Yfull;
    // active-row/column selection of SlogRatioX_Yfull; transient, rebuilt on demand
    private transient Matrix SlogRatioX_Y;
    private transient Matrix matrixSxyod;
    private boolean calculatedInitialFitFunctions;
    // these three introduced feb 2013 to streamline choice of points in function fitting
    private double[] activeXvalues;
    private double[] activeYvalues;
    private boolean[] activeData;//dataActiveMap with inactive removed, i.e. all true
    // nov 2014
    private Matrix topSopbclr;
    private Matrix botSopbclr;
    /**
     * Constructs a raw-ratio model for the given numerator/denominator isotope pair.
     * All derived arrays and fit-function maps start empty; over-dispersion is
     * selected by default and the initial fit-function type is LINE.
     *
     * @param rawRatioName name of this ratio
     * @param topIsotope numerator isotope data model
     * @param botIsotope denominator isotope data model
     * @param usedForFractionationCorrections true if this ratio participates in fractionation corrections
     * @param usedForCommonLeadCorrection the value of
     * usedForCommonLeadCorrection
     * @param collectorDataFrequencyMillisecs collector acquisition period in milliseconds
     */
    public RawRatioDataModel( //
            RawRatioNames rawRatioName, DataModelInterface topIsotope, DataModelInterface botIsotope, boolean usedForFractionationCorrections, boolean usedForCommonLeadCorrection, long collectorDataFrequencyMillisecs) {
        this.rawRatioName = rawRatioName;
        this.topIsotope = topIsotope;
        this.botIsotope = botIsotope;
        this.usedForFractionationCorrections = usedForFractionationCorrections;
        this.usedForCommonLeadCorrections = usedForCommonLeadCorrection;
        this.COLLECTOR_DATA_FREQUENCY_MILLISECS = collectorDataFrequencyMillisecs;
        this.USING_FULL_PROPAGATION = true;
        // derived data arrays are lazily populated by calculateRawAndLogRatios()
        this.correctedRatios = null;
        this.ratios = null;
        this.logRatios = null;
        this.alphas = null;
        this.fitFunctionLogValues = null;
        this.logDifferencesFromWeightedMean = null;
        this.meanOfResidualsFromFittedFractionation = 0.0;
        this.stdErrOfmeanOfResidualsFromFittedFractionation = 0.0;
        this.meanOfCorrectedRatios = 0.0;
        this.stdDevOfCorrectedRatios = 0.0;
        this.stdErrOfMeanCorrectedRatios = 0.0;
        // TreeMaps keyed by fit-function short name
        this.logRatioFitFunctionsNoOD = new TreeMap<>();
        this.logRatioFitFunctionsWithOD = new TreeMap<>();
        this.overDispersionSelected = true;
        this.selectedFitFunctionType = FitFunctionTypeEnum.LINE;
        this.overDispersionSelectedDownHole = true;
        this.belowDetection = false;
        this.SlogRatioX_Yfull = null;
        this.SlogRatioX_Y = null;
        this.calculatedInitialFitFunctions = false;
    }
/**
*
* @param rm
* @return
*/
@Override
public int compareTo(RawRatioDataModel rm) {
String rmName = rm.getRawRatioModelName().getName();
String myName = this.getRawRatioModelName().getName();
Comparator<String> intuitiveString = new IntuitiveStringComparator<>();
return intuitiveString.compare(myName, rmName);
}
// TODO: equals and hashcode
    /**
     * Toggles the inclusion flag of a single data acquisition, keeping the
     * numerator and denominator isotope models in sync with this ratio.
     *
     * @param index index of the acquisition to toggle
     * @param included true to include the acquisition in calculations
     */
    @Override
    public void toggleOneDataAquisition(int index, boolean included) {
        dataActiveMap[index] = included;
        topIsotope.toggleOneDataAquisition(index, included);
        botIsotope.toggleOneDataAquisition(index, included);
    }
    /**
     * Applies the global masking array (from MaskingSingleton) to this ratio's
     * data-active map and propagates the same masking to both isotope models.
     */
    @Override
    public void applyMaskingArray() {
        // clone so the singleton cannot mutate our map in place
        dataActiveMap = MaskingSingleton.getInstance().applyMask(dataActiveMap.clone());// .getMaskingArray().clone();
        topIsotope.applyMaskingArray();
        botIsotope.applyMaskingArray();
    }
    /**
     * Computes the raw ratios (top/bot baseline-corrected intensities) and the
     * log-ratios (difference of log-corrected intensities) for every
     * acquisition, then recomputes the alphas. If either isotope is below
     * detection, the arrays are left zero-filled and {@code belowDetection}
     * is set instead.
     */
    public void calculateRawAndLogRatios() {
        // size all per-acquisition arrays from the numerator's intensity count
        ratios = new double[((RawIntensityDataModel) topIsotope).getOnPeakVirtualCollector().getIntensities().length];
        logRatios = new double[ratios.length];
        if (((RawIntensityDataModel) topIsotope).isBelowDetection() || ((RawIntensityDataModel) botIsotope).isBelowDetection()) {
            belowDetection = true;
        } else {
            double[] topCorrectedIntensities;
            double[] botCorrectedIntensities;
            double[] topLogCorrectedIntensities;
            double[] botLogCorrectedIntensities;
            topCorrectedIntensities = ((RawIntensityDataModel) topIsotope).getOnPeakVirtualCollector().getCorrectedIntensities();
            botCorrectedIntensities = ((RawIntensityDataModel) botIsotope).getOnPeakVirtualCollector().getCorrectedIntensities();
            topLogCorrectedIntensities = ((RawIntensityDataModel) topIsotope).getOnPeakVirtualCollector().getLogCorrectedIntensities();
            botLogCorrectedIntensities = ((RawIntensityDataModel) botIsotope).getOnPeakVirtualCollector().getLogCorrectedIntensities();
            alphas = new double[topCorrectedIntensities.length];
            fitFunctionLogValues = new double[topCorrectedIntensities.length];
            // calculate ratios
            for (int i = 0; i < ratios.length; i++) {
                double top = topCorrectedIntensities[i];
                double bot = botCorrectedIntensities[i];
                // dec 2012 these ratios are going to be for plotting only as we switch to log ratios for unct prop
                ratios[i] = top / bot;
                // log(top/bot) computed as a difference of pre-logged intensities
                logRatios[i] = topLogCorrectedIntensities[i] - botLogCorrectedIntensities[i];
            }
            calculateAlphas();
        }
    }
/**
*
* @return
*/
public String outputLogRatios() {
String retval = //
this.getDataModelName() + " \tLogRatios:\t";
for (int i = 0; i < logRatios.length; i++) {
retval += logRatios[i] + ", ";
}
return retval;
}
/**
*
*/
public void propagateUnctInRatios() {
// April 2015
// refactor to improve performance
// Since SLogRatioX_Y is calculated on the first pass, we can merely
// toggle rows and columns per dataactive map instead of recalculating everything
// make the current version transient and save only the full
if (SlogRatioX_Yfull == null) {
// create all true dataActiveMap for initial pass
boolean[] allTrueDataActiveMap = new boolean[dataActiveMap.length];
Arrays.fill(allTrueDataActiveMap, Boolean.TRUE);
boolean[] topDataActiveMap = ((RawIntensityDataModel) topIsotope).getOnPeakVirtualCollector().getDataActiveMap();
((RawIntensityDataModel) topIsotope).getOnPeakVirtualCollector().setDataActiveMap(allTrueDataActiveMap);
((RawIntensityDataModel) topIsotope).prepareDataForFitFunctions();
((RawIntensityDataModel) topIsotope).propagateUnctInBaselineCorrOnPeakIntensities();
boolean[] botDataActiveMap = ((RawIntensityDataModel) botIsotope).getOnPeakVirtualCollector().getDataActiveMap();
((RawIntensityDataModel) botIsotope).getOnPeakVirtualCollector().setDataActiveMap(allTrueDataActiveMap);//dataActiveMap.clone());
((RawIntensityDataModel) botIsotope).prepareDataForFitFunctions();
((RawIntensityDataModel) botIsotope).propagateUnctInBaselineCorrOnPeakIntensities();
Matrix numerator = ((RawIntensityDataModel) topIsotope).getSopbclr();
Matrix denominator = ((RawIntensityDataModel) botIsotope).getSopbclr();
if ((numerator != null) & (denominator != null)) {
// only if both numerator and denominator are ion counters do we do matrixSxyod below
if ((topIsotope.getCollectorModel() //
instanceof IonCounterCollectorModel)//
&& //
(botIsotope.getCollectorModel() //
instanceof IonCounterCollectorModel)//
&&//
// v3 jan 2013 check if the SAME ion counter
hasTwoIdenticalIonCounters()) {
matrixSxyod = //
((RawIntensityDataModel) topIsotope).getColumnVectorOfCorrectedOnPeakIntensities()//
.times(((RawIntensityDataModel) botIsotope).getColumnVectorOfCorrectedOnPeakIntensities().transpose());
double deadtimeOneSigmaAbsSqr = //
((IonCounterCollectorModel) botIsotope//
.getCollectorModel()).getDeadTime().getOneSigmaAbs().movePointLeft(0).pow(2).doubleValue();
matrixSxyod.timesEquals(deadtimeOneSigmaAbsSqr);
SlogRatioX_Yfull = numerator.plus(denominator).minus(matrixSxyod.times(2.0));
} else {
try {
SlogRatioX_Yfull = numerator.plus(denominator);
} catch (Exception e) {
System.out.println("SlogRatioX_Yfull trouble" + e.getMessage());
}
}
// restore active data maps to normal status
((RawIntensityDataModel) topIsotope).getOnPeakVirtualCollector().setDataActiveMap(topDataActiveMap);
((RawIntensityDataModel) botIsotope).getOnPeakVirtualCollector().setDataActiveMap(botDataActiveMap);
topSopbclr = numerator.copy();
botSopbclr = denominator.copy();
}
((DataModelFitFunctionInterface) topIsotope).cleanupUnctCalcs();
((DataModelFitFunctionInterface) botIsotope).cleanupUnctCalcs();
System.gc();
}
calculateSlogRatioX_Y();
}
    /**
     * Rebuilds the active-data covariance selection {@code SlogRatioX_Y}
     * using this model's current data-active map.
     */
    public void calculateSlogRatioX_Y() {
        calculateSlogRatioX_Y(dataActiveMap);
    }
public void calculateSlogRatioX_Y(boolean[] mapOfActiveData) {
// choose rows and columns based on active data
// nov 2014 need to catch special case where */pb204 ratios have different dataactivemaps
if (SlogRatioX_Yfull != null) {
ArrayList<Integer> selectedRowsColsList = new ArrayList<>();
for (int i = 0; i < mapOfActiveData.length; i++) {
if (mapOfActiveData[i]) {
selectedRowsColsList.add(i);
}
}
int[] selectedRowsCols = new int[selectedRowsColsList.size()];
for (int i = 0; i < selectedRowsCols.length; i++) {
selectedRowsCols[i] = selectedRowsColsList.get(i);
}
SlogRatioX_Y = SlogRatioX_Yfull.getMatrix(selectedRowsCols, selectedRowsCols);
} else {
SlogRatioX_Y = null;
}
}
/**
*
* @return
*/
public String outputLogRatioFitFunctionParameters() {
String retval = getDataModelName() + "\n";
AbstractFunctionOfX fOfXcurrent = getSelectedFitFunction();
if (fOfXcurrent == null) {
retval += "NO FIT FUNCTION";
} else {
retval += fOfXcurrent.showParameters() + "\n";
}
return retval;
}
/**
*
* @return
*/
public String outputLogRatioFitFunctionYInterceptData() {
String retval = getDataModelName() + "\n";
AbstractFunctionOfX fOfXcurrent = getSelectedFitFunction();
if (fOfXcurrent == null) {
retval += "NO FIT FUNCTION";
} else {
retval += fOfXcurrent.showYInterceptData() + "\n";
}
return retval;
}
/**
*
* @return
*/
public double[] calculateAlphas() {
for (int i = 0; i < alphas.length; i++) {
alphas[i] = calculateAlpha(logRatios[i]);
}
return alphas;
}
/**
*
* @param logRatio
* @return
*/
public double calculateAlpha(double logRatio) {
return (standardValue / Math.exp(logRatio)) - 1.0;
}
/**
*
*/
public void generateFitFunctionsForDownhole() {
// calculate the logDifferencesFromWeightedMean between logratios and fit function
if (downHoleFitFunction != null) {
boolean[] dataCommonActiveMap = MaskingSingleton.getInstance().getMaskingArray();
int countOfActiveData = 0;
for (int i = 0; i < dataCommonActiveMap.length; i++) {
if (dataCommonActiveMap[i]) {
countOfActiveData++;
}
}
activeData = new boolean[countOfActiveData];
logDifferencesFromWeightedMean = new double[countOfActiveData];
double[] normalizedOnPeakAquireTimes = getNormalizedOnPeakAquireTimes();
ArrayList<Integer> matrixIndicesToRemove = new ArrayList<>();
int index = 0;
for (int i = 0; i < dataCommonActiveMap.length; i++) {
if (dataCommonActiveMap[i]) {
activeData[index] = true;
logDifferencesFromWeightedMean[index] = downHoleFitFunction.f(normalizedOnPeakAquireTimes[i]) - logRatios[i];
index++;
}
}
Matrix matrixSfCopy = downHoleFitFunction.getMatrixSf().copy();
calculateSlogRatioX_Y(dataCommonActiveMap);
Matrix SfPlusSlogRarioX_Y = matrixSfCopy.plus(getSlogRatioX_Y(false));
AbstractOverDispersionLMAlgorithm algorithmForMEAN = LevenbergMarquardGeneralSolverWithCovS.getInstance()//
.getSelectedLMAlgorithm(//
FitFunctionTypeEnum.MEAN,//
activeData, //
null, //this is mean so x does not matter
logDifferencesFromWeightedMean,//
SfPlusSlogRarioX_Y,//matrixSfCopy.plus(getSlogRatioX_Y(false)),//
false);
// algorithmForMEAN contains both the non OD and OD versions
AbstractFunctionOfX fOfX_MEAN = algorithmForMEAN.getInitialFofX();
AbstractFunctionOfX fOfX_MEAN_OD;
if ((fOfX_MEAN != null) && fOfX_MEAN.verifyPositiveVariances()) {
fOfX_MEAN_OD = algorithmForMEAN.getFinalFofX();
if ((fOfX_MEAN_OD != null) && fOfX_MEAN_OD.verifyPositiveVariances()) {
} else {
fOfX_MEAN_OD = fOfX_MEAN;
}
} else {
// to handle really bad data sets, for which LM wont work, do good old fashioned mean
System.out.println("LM would not fit mean , so using arithmetic mean fit");
fOfX_MEAN = MeanFitFunction.getInstance()//
.getFunctionOfX(//
activeData, //
activeXvalues, //
logDifferencesFromWeightedMean,//
SfPlusSlogRarioX_Y,//matrixSfCopy.plus(getSlogRatioX_Y(false)),//
false);
fOfX_MEAN_OD = fOfX_MEAN;
}
meanOfResidualsFromFittedFractionation = fOfX_MEAN_OD.getA();
stdErrOfmeanOfResidualsFromFittedFractionation = fOfX_MEAN_OD.getStdErrOfA();
logRatioFitFunctionsNoOD.put(FitFunctionTypeEnum.MEAN_DH.getName(), fOfX_MEAN);
logRatioFitFunctionsWithOD.put(FitFunctionTypeEnum.MEAN_DH.getName(), fOfX_MEAN_OD);
overDispersionSelectedDownHole = true;
}
}
/**
*
*/
@Override
public void calculateCorrectedRatioStatistics() {
NonParametricStats nonParametricStats = NonParametricStats.getInstance();
if (correctedRatios != null) {
nonParametricStats.calculateStats(dataActiveMap, correctedRatios);
meanOfCorrectedRatios = nonParametricStats.getSampleMean();
stdDevOfCorrectedRatios = Math.sqrt(nonParametricStats.getVariance());
stdErrOfMeanCorrectedRatios = nonParametricStats.getStdErrSampleMean();
}
}
private void generateCONSTANTfitFunction() {
// CONSTANT *********************************************************
AbstractFunctionOfX fOfX_CONSTANT;
if (USING_FULL_PROPAGATION) {
fOfX_CONSTANT = ConstantFitFunctionWithCovS.getInstance().getFunctionOfX(//
activeData, //
activeXvalues, //
activeYvalues, //,
SlogRatioX_Y, false);
} else {
fOfX_CONSTANT = ConstantFitFunctionWithCovS.getInstance().getFunctionOfX(//
activeData, //
activeXvalues, //
activeYvalues, //,
SlogRatioX_Y, false);
}
logRatioFitFunctionsNoOD.put(FitFunctionTypeEnum.CONSTANT.getName(), fOfX_CONSTANT);
logRatioFitFunctionsWithOD.put(FitFunctionTypeEnum.CONSTANT.getName(), fOfX_CONSTANT);
}
    /**
     * Generates a forced MEANRATIO fit function for *&#47;204 common-lead ratios
     * that contain too many negative values for log-ratio fitting (>10%
     * negative per the inline note). The "mean ratio" is the quotient of the
     * top and bottom isotopes' arithmetic means of corrected intensities.
     * Registers the function under MEANRATIO in both fit-function maps and
     * removes any plain MEAN entry.
     */
    private void generateMEANfitFunctionForPbcRatiosWhereNegativeValues() {
        //create mean ratio (no log due to negative values)- we are here because this ratio is */204 and has >10% negative values
        NonParametricStats nonParametricStats = NonParametricStats.getInstance();
        // arithmetic mean of the numerator's corrected intensities
        double[] isotopeOPBC = ((RawIntensityDataModel) topIsotope).getOnPeakVirtualCollector().getCorrectedIntensities();
        nonParametricStats.calculateStats(dataActiveMap, isotopeOPBC);
        ((RawIntensityDataModel) topIsotope).setForcedMeanForCommonLeadRatios(nonParametricStats.getSampleMean());
        // arithmetic mean of the denominator's corrected intensities
        isotopeOPBC = ((RawIntensityDataModel) botIsotope).getOnPeakVirtualCollector().getCorrectedIntensities();
        nonParametricStats.calculateStats(dataActiveMap, isotopeOPBC);
        ((RawIntensityDataModel) botIsotope).setForcedMeanForCommonLeadRatios(nonParametricStats.getSampleMean());
        double forcedMean;
        try {
            forcedMean = ((RawIntensityDataModel) topIsotope).getForcedMeanForCommonLeadRatios() / ((RawIntensityDataModel) botIsotope).getForcedMeanForCommonLeadRatios();
        } catch (Exception e) {
            forcedMean = 0.0;
        }
        // flat array of the forced mean; one value per active data point
        double[] forcedMeanValues = new double[activeData.length];
        for (int i = 0; i < forcedMeanValues.length; i++) {
            forcedMeanValues[i] = forcedMean;
        }
        AbstractFunctionOfX fOfX_FORCEDMEAN;
        // april 2015 replacing code below
        // the matrix slot carries the two component means (top, bottom)
        Matrix matrixTopMeanBotMean = new Matrix(//
                new double[]{//
                    ((RawIntensityDataModel) topIsotope).getForcedMeanForCommonLeadRatios(), //
                    ((RawIntensityDataModel) botIsotope).getForcedMeanForCommonLeadRatios()}, //
                1);
        fOfX_FORCEDMEAN = MeanFitFunction.getInstance()//
                .getFunctionOfX(//
                        activeData, //
                        activeXvalues, //
                        activeYvalues,//
                        matrixTopMeanBotMean,//null, //
                        false);
        fOfX_FORCEDMEAN.setShortName(FitFunctionTypeEnum.MEANRATIO);
        logRatioFitFunctionsNoOD.put(FitFunctionTypeEnum.MEANRATIO.getName(), fOfX_FORCEDMEAN);
        logRatioFitFunctionsWithOD.put(FitFunctionTypeEnum.MEANRATIO.getName(), fOfX_FORCEDMEAN);
        // MEANRATIO supersedes any plain MEAN entry
        logRatioFitFunctionsNoOD.remove(FitFunctionTypeEnum.MEAN.getName());
        logRatioFitFunctionsWithOD.remove(FitFunctionTypeEnum.MEAN.getName());
    }
private boolean generateMEANfitFunctionUsingLM() {
boolean retVal;
// algorithmForMEAN contains both the non OD and OD versions
AbstractFunctionOfX fOfX_MEAN;
AbstractFunctionOfX fOfX_MEAN_OD;
if (USING_FULL_PROPAGATION) {
try {
AbstractOverDispersionLMAlgorithm algorithmForMEAN = LevenbergMarquardGeneralSolverWithCovS.getInstance()//
.getSelectedLMAlgorithm(//
FitFunctionTypeEnum.MEAN,//
activeData, //
activeXvalues, //
activeYvalues, //,
SlogRatioX_Y, false);
fOfX_MEAN = algorithmForMEAN.getInitialFofX();
fOfX_MEAN_OD = algorithmForMEAN.getFinalFofX();
} catch (Exception e) {
fOfX_MEAN = null;
fOfX_MEAN_OD = null;
}
} else {
try {
AbstractOverDispersionLMVecAlgorithm algorithmForMEAN = LevenbergMarquardGeneralSolverWithVecV.getInstance()//
.getSelectedLMAlgorithm(//
FitFunctionTypeEnum.MEAN,//
activeData, //
activeXvalues, //
activeYvalues, //,
SlogRatioX_Y, false);
fOfX_MEAN = algorithmForMEAN.getInitialFofX();
fOfX_MEAN_OD = algorithmForMEAN.getFinalFofX();
} catch (Exception e) {
fOfX_MEAN = null;
fOfX_MEAN_OD = null;
}
}
if ((fOfX_MEAN != null) && fOfX_MEAN.verifyPositiveVariances()) {
if (logRatioFitFunctionsNoOD.containsKey(fOfX_MEAN.getShortNameString())) {
logRatioFitFunctionsNoOD.remove(fOfX_MEAN.getShortNameString());
logRatioFitFunctionsNoOD.put(fOfX_MEAN.getShortNameString(), fOfX_MEAN);
} else {
logRatioFitFunctionsNoOD.put(fOfX_MEAN.getShortNameString(), fOfX_MEAN);
}
if ((fOfX_MEAN_OD != null) && fOfX_MEAN_OD.verifyPositiveVariances()) {
if (logRatioFitFunctionsWithOD.containsKey(fOfX_MEAN_OD.getShortNameString())) {
logRatioFitFunctionsWithOD.remove(fOfX_MEAN_OD.getShortNameString());
logRatioFitFunctionsWithOD.put(fOfX_MEAN_OD.getShortNameString(), fOfX_MEAN_OD);
} else {
logRatioFitFunctionsWithOD.put(fOfX_MEAN_OD.getShortNameString(), fOfX_MEAN_OD);
}
} else {
logRatioFitFunctionsWithOD.put(fOfX_MEAN.getShortNameString(), fOfX_MEAN);
}
retVal = true;
} else {
// to handle really bad data sets, for which LM wont work, do good old fashioned mean
System.out.println("LM would not fit mean , so using arithmetic mean fit");
// nov 2014 - for the case of common lead */pb204 we use the matrix slot for a matrix containing the two means top and bottom
Matrix matrixTopMeanBotMean = new Matrix(//
new double[]{//
((RawIntensityDataModel) topIsotope).getForcedMeanForCommonLeadRatios(), //
((RawIntensityDataModel) botIsotope).getForcedMeanForCommonLeadRatios()}, //
1);
fOfX_MEAN = MeanFitFunction.getInstance()//
.getFunctionOfX(//
activeData, //
activeXvalues, //
activeYvalues,//
matrixTopMeanBotMean,//null, //
false);
logRatioFitFunctionsNoOD.put(FitFunctionTypeEnum.MEAN.getName(), fOfX_MEAN);
logRatioFitFunctionsWithOD.put(FitFunctionTypeEnum.MEAN.getName(), fOfX_MEAN);
selectedFitFunctionType = FitFunctionTypeEnum.MEAN;
retVal = false;
}
return retVal;
}
private void generateLINEfitFunctionUsingLM() {
// algorithmForLINE contains both the non OD and OD versions
AbstractFunctionOfX fOfX_LINE;
AbstractFunctionOfX fOfX_LINE_OD;
if (USING_FULL_PROPAGATION) {
AbstractOverDispersionLMAlgorithm algorithmForLINE = LevenbergMarquardGeneralSolverWithCovS.getInstance()//
.getSelectedLMAlgorithm(//
FitFunctionTypeEnum.LINE,//
activeData, //
activeXvalues, //
activeYvalues, //,
SlogRatioX_Y, false);
fOfX_LINE = algorithmForLINE.getInitialFofX();
fOfX_LINE_OD = algorithmForLINE.getFinalFofX();
} else {
AbstractOverDispersionLMVecAlgorithm algorithmForLINE = LevenbergMarquardGeneralSolverWithVecV.getInstance()//
.getSelectedLMAlgorithm(//
FitFunctionTypeEnum.LINE,//
activeData, //
activeXvalues, //
activeYvalues, //,
SlogRatioX_Y, false);
fOfX_LINE = algorithmForLINE.getInitialFofX();
fOfX_LINE_OD = algorithmForLINE.getFinalFofX();
}
if ((fOfX_LINE != null) && fOfX_LINE.verifyPositiveVariances()) {
if (logRatioFitFunctionsNoOD.containsKey(fOfX_LINE.getShortNameString())) {
logRatioFitFunctionsNoOD.remove(fOfX_LINE.getShortNameString());
logRatioFitFunctionsNoOD.put(fOfX_LINE.getShortNameString(), fOfX_LINE);
} else {
logRatioFitFunctionsNoOD.put(fOfX_LINE.getShortNameString(), fOfX_LINE);
}
if ((fOfX_LINE_OD != null) && fOfX_LINE_OD.verifyPositiveVariances()) {
if (logRatioFitFunctionsWithOD.containsKey(fOfX_LINE_OD.getShortNameString())) {
logRatioFitFunctionsWithOD.remove(fOfX_LINE_OD.getShortNameString());
logRatioFitFunctionsWithOD.put(fOfX_LINE_OD.getShortNameString(), fOfX_LINE_OD);
} else {
logRatioFitFunctionsWithOD.put(fOfX_LINE_OD.getShortNameString(), fOfX_LINE_OD);
}
} else {
logRatioFitFunctionsWithOD.put(fOfX_LINE.getShortNameString(), fOfX_LINE);
}
} else {
logRatioFitFunctionsNoOD.remove(FitFunctionTypeEnum.LINE.getName());
logRatioFitFunctionsWithOD.remove(FitFunctionTypeEnum.LINE.getName());
selectedFitFunctionType = FitFunctionTypeEnum.MEAN;
}
}
// Fits an exponential to the active log-ratio data in three stages:
// EXPFAST seeds EXPMAT, and when EXPMAT's MSWD >= 1.0 an over-dispersed
// EXPONENTIAL fit is attempted with the same seed. Failures fall back to LINE.
private void generateEXPONENTIALfitFunctionUsingLM() {
    System.out.println("trying expfast");
    // stage 1: quick EXPFAST fit used only to seed the later solvers
    AbstractFunctionOfX fOfX_ExpFast = null;
    if (USING_FULL_PROPAGATION) {
        AbstractOverDispersionLMAlgorithm algorithmForEXPFAST = LevenbergMarquardGeneralSolverWithCovS.getInstance().getSelectedLMAlgorithm(//
                FitFunctionTypeEnum.EXPFAST,//
                activeData, //
                activeXvalues, //
                activeYvalues, //
                SlogRatioX_Y, false);
        fOfX_ExpFast = algorithmForEXPFAST.getInitialFofX();
    } else {
        AbstractOverDispersionLMVecAlgorithm algorithmForEXPFAST = LevenbergMarquardGeneralSolverWithVecV.getInstance().getSelectedLMAlgorithm(//
                FitFunctionTypeEnum.EXPFAST,//
                activeData, //
                activeXvalues, //
                activeYvalues, //
                SlogRatioX_Y, false);
        fOfX_ExpFast = algorithmForEXPFAST.getInitialFofX();
    }
    AbstractOverDispersionLMAlgorithmInterface algorithmForEXPMAT;
    if (fOfX_ExpFast != null) //
    {
        // stage 2: EXPMAT fit seeded with the EXPFAST result
        System.out.println("now trying expmat with expfast input");
        if (USING_FULL_PROPAGATION) {
            algorithmForEXPMAT = LevenbergMarquardGeneralSolverWithCovS.getInstance().getSelectedLMAlgorithmUsingIntialFofX(//
                    FitFunctionTypeEnum.EXPMAT,//
                    activeData, //
                    activeXvalues, //
                    activeYvalues, //
                    SlogRatioX_Y, false, //
                    fOfX_ExpFast);
        } else {
            algorithmForEXPMAT = LevenbergMarquardGeneralSolverWithVecV.getInstance().getSelectedLMAlgorithmUsingIntialFofX(//
                    FitFunctionTypeEnum.EXPMAT,//
                    activeData, //
                    activeXvalues,
                    activeYvalues, //
                    SlogRatioX_Y, false, //
                    fOfX_ExpFast);
        }
        AbstractFunctionOfX fOfX_EXPMAT = algorithmForEXPMAT.getFinalFofX();
        if ((fOfX_EXPMAT != null) && (fOfX_EXPMAT.verifyPositiveVariances())) {
            // store (replace) the EXPMAT fit in the no-OD map
            if (logRatioFitFunctionsNoOD.containsKey(fOfX_EXPMAT.getShortNameString())) {
                logRatioFitFunctionsNoOD.remove(fOfX_EXPMAT.getShortNameString());
                logRatioFitFunctionsNoOD.put(fOfX_EXPMAT.getShortNameString(), fOfX_EXPMAT);
            } else {
                logRatioFitFunctionsNoOD.put(fOfX_EXPMAT.getShortNameString(), fOfX_EXPMAT);
            }
            if (fOfX_EXPMAT.getMSWD() >= 1.0) {
                // stage 3: scatter exceeds expectation - try the over-dispersed fit
                System.out.println("now trying expOD with expfast input");
                AbstractFunctionOfX fOfX_EXPOD;
                if (USING_FULL_PROPAGATION) {
                    AbstractOverDispersionLMAlgorithm algorithmForEXPOD = LevenbergMarquardGeneralSolverWithCovS.getInstance().getSelectedLMAlgorithmUsingIntialFofX(//
                            FitFunctionTypeEnum.EXPONENTIAL,//
                            activeData, //
                            activeXvalues, //
                            activeYvalues, //
                            SlogRatioX_Y, false,//
                            fOfX_ExpFast);
                    fOfX_EXPOD = algorithmForEXPOD.getFinalFofX();
                } else {
                    AbstractOverDispersionLMVecAlgorithm algorithmForEXPOD = LevenbergMarquardGeneralSolverWithVecV.getInstance().getSelectedLMAlgorithmUsingIntialFofX(//
                            FitFunctionTypeEnum.EXPONENTIAL,//
                            activeData, //
                            activeXvalues, //
                            activeYvalues, //
                            SlogRatioX_Y, false,//
                            fOfX_ExpFast);//
                    fOfX_EXPOD = algorithmForEXPOD.getFinalFofX();
                }
                if ((fOfX_EXPOD != null) && (fOfX_EXPOD.verifyPositiveVariances())) {
                    if (logRatioFitFunctionsWithOD.containsKey(fOfX_EXPOD.getShortNameString())) {
                        logRatioFitFunctionsWithOD.remove(fOfX_EXPOD.getShortNameString());
                        logRatioFitFunctionsWithOD.put(fOfX_EXPOD.getShortNameString(), fOfX_EXPOD);
                    } else {
                        logRatioFitFunctionsWithOD.put(fOfX_EXPOD.getShortNameString(), fOfX_EXPOD);
                    }
                } else {
                    // OD fit failed: selection falls back to LINE
                    logRatioFitFunctionsWithOD.remove(FitFunctionTypeEnum.EXPONENTIAL.getName());
                    selectedFitFunctionType = FitFunctionTypeEnum.LINE;
                }
            } else {
                // MSWD < 1: no over-dispersion needed; reuse EXPMAT in the OD map
                logRatioFitFunctionsWithOD.put(fOfX_EXPMAT.getShortNameString(), fOfX_EXPMAT);
            }
        } else {
            // EXPMAT failed: drop all exponential entries and fall back to LINE
            logRatioFitFunctionsNoOD.remove(FitFunctionTypeEnum.EXPMAT.getName());
            logRatioFitFunctionsWithOD.remove(FitFunctionTypeEnum.EXPMAT.getName());
            logRatioFitFunctionsNoOD.remove(FitFunctionTypeEnum.EXPONENTIAL.getName());
            logRatioFitFunctionsWithOD.remove(FitFunctionTypeEnum.EXPONENTIAL.getName());
            selectedFitFunctionType = FitFunctionTypeEnum.LINE;
        }
    } else {
        // EXPFAST failed: drop all exponential entries and fall back to LINE
        logRatioFitFunctionsNoOD.remove(FitFunctionTypeEnum.EXPMAT.getName());
        logRatioFitFunctionsWithOD.remove(FitFunctionTypeEnum.EXPMAT.getName());
        logRatioFitFunctionsNoOD.remove(FitFunctionTypeEnum.EXPONENTIAL.getName());
        logRatioFitFunctionsWithOD.remove(FitFunctionTypeEnum.EXPONENTIAL.getName());
        selectedFitFunctionType = FitFunctionTypeEnum.LINE;
    }
    // march 2014 be sure line is valid in both maps; otherwise use MEAN
    if (!logRatioFitFunctionsNoOD.containsKey(selectedFitFunctionType.getName())) {
        selectedFitFunctionType = FitFunctionTypeEnum.MEAN;
    }
    if (!logRatioFitFunctionsWithOD.containsKey(selectedFitFunctionType.getName())) {
        selectedFitFunctionType = FitFunctionTypeEnum.MEAN;
    }
}
/**
 * Releases the memory-heavy uncertainty-calculation matrix after use.
 */
@Override
public void cleanupUnctCalcs() {
    // April 2015 saving a copy and using transient setSlogRatioX_Y(null);
    setMatrixSxyod(null);
}
/**
 * Generates the suite of fit functions (mean, line, exponential families) for
 * this ratio's active log-ratio data. Optionally propagates uncertainties and
 * applies the masking array first, then assembles the active-data arrays and
 * runs the individual fit generators. When both isotopes share one ion
 * counter, also computes dLrInt_dDt corrections for each stored fit function.
 *
 * @param propagateUncertainties when true, run full uncertainty propagation;
 *                               otherwise only (re)calculate SlogRatioX_Y
 * @param doApplyMaskingArray    when true, apply the masking array before fitting
 */
@Override
public void generateSetOfFitFunctions(boolean propagateUncertainties, boolean doApplyMaskingArray) {
    if (!belowDetection && (usedForFractionationCorrections || usedForCommonLeadCorrections)) {
        // april 2014
        if (doApplyMaskingArray) {
            applyMaskingArray();
        }
        // nov 2014 - detect if common lead ratios have entered or left negative territory
        if (botIsotope.getDataModelName().equalsIgnoreCase(IsotopeNames.Pb204.getName())) {
            NonParametricStats nonParametricStats = NonParametricStats.getInstance();
            ((RawIntensityDataModel) botIsotope).setForceMeanForCommonLeadRatios(//
                    nonParametricStats.determineIfTenPercentOrMoreAreNegative(//
                            dataActiveMap, //
                            ((RawIntensityDataModel) botIsotope).getOnPeakVirtualCollector().getCorrectedIntensities()));
        }
        if (propagateUncertainties) {
            propagateUnctInRatios();
        } else {
            // nov 2015
            calculateSlogRatioX_Y();
        }
        // feb 2013 clean up choice of points: count the active acquisitions
        int countOfActiveData = 0;
        for (int i = 0; i < dataActiveMap.length; i++) {
            if (dataActiveMap[i]) {
                countOfActiveData++;
            }
        }
        // compact the active points into dense parallel arrays for the solvers
        activeData = new boolean[countOfActiveData];
        activeXvalues = new double[countOfActiveData];
        activeYvalues = new double[countOfActiveData];
        double[] topCorrectedIntensities = ((RawIntensityDataModel) topIsotope).getOnPeakVirtualCollector().getCorrectedIntensities();
        double[] activeTopCorrectedIntensities = new double[countOfActiveData];
        double[] botCorrectedIntensities = ((RawIntensityDataModel) botIsotope).getOnPeakVirtualCollector().getCorrectedIntensities();
        double[] activeBotCorrectedIntensities = new double[countOfActiveData];
        double[] onPeakTimes = topIsotope.getNormalizedOnPeakAquireTimes();
        int index = 0;
        for (int i = 0; i < dataActiveMap.length; i++) {
            if (dataActiveMap[i]) {
                activeData[index] = true;
                activeXvalues[index] = onPeakTimes[i];
                activeYvalues[index] = logRatios[i];
                activeTopCorrectedIntensities[index] = topCorrectedIntensities[i];
                activeBotCorrectedIntensities[index] = botCorrectedIntensities[i];
                index++;
            }
        }
        // feb 2013 new strategy to do only once
        // also MEAN returns false if it had to use an arithmetic mean and stops further processing
        System.out.println("\nCalculate Fit Functions for Ratio " + getRawRatioModelName().getDisplayName() //
                + " USING " + (USING_FULL_PROPAGATION ? "FULL PROPAGATION" : "FAST PROPAGATION") + " COUNT = " + countOfActiveData);
        FitFunctionTypeEnum saveSelection = selectedFitFunctionType;
        // nov 2014
        if (isForceMeanForCommonLeadRatios()) {
            generateMEANfitFunctionForPbcRatiosWhereNegativeValues();
            saveSelection = FitFunctionTypeEnum.MEANRATIO;
        } else if (generateMEANfitFunctionUsingLM()) {
            // nov 2014 force mean for Pbc */204 cases where negative values (<10%) have been turned off by not proceeding
            logRatioFitFunctionsNoOD.remove(FitFunctionTypeEnum.MEANRATIO.getName());
            logRatioFitFunctionsWithOD.remove(FitFunctionTypeEnum.MEANRATIO.getName());
            if (!usedForCommonLeadCorrections) {
                try {
                    generateLINEfitFunctionUsingLM();
                } catch (Exception e) {
                    System.out.println("Exception generating LINE");
                    logRatioFitFunctionsNoOD.remove(FitFunctionTypeEnum.LINE.getName());
                    logRatioFitFunctionsWithOD.remove(FitFunctionTypeEnum.LINE.getName());
                }
                try {
                    generateEXPONENTIALfitFunctionUsingLM();
                } catch (Exception e) {
                    System.out.println("Exception generating EXPONENTIAL");
                    logRatioFitFunctionsNoOD.remove(FitFunctionTypeEnum.EXPONENTIAL.getName());
                    logRatioFitFunctionsWithOD.remove(FitFunctionTypeEnum.EXPONENTIAL.getName());
                }
                calculatedInitialFitFunctions = true;
                if (hasTwoIdenticalIonCounters()) {
                    // section 7.a.2
                    double[] intDiffValues = new double[countOfActiveData];
                    for (int i = 0; i < countOfActiveData; i++) {
                        intDiffValues[i] = activeTopCorrectedIntensities[i] - activeBotCorrectedIntensities[i];
                    }
                    Matrix matrixIntDiff = new Matrix(intDiffValues, countOfActiveData);
                    // for each of the fit functions - two cases = od and no od
                    Iterator<String> sessionFitFuncsNoOdIterator = logRatioFitFunctionsNoOD.keySet().iterator();
                    while (sessionFitFuncsNoOdIterator.hasNext()) {
                        String key = sessionFitFuncsNoOdIterator.next();
                        // skip downhole function
                        if (key.compareToIgnoreCase(FitFunctionTypeEnum.MEAN_DH.getName()) != 0) {
                            AbstractFunctionOfX FofX = logRatioFitFunctionsNoOD.get(key);
                            if (FofX != null) {
                                Matrix JIntp = FofX.assembleMatrixJIntp(SlogRatioX_Y);
                                try {
                                    FofX.setdLrInt_dDt(JIntp.times(matrixIntDiff).get(0, 0));
                                } catch (Exception e) {
                                    // best effort: leave dLrInt_dDt unset on dimension mismatch
                                }
                            } else {
                                // BUGFIX: remove via the iterator; calling map.remove(key)
                                // while iterating the key set throws ConcurrentModificationException
                                sessionFitFuncsNoOdIterator.remove();
                            }
                        }
                    }
                    Iterator<String> sessionFitFuncsWithOdIterator = logRatioFitFunctionsWithOD.keySet().iterator();
                    while (sessionFitFuncsWithOdIterator.hasNext()) {
                        String key = sessionFitFuncsWithOdIterator.next();
                        // skip downhole function
                        if (key.compareToIgnoreCase(FitFunctionTypeEnum.MEAN_DH.getName()) != 0) {
                            AbstractFunctionOfX FofX = logRatioFitFunctionsWithOD.get(key);
                            if (FofX != null) {
                                double OD = FofX.getOverDispersion();
                                // add over-dispersion to the diagonal before assembling JIntp
                                Matrix ODdiag = Matrix.identity(countOfActiveData, countOfActiveData).times(OD);
                                Matrix JIntp = FofX.assembleMatrixJIntp(SlogRatioX_Y.plus(ODdiag));
                                try {
                                    FofX.setdLrInt_dDt(JIntp.times(matrixIntDiff).get(0, 0));
                                } catch (Exception e) {
                                    // best effort: leave dLrInt_dDt unset on dimension mismatch
                                }
                            } else {
                                // BUGFIX: remove via the iterator (see note above)
                                sessionFitFuncsWithOdIterator.remove();
                            }
                        }
                    }
                }
            }
        } else {
            saveSelection = FitFunctionTypeEnum.MEAN;//oct 2014 to catch failure to fit
        }
        if (saveSelection != null) {
            selectedFitFunctionType = saveSelection;
        }
        System.gc();
    }
}
// /**
// *
// * @param fitFunctionTypeName
// */
// @Override
// public void calculateFittedFunctions(String fitFunctionTypeName) {
//
// }
/**
 * Returns the on-peak acquire times normalized by the collector data
 * frequency (in milliseconds).
 *
 * @return array, one entry per ratio, of normalized acquire times
 */
@Override
public double[] getNormalizedOnPeakAquireTimes() {
    double[] normalizedAquire = new double[ratios.length];
    // hoist the loop-invariant getter chain out of the loop
    double[] onPeakAquireTimes = ((RawIntensityDataModel) topIsotope).getOnPeakVirtualCollector().getOnPeakAquireTimes();
    for (int i = 0; i < normalizedAquire.length; i++) {
        normalizedAquire[i] = onPeakAquireTimes[i] / COLLECTOR_DATA_FREQUENCY_MILLISECS;
    }
    return normalizedAquire;
}
/**
 * Returns the on-peak acquire times converted from milliseconds to seconds.
 *
 * @return array, one entry per ratio, of acquire times in seconds
 */
@Override
public double[] getOnPeakAquireTimesInSeconds() {
    double[] onPeakAquireTimesInSeconds = new double[ratios.length];
    // hoist the loop-invariant getter chain out of the loop
    double[] onPeakAquireTimes = ((RawIntensityDataModel) topIsotope).getOnPeakVirtualCollector().getOnPeakAquireTimes();
    for (int i = 0; i < onPeakAquireTimesInSeconds.length; i++) {
        onPeakAquireTimesInSeconds[i] = onPeakAquireTimes[i] / 1000.0;
    }
    return onPeakAquireTimesInSeconds;
}
/**
 * Prints the ratio name followed by each raw ratio value (one per line)
 * to standard out. Debugging aid.
 */
public void printData() {
    // StringBuilder avoids O(n^2) string concatenation in the loop
    StringBuilder output = new StringBuilder(rawRatioName.getName()).append("\n");
    for (int i = 0; i < ratios.length; i++) {
        output.append(ratios[i]).append("\n");
    }
    System.out.println(output);
}
/**
 * @return the rawRatioName
 */
@Override
public RawRatioNames getRawRatioModelName() {
    return rawRatioName;
}

/**
 * @return the topIsotope (numerator) cast to its concrete type
 */
public RawIntensityDataModel getTopIsotope() {
    return (RawIntensityDataModel) topIsotope;
}

/**
 * @return the botIsotope (denominator) cast to its concrete type
 */
public RawIntensityDataModel getBotIsotope() {
    return (RawIntensityDataModel) botIsotope;
}

/**
 * @return the ratios (live array reference, not a copy)
 */
public double[] getRatios() {
    return ratios;
}

/**
 * @return the alphas (live array reference, not a copy)
 */
public double[] getAlphas() {
    return alphas;
}

/**
 * Sets the collector data frequency used to normalize acquire times.
 *
 * @param CollectorDataFrequencyMillisecs frequency in milliseconds
 */
@Override
public void setCollectorDataFrequencyMillisecs(long CollectorDataFrequencyMillisecs) {
    COLLECTOR_DATA_FREQUENCY_MILLISECS = CollectorDataFrequencyMillisecs;
}

/**
 * @return the collector data frequency in milliseconds
 */
@Override
public long getCollectorDataFrequencyMillisecs() {
    return COLLECTOR_DATA_FREQUENCY_MILLISECS;
}

/**
 * @return the dataActiveMap (live array reference, not a copy)
 */
public boolean[] getDataActiveMap() {
    return dataActiveMap;
}

/**
 * @param dataActiveMap the dataActiveMap to set
 */
public void setDataActiveMap(boolean[] dataActiveMap) {
    this.dataActiveMap = dataActiveMap;
}

/**
 * @return the display name of this ratio model
 */
@Override
public String getDataModelName() {
    return rawRatioName.getDisplayName();
}

/**
 * @return the usedForFractionationCorrections
 */
public boolean isUsedForFractionationCorrections() {
    return usedForFractionationCorrections;
}

/**
 * @param standardValue the standardValue to set
 */
public void setStandardValue(double standardValue) {
    this.standardValue = standardValue;
}
/**
 * Returns the session value of this ratio for the given technique:
 * "DOWNHOLE" uses the downhole fit function's 'a' term, "INTERCEPT" uses
 * the selected fit function's y-intercept. Unknown technique or any
 * failure yields 0.0.
 *
 * @param sessionTechnique "DOWNHOLE" or "INTERCEPT" (case-insensitive)
 * @return the session value, or 0.0 when unavailable
 */
public double getSessionValueBySessionTechnique(String sessionTechnique) {
    double retVal = 0.0;
    // constant-first equalsIgnoreCase is null-safe for sessionTechnique
    if ("DOWNHOLE".equalsIgnoreCase(sessionTechnique)) {
        try {
            retVal = getSelectedDownHoleFitFunction().getA();//updated june 2015 getMeanOfResidualsFromFittedFractionation();
        } catch (Exception e) {
            retVal = 0.0; // best effort: no downhole fit function available
        }
    } else if ("INTERCEPT".equalsIgnoreCase(sessionTechnique)) {
        try {
            retVal = getSelectedFitFunction().getYIntercept();
        } catch (Exception e) {
            retVal = 0.0; // best effort: no selected fit function available
        }
    }
    return retVal;
}
/**
 * Returns the session error (1-sigma) of this ratio for the given technique:
 * "DOWNHOLE" uses the downhole fit function's stdErr of 'a', "INTERCEPT"
 * uses the selected fit function's y-intercept stdErr. Unknown technique
 * or any failure yields 0.0.
 *
 * @param sessionTechnique "DOWNHOLE" or "INTERCEPT" (case-insensitive)
 * @return the session error, or 0.0 when unavailable
 */
public double getSessionErrorBySessionTechnique(String sessionTechnique) {
    double retVal = 0.0;
    // constant-first equalsIgnoreCase is null-safe for sessionTechnique
    if ("DOWNHOLE".equalsIgnoreCase(sessionTechnique)) {
        try {
            retVal = getSelectedDownHoleFitFunction().getStdErrOfA();//updated june 2015 getStdErrOfmeanOfResidualsFromFittedFractionation();
        } catch (Exception e) {
            retVal = 0.0; // best effort: no downhole fit function available
        }
    } else if ("INTERCEPT".equalsIgnoreCase(sessionTechnique)) {
        try {
            retVal = getSelectedFitFunction().getYInterceptStdErr();
        } catch (Exception e) {
            retVal = 0.0; // best effort: no selected fit function available
        }
    }
    return retVal;
}
/**
 * Returns the session error including over-dispersion for the given
 * technique, combined in quadrature: sqrt(stdErr^2 + overDispersion).
 * Unknown technique or any failure yields 0.0.
 *
 * @param sessionTechnique "DOWNHOLE" or "INTERCEPT" (case-insensitive)
 * @return the combined session error, or 0.0 when unavailable
 */
public double getSessionErrorPlusODBySessionTechnique(String sessionTechnique) {
    double retVal = 0.0;
    // constant-first equalsIgnoreCase is null-safe; else-if matches the
    // structure of the sibling getSession*BySessionTechnique methods
    if ("DOWNHOLE".equalsIgnoreCase(sessionTechnique)) {
        try {
            AbstractFunctionOfX FofX = getSelectedDownHoleFitFunction();
            retVal = Math.sqrt(Math.pow(FofX.getStdErrOfA(), 2) + FofX.getOverDispersion());// updated june 2015 getStdErrOfmeanOfResidualsFromFittedFractionation();
        } catch (Exception e) {
            retVal = 0.0; // best effort: no downhole fit function available
        }
    } else if ("INTERCEPT".equalsIgnoreCase(sessionTechnique)) {
        try {
            AbstractFunctionOfX FofX = getSelectedFitFunction();
            retVal = Math.sqrt(FofX.getYInterceptVariance() + FofX.getOverDispersion());
        } catch (Exception e) {
            retVal = 0.0; // best effort: no selected fit function available
        }
    }
    return retVal;
}
/**
 * Returns the session variance of this ratio for the given technique:
 * "DOWNHOLE" squares the downhole stdErr of 'a', "INTERCEPT" uses the
 * selected fit function's y-intercept variance. Unknown technique or any
 * failure yields 0.0.
 *
 * @param sessionTechnique "DOWNHOLE" or "INTERCEPT" (case-insensitive)
 * @return the session variance, or 0.0 when unavailable
 */
public double getSessionVarianceBySessionTechnique(String sessionTechnique) {
    double retVal = 0.0;
    // constant-first equalsIgnoreCase is null-safe for sessionTechnique
    if ("DOWNHOLE".equalsIgnoreCase(sessionTechnique)) {
        try {
            retVal = Math.pow(getSelectedDownHoleFitFunction().getStdErrOfA(), 2); // updated june 2015 from getStdErrOfmeanOfResidualsFromFittedFractionation()
        } catch (Exception e) {
            retVal = 0.0; // best effort: no downhole fit function available
        }
    } else if ("INTERCEPT".equalsIgnoreCase(sessionTechnique)) {
        try {
            retVal = getSelectedFitFunction().getYInterceptVariance();
        } catch (Exception e) {
            retVal = 0.0; // best effort: no selected fit function available
        }
    }
    return retVal;
}
/**
 * @return the meanOfResidualsFromFittedFractionation
 */
public double getMeanOfResidualsFromFittedFractionation() {
    return meanOfResidualsFromFittedFractionation;
}

/**
 * @param meanOfResidualsFromFittedFractionation the
 * meanOfResidualsFromFittedFractionation to set
 */
public void setMeanOfResidualsFromFittedFractionation(double meanOfResidualsFromFittedFractionation) {
    this.meanOfResidualsFromFittedFractionation = meanOfResidualsFromFittedFractionation;
}

/**
 * @return the stdErrOfmeanOfResidualsFromFittedFractionation
 */
public double getStdErrOfmeanOfResidualsFromFittedFractionation() {
    return stdErrOfmeanOfResidualsFromFittedFractionation;
}

/**
 * @param stdErrOfmeanOfResidualsFromFittedFractionation the
 * stdErrOfmeanOfResidualsFromFittedFractionation to set
 */
public void setStdErrOfmeanOfResidualsFromFittedFractionation(double stdErrOfmeanOfResidualsFromFittedFractionation) {
    this.stdErrOfmeanOfResidualsFromFittedFractionation = stdErrOfmeanOfResidualsFromFittedFractionation;
}

/**
 * @return the correctedRatios (live array reference, not a copy)
 */
public double[] getCorrectedRatios() {
    return correctedRatios;
}

/**
 * @param correctedRatios the correctedRatios to set
 */
public void setCorrectedRatios(double[] correctedRatios) {
    this.correctedRatios = correctedRatios;
}

/**
 * @return the meanOfCorrectedRatios; NaN is coerced to 0.0 (and stored)
 */
public double getMeanOfCorrectedRatios() {
    if (Double.isNaN(meanOfCorrectedRatios)) {
        meanOfCorrectedRatios = 0.0;
    }
    return meanOfCorrectedRatios;
}

/**
 * @return the stdDevOfCorrectedRatios
 */
public double getStdDevOfCorrectedRatios() {
    return stdDevOfCorrectedRatios;
}

/**
 * @return the stdErrOfMeanCorrectedRatios; NaN is coerced to 0.0 (and stored)
 */
public double getStdErrOfMeanCorrectedRatios() {
    if (Double.isNaN(stdErrOfMeanCorrectedRatios)) {
        stdErrOfMeanCorrectedRatios = 0.0;
    }
    return stdErrOfMeanCorrectedRatios;
}
/**
 * Returns the family of stored fit functions that matches the current
 * over-dispersion selection.
 *
 * @return the with-OD map when over-dispersion is selected, else the no-OD map
 */
@Override
public Map<String, AbstractFunctionOfX> getFitFunctions() {
    return overDispersionSelected ? logRatioFitFunctionsWithOD : logRatioFitFunctionsNoOD;
}
/**
 * @param logRatioFitFunctionsNoOD the logRatioFitFunctionsNoOD to set
 */
public void setLogRatioFitFunctionsNoOD(Map<String, AbstractFunctionOfX> logRatioFitFunctionsNoOD) {
    this.logRatioFitFunctionsNoOD = logRatioFitFunctionsNoOD;
}

/**
 * @return the selectedFitFunctionType
 */
@Override
public FitFunctionTypeEnum getSelectedFitFunctionType() {
    return selectedFitFunctionType;
}
/**
 * Returns the currently selected fit function, honoring the over-dispersion
 * setting. When the selected type has no stored function, the selection
 * falls back to MEAN (the field is updated as a side effect) and the MEAN
 * function is returned instead.
 *
 * @return the selected fit function, or null when even MEAN is absent
 */
@Override
public AbstractFunctionOfX getSelectedFitFunction() {
    // choose the map once instead of duplicating the OD branch twice
    Map<String, AbstractFunctionOfX> fitFunctions = overDispersionSelected
            ? logRatioFitFunctionsWithOD
            : logRatioFitFunctionsNoOD;
    AbstractFunctionOfX fitFunc = fitFunctions.get(selectedFitFunctionType.getName());
    // march 2014: fall back to MEAN when the selected type failed to fit
    if (fitFunc == null) {
        selectedFitFunctionType = FitFunctionTypeEnum.MEAN;
        fitFunc = fitFunctions.get(selectedFitFunctionType.getName());
    }
    return fitFunc;
}
/**
 * Returns the stored downhole (MEAN_DH) fit function, honoring the
 * downhole over-dispersion setting.
 *
 * @return the MEAN_DH fit function, or null when none is stored
 */
@Override
public AbstractFunctionOfX getSelectedDownHoleFitFunction() {
    Map<String, AbstractFunctionOfX> funcs =
            overDispersionSelectedDownHole ? logRatioFitFunctionsWithOD : logRatioFitFunctionsNoOD;
    return funcs.get(FitFunctionTypeEnum.MEAN_DH.getName());
}
/**
 * @param selectedFitFunctionType the selectedFitFunctionType to set
 */
@Override
public void setSelectedFitFunctionType(FitFunctionTypeEnum selectedFitFunctionType) {
    this.selectedFitFunctionType = selectedFitFunctionType;
}
/**
 * Evaluates the currently selected fit function at every normalized on-peak
 * acquire time, caching the results in fitFunctionLogValues. Any evaluation
 * failure (e.g. missing fit function) stores 0.0 for that entry.
 *
 * @return the fitFunctionLogValues (live array reference)
 */
public double[] getFitFunctionLogValues() {
    AbstractFunctionOfX selected = getSelectedFitFunction();
    for (int idx = 0; idx < fitFunctionLogValues.length; idx++) {
        try {
            double time = topIsotope.getNormalizedOnPeakAquireTimes()[idx];
            fitFunctionLogValues[idx] = selected.f(time);
        } catch (Exception e) {
            fitFunctionLogValues[idx] = 0.0;
        }
    }
    return fitFunctionLogValues;
}
/**
 * Evaluates the selected downhole fit function at every normalized on-peak
 * acquire time, caching the results in fitFunctionLogValues. Any evaluation
 * failure (e.g. missing fit function) stores 0.0 for that entry.
 *
 * @return the fitFunctionLogValues (live array reference)
 */
public double[] getDownHoleFitFunctionLogValues() {
    AbstractFunctionOfX downHole = getSelectedDownHoleFitFunction();
    for (int idx = 0; idx < fitFunctionLogValues.length; idx++) {
        try {
            double time = topIsotope.getNormalizedOnPeakAquireTimes()[idx];
            fitFunctionLogValues[idx] = downHole.f(time);
        } catch (Exception e) {
            fitFunctionLogValues[idx] = 0.0;
        }
    }
    return fitFunctionLogValues;
}
/**
 * @param meanOfCorrectedRatios the meanOfCorrectedRatios to set
 */
public void setMeanOfCorrectedRatios(double meanOfCorrectedRatios) {
    this.meanOfCorrectedRatios = meanOfCorrectedRatios;
}

/**
 * @param stdErrOfMeanCorrectedRatios the stdErrOfMeanCorrectedRatios to set
 */
public void setStdErrOfMeanCorrectedRatios(double stdErrOfMeanCorrectedRatios) {
    this.stdErrOfMeanCorrectedRatios = stdErrOfMeanCorrectedRatios;
}

/**
 * @return the standardValue
 */
@Override
public double getStandardValue() {
    return standardValue;
}

/**
 * @param usedForFractionationCorrections the
 * usedForFractionationCorrections to set
 */
public void setUsedForFractionationCorrections(boolean usedForFractionationCorrections) {
    this.usedForFractionationCorrections = usedForFractionationCorrections;
}

/**
 * @return the belowDetection
 */
@Override
public boolean isBelowDetection() {
    return belowDetection;
}

/**
 * @param belowDetection the belowDetection to set
 */
public void setBelowDetection(boolean belowDetection) {
    this.belowDetection = belowDetection;
}
/**
 * Returns the SlogRatioX_Y covariance matrix, (re)building it first when
 * requested or when it has not been calculated yet.
 *
 * @param resetMatrix when true, force recalculation
 * @return the SlogRatioX_Y matrix
 */
public Matrix getSlogRatioX_Y(boolean resetMatrix) {
    boolean needsRebuild = resetMatrix || (SlogRatioX_Y == null);
    if (needsRebuild) {
        calculateSlogRatioX_Y();
    }
    return SlogRatioX_Y;
}
/**
 * To support downhole: returns a copy of the full SlogRatioX_Y matrix with
 * the rows and columns of shaded (masked-off) acquisitions removed.
 *
 * @return trimmed copy of SlogRatioX_Yfull
 */
public Matrix getSlogRatioX_Y_withZeroesAtInactive() {
    // ignore shades - shades will be false only at left end and right end, already ignored by downhole fit function
    boolean[] shades = MaskingSingleton.getInstance().getMaskingArray();
    // collect shade and inactive indices
    ArrayList<Integer> shadeIndices = new ArrayList<>();
    ArrayList<Integer> inactiveIndices = new ArrayList<>();
    for (int i = 0; i < dataActiveMap.length; i++) {
        if (!shades[i]) {
            shadeIndices.add(i);
        } else if (!dataActiveMap[i]) {
            inactiveIndices.add(i);
        }
    }
    // NOTE(review): the step that zeroed out rows/cols for inactiveIndices was
    // disabled (previously commented out here); inactiveIndices is kept to
    // preserve the original intent but is currently unused.
    Matrix slogRatioX_Y_withZeroesAtInactive = SlogRatioX_Yfull.copy();
    // remove row and col of matrix corresponding to shadeIndices
    if (!shadeIndices.isEmpty()) {
        // sort descending so earlier removals do not shift later indices
        Collections.sort(shadeIndices, Collections.reverseOrder());
        for (Integer indexToRemove : shadeIndices) {
            slogRatioX_Y_withZeroesAtInactive = MatrixRemover.removeRow(slogRatioX_Y_withZeroesAtInactive, indexToRemove);
            slogRatioX_Y_withZeroesAtInactive = MatrixRemover.removeCol(slogRatioX_Y_withZeroesAtInactive, indexToRemove);
        }
    }
    return slogRatioX_Y_withZeroesAtInactive;
}
/**
 * @param SlogRatioX_Y the SlogRatioX_Y to set
 */
public void setSlogRatioX_Y(Matrix SlogRatioX_Y) {
    this.SlogRatioX_Y = SlogRatioX_Y;
}

/**
 * @return the matrixSxyod
 */
public Matrix getMatrixSxyod() {
    return matrixSxyod;
}

/**
 * @param matrixSxyod the matrixSxyod to set
 */
public void setMatrixSxyod(Matrix matrixSxyod) {
    this.matrixSxyod = matrixSxyod;
}

/**
 * @return the logRatios (live array reference, not a copy)
 */
public double[] getLogRatios() {
    return logRatios;
}
/**
 * Collects the log ratios at the currently active data indices.
 *
 * @param activeCount number of true entries in dataActiveMap (sizes the result)
 * @return array of length activeCount holding the active log ratios in order
 */
public double[] getActiveLogRatios(int activeCount) {
    double[] result = new double[activeCount];
    int next = 0;
    for (int i = 0; i < dataActiveMap.length; i++) {
        if (dataActiveMap[i]) {
            result[next++] = logRatios[i];
        }
    }
    return result;
}
/**
 * Solves the full SlogRatioX_Y matrix against the log-ratio column vector,
 * then removes the rows corresponding to shaded acquisitions (points turned
 * off in the common active map).
 *
 * @param dataCommonActiveMap the session-wide active map; false marks a shade
 * @return the solved column vector with shaded rows removed
 */
public Matrix SlogRXYSolveLRWithZeroesAtInactive(boolean[] dataCommonActiveMap) {
    // take the SLogRatioXYALL and solve it with logRatiosVector,
    // then remove rows for left and right shades
    ArrayList<Integer> shadeIndices = new ArrayList<>();
    ArrayList<Integer> inactiveIndices = new ArrayList<>();
    // NOTE(review): loop bound is dataActiveMap.length while indexing
    // dataCommonActiveMap - assumes both arrays have equal length; confirm.
    for (int i = 0; i < dataActiveMap.length; i++) {
        if (!dataCommonActiveMap[i]) {
            shadeIndices.add(i);
        } else if (!dataActiveMap[i]) {
            inactiveIndices.add(i);
        }
    }
    // NOTE(review): the step that zeroed rows for inactiveIndices was disabled
    // (previously commented out here); inactiveIndices is currently unused.
    // make a col vector from logratios
    Matrix logRatioColVector = new Matrix(logRatios, logRatios.length);
    // solve making another column vector (renamed so the local no longer
    // shadows the method name)
    Matrix solvedColVector = SlogRatioX_Yfull.solve(logRatioColVector);
    // remove shaded points
    if (!shadeIndices.isEmpty()) {
        // sort descending so earlier removals do not shift later indices
        Collections.sort(shadeIndices, Collections.reverseOrder());
        for (Integer indexToRemove : shadeIndices) {
            solvedColVector = MatrixRemover.removeRow(solvedColVector, indexToRemove);
        }
    }
    return solvedColVector;
}
/**
 * @return true when the numerator and denominator isotopes report equal
 *         collector models (i.e. share the same ion counter)
 */
public boolean hasTwoIdenticalIonCounters() {
    return (topIsotope.getCollectorModel()//
            .equals(botIsotope.getCollectorModel()));
}

/**
 * Returns the difference between the summed corrected on-peak intensities
 * of the numerator and denominator isotopes.
 * NOTE(review): despite the derivative-style name, the visible code computes
 * a plain difference of intensity sums - confirm intended semantics.
 *
 * @return top sum minus bottom sum of corrected on-peak intensities
 */
public double getdLr_dDt() {
    return ((RawIntensityDataModel) topIsotope).getOnPeakVirtualCollector()//
            .getSumOfCorrectedOnPeakIntensities()//
            - ((RawIntensityDataModel) botIsotope).getOnPeakVirtualCollector()//
                    .getSumOfCorrectedOnPeakIntensities();
}

/**
 * @return true once generateSetOfFitFunctions has completed its fitting pass
 */
@Override
public boolean isCalculatedInitialFitFunctions() {
    return calculatedInitialFitFunctions;
}
/**
 * Reports whether a fit function of the given type is stored, consulting the
 * map that matches the relevant over-dispersion flag (the downhole flag for
 * MEAN_DH, the general flag otherwise).
 *
 * @param fitFunctionType the fit function type to look up
 * @return true when a non-null function of that type is stored
 */
@Override
public boolean containsFitFunction(FitFunctionTypeEnum fitFunctionType) {
    boolean useOdMap;
    if (fitFunctionType.compareTo(FitFunctionTypeEnum.MEAN_DH) == 0) {
        useOdMap = overDispersionSelectedDownHole;
    } else {
        useOdMap = overDispersionSelected;
    }
    Map<String, AbstractFunctionOfX> funcs = useOdMap ? logRatioFitFunctionsWithOD : logRatioFitFunctionsNoOD;
    return funcs.get(fitFunctionType.getName()) != null;
}
/**
 * @return true when over-dispersed fit functions are selected
 */
@Override
public boolean isOverDispersionSelected() {
    return overDispersionSelected;
}

/**
 * @param overDispersionSelected the overDispersionSelected to set
 */
@Override
public void setOverDispersionSelected(boolean overDispersionSelected) {
    this.overDispersionSelected = overDispersionSelected;
}
/**
 * Reports whether the stored with-OD fit function of the given type exists
 * and has over-dispersion selected on it.
 *
 * @param fitFunctionType the fit function type to check
 * @return true when a stored function of that type reports over-dispersion
 */
@Override
public boolean doesFitFunctionTypeHaveOD(FitFunctionTypeEnum fitFunctionType) {
    AbstractFunctionOfX fitFunc = logRatioFitFunctionsWithOD.get(fitFunctionType.getName());
    return (fitFunc != null) && fitFunc.isOverDispersionSelected();
}
/**
 * Returns xi (the square root of the over-dispersion) for the given fit
 * function type, or 0.0 when that type has no over-dispersion.
 *
 * @param fitFunctionType the fit function type to query
 * @return sqrt(overDispersion), or 0.0 when not applicable
 */
@Override
public double getXIforFitFunction(FitFunctionTypeEnum fitFunctionType) {
    if (doesFitFunctionTypeHaveOD(fitFunctionType)) {
        return Math.sqrt(logRatioFitFunctionsWithOD.get(fitFunctionType.getName()).getOverDispersion());
    }
    return 0.0;
}
/**
 * A ratio has no collector of its own.
 *
 * @return always null for this ratio-level data model
 */
@Override
public AbstractCollectorModel getCollectorModel() {
    return null;
}

/**
 * @param usedForCommonLeadCorrections the usedForCommonLeadCorrections to
 * set
 */
public void setUsedForCommonLeadCorrections(boolean usedForCommonLeadCorrections) {
    this.usedForCommonLeadCorrections = usedForCommonLeadCorrections;
}

/**
 * @return the usedForCommonLeadCorrections
 */
@Override
public boolean isUsedForCommonLeadCorrections() {
    return usedForCommonLeadCorrections;
}

/**
 * Delegates to the denominator isotope's flag forcing a mean fit for
 * common-lead ratios.
 *
 * @return the denominator isotope's forceMeanForCommonLeadRatios flag
 */
@Override
public boolean isForceMeanForCommonLeadRatios() {
    // System.out.println("BOTTOM " + botIsotope.getDataModelName());
    return botIsotope.isForceMeanForCommonLeadRatios();
}
/**
 * @return the topSopbclr
 */
public Matrix getTopSopbclr() {
    return topSopbclr;
}

/**
 * @return the botSopbclr
 */
public Matrix getBotSopbclr() {
    return botSopbclr;
}

/**
 * @return the USING_FULL_PROPAGATION flag (full vs fast uncertainty propagation)
 */
public boolean isUSING_FULL_PROPAGATION() {
    return USING_FULL_PROPAGATION;
}

/**
 * @param USING_FULL_PROPAGATION the USING_FULL_PROPAGATION to set
 */
public void setUSING_FULL_PROPAGATION(boolean USING_FULL_PROPAGATION) {
    this.USING_FULL_PROPAGATION = USING_FULL_PROPAGATION;
}

/**
 * @return the logDifferencesFromWeightedMean (live array reference)
 */
public double[] getLogDifferencesFromWeightedMean() {
    return logDifferencesFromWeightedMean;
}

/**
 * @return the overDispersionSelectedDownHole
 */
public boolean isOverDispersionSelectedDownHole() {
    return overDispersionSelectedDownHole;
}

/**
 * @param overDispersionSelectedDownHole the overDispersionSelectedDownHole
 * to set
 */
public void setOverDispersionSelectedDownHole(boolean overDispersionSelectedDownHole) {
    this.overDispersionSelectedDownHole = overDispersionSelectedDownHole;
}

/**
 * @return the downHoleFitFunction
 */
public AbstractFunctionOfX getDownHoleFitFunction() {
    return downHoleFitFunction;
}

/**
 * @param downHoleFitFunction the downHoleFitFunction to set
 */
public void setDownHoleFitFunction(AbstractFunctionOfX downHoleFitFunction) {
    this.downHoleFitFunction = downHoleFitFunction;
}
}
| |
package lt.nsg.jdbcglass.resultset;
import lt.nsg.jdbcglass.core.Wrappable;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.*;
import java.util.Calendar;
import java.util.Map;
public abstract class AbstractResultSetProxy extends Wrappable implements ResultSet {
// The wrapped ResultSet every call is delegated to.
private final ResultSet resultSet;
// The Statement that produced this result set (retained for subclasses/bookkeeping).
private final Statement originalStatement;

/**
 * Wraps the given ResultSet, registering it with the Wrappable base class.
 *
 * @param resultSet         the result set to delegate to
 * @param originalStatement the statement that produced the result set
 */
public AbstractResultSetProxy(ResultSet resultSet, Statement originalStatement) {
    super(resultSet);
    this.resultSet = resultSet;
    this.originalStatement = originalStatement;
}

/**
 * @return true when this proxy wraps exactly the given instance
 *         (reference identity, not equals)
 */
public boolean isTargetProxyOf(ResultSet resultSet) {
    return this.resultSet == resultSet;
}

/**
 * @return the wrapped ResultSet, for use by subclasses
 */
protected ResultSet getResultSet() {
    return this.resultSet;
}
// --- Straight delegation to the wrapped ResultSet: cursor control and
// --- column-index-based getters. No behavior is added by this proxy layer.
@Override
public boolean next() throws SQLException {
    return resultSet.next();
}

@Override
public void close() throws SQLException {
    resultSet.close();
}

@Override
public boolean wasNull() throws SQLException {
    return resultSet.wasNull();
}

@Override
public String getString(int columnIndex) throws SQLException {
    return resultSet.getString(columnIndex);
}

@Override
public boolean getBoolean(int columnIndex) throws SQLException {
    return resultSet.getBoolean(columnIndex);
}

@Override
public byte getByte(int columnIndex) throws SQLException {
    return resultSet.getByte(columnIndex);
}

@Override
public short getShort(int columnIndex) throws SQLException {
    return resultSet.getShort(columnIndex);
}

@Override
public int getInt(int columnIndex) throws SQLException {
    return resultSet.getInt(columnIndex);
}

@Override
public long getLong(int columnIndex) throws SQLException {
    return resultSet.getLong(columnIndex);
}

@Override
public float getFloat(int columnIndex) throws SQLException {
    return resultSet.getFloat(columnIndex);
}

@Override
public double getDouble(int columnIndex) throws SQLException {
    return resultSet.getDouble(columnIndex);
}

// Deprecated in JDBC but part of the ResultSet contract; delegated as-is.
@Override
@SuppressWarnings("deprecation")
public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException {
    return resultSet.getBigDecimal(columnIndex, scale);
}

@Override
public byte[] getBytes(int columnIndex) throws SQLException {
    return resultSet.getBytes(columnIndex);
}

@Override
public Date getDate(int columnIndex) throws SQLException {
    return resultSet.getDate(columnIndex);
}

@Override
public Time getTime(int columnIndex) throws SQLException {
    return resultSet.getTime(columnIndex);
}

@Override
public Timestamp getTimestamp(int columnIndex) throws SQLException {
    return resultSet.getTimestamp(columnIndex);
}

@Override
public InputStream getAsciiStream(int columnIndex) throws SQLException {
    return resultSet.getAsciiStream(columnIndex);
}

// Deprecated in JDBC but part of the ResultSet contract; delegated as-is.
@Override
@SuppressWarnings("deprecation")
public InputStream getUnicodeStream(int columnIndex) throws SQLException {
    return resultSet.getUnicodeStream(columnIndex);
}

@Override
public InputStream getBinaryStream(int columnIndex) throws SQLException {
    return resultSet.getBinaryStream(columnIndex);
}
// --- Column getters addressed by column label. ---
// Mirrors the index-based getters above; every call delegates directly
// to the wrapped ResultSet.
@Override
public String getString(String columnLabel) throws SQLException {
    return resultSet.getString(columnLabel);
}
@Override
public boolean getBoolean(String columnLabel) throws SQLException {
    return resultSet.getBoolean(columnLabel);
}
@Override
public byte getByte(String columnLabel) throws SQLException {
    return resultSet.getByte(columnLabel);
}
@Override
public short getShort(String columnLabel) throws SQLException {
    return resultSet.getShort(columnLabel);
}
@Override
public int getInt(String columnLabel) throws SQLException {
    return resultSet.getInt(columnLabel);
}
@Override
public long getLong(String columnLabel) throws SQLException {
    return resultSet.getLong(columnLabel);
}
@Override
public float getFloat(String columnLabel) throws SQLException {
    return resultSet.getFloat(columnLabel);
}
@Override
public double getDouble(String columnLabel) throws SQLException {
    return resultSet.getDouble(columnLabel);
}
// Deprecated in JDBC (scale argument); delegated unchanged.
@Override
@SuppressWarnings("deprecation")
public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException {
    return resultSet.getBigDecimal(columnLabel, scale);
}
@Override
public byte[] getBytes(String columnLabel) throws SQLException {
    return resultSet.getBytes(columnLabel);
}
@Override
public Date getDate(String columnLabel) throws SQLException {
    return resultSet.getDate(columnLabel);
}
@Override
public Time getTime(String columnLabel) throws SQLException {
    return resultSet.getTime(columnLabel);
}
@Override
public Timestamp getTimestamp(String columnLabel) throws SQLException {
    return resultSet.getTimestamp(columnLabel);
}
@Override
public InputStream getAsciiStream(String columnLabel) throws SQLException {
    return resultSet.getAsciiStream(columnLabel);
}
// Deprecated in JDBC; kept as a straight delegate for compatibility.
@Override
@SuppressWarnings("deprecation")
public InputStream getUnicodeStream(String columnLabel) throws SQLException {
    return resultSet.getUnicodeStream(columnLabel);
}
@Override
public InputStream getBinaryStream(String columnLabel) throws SQLException {
    return resultSet.getBinaryStream(columnLabel);
}
// --- Warnings, metadata, generic object access, and reader getters. ---
// All pure delegations to the wrapped ResultSet.
@Override
public SQLWarning getWarnings() throws SQLException {
    return resultSet.getWarnings();
}
@Override
public void clearWarnings() throws SQLException {
    resultSet.clearWarnings();
}
@Override
public String getCursorName() throws SQLException {
    return resultSet.getCursorName();
}
@Override
public ResultSetMetaData getMetaData() throws SQLException {
    return resultSet.getMetaData();
}
@Override
public Object getObject(int columnIndex) throws SQLException {
    return resultSet.getObject(columnIndex);
}
@Override
public Object getObject(String columnLabel) throws SQLException {
    return resultSet.getObject(columnLabel);
}
@Override
public int findColumn(String columnLabel) throws SQLException {
    return resultSet.findColumn(columnLabel);
}
@Override
public Reader getCharacterStream(int columnIndex) throws SQLException {
    return resultSet.getCharacterStream(columnIndex);
}
@Override
public Reader getCharacterStream(String columnLabel) throws SQLException {
    return resultSet.getCharacterStream(columnLabel);
}
@Override
public BigDecimal getBigDecimal(int columnIndex) throws SQLException {
    return resultSet.getBigDecimal(columnIndex);
}
@Override
public BigDecimal getBigDecimal(String columnLabel) throws SQLException {
    return resultSet.getBigDecimal(columnLabel);
}
// --- Cursor position queries, navigation, and fetch configuration. ---
// Straight delegations; navigation support ultimately depends on the
// type/concurrency of the wrapped ResultSet.
@Override
public boolean isBeforeFirst() throws SQLException {
    return resultSet.isBeforeFirst();
}
@Override
public boolean isAfterLast() throws SQLException {
    return resultSet.isAfterLast();
}
@Override
public boolean isFirst() throws SQLException {
    return resultSet.isFirst();
}
@Override
public boolean isLast() throws SQLException {
    return resultSet.isLast();
}
@Override
public void beforeFirst() throws SQLException {
    resultSet.beforeFirst();
}
@Override
public void afterLast() throws SQLException {
    resultSet.afterLast();
}
@Override
public boolean first() throws SQLException {
    return resultSet.first();
}
@Override
public boolean last() throws SQLException {
    return resultSet.last();
}
@Override
public int getRow() throws SQLException {
    return resultSet.getRow();
}
@Override
public boolean absolute(int row) throws SQLException {
    return resultSet.absolute(row);
}
@Override
public boolean relative(int rows) throws SQLException {
    return resultSet.relative(rows);
}
@Override
public boolean previous() throws SQLException {
    return resultSet.previous();
}
@Override
public void setFetchDirection(int direction) throws SQLException {
    resultSet.setFetchDirection(direction);
}
@Override
public int getFetchDirection() throws SQLException {
    return resultSet.getFetchDirection();
}
@Override
public void setFetchSize(int rows) throws SQLException {
    resultSet.setFetchSize(rows);
}
@Override
public int getFetchSize() throws SQLException {
    return resultSet.getFetchSize();
}
@Override
public int getType() throws SQLException {
    return resultSet.getType();
}
@Override
public int getConcurrency() throws SQLException {
    return resultSet.getConcurrency();
}
// --- Row-change detection and column updaters addressed by index. ---
// All pure delegations; updates are only effective if the wrapped
// ResultSet is updatable.
@Override
public boolean rowUpdated() throws SQLException {
    return resultSet.rowUpdated();
}
@Override
public boolean rowInserted() throws SQLException {
    return resultSet.rowInserted();
}
@Override
public boolean rowDeleted() throws SQLException {
    return resultSet.rowDeleted();
}
@Override
public void updateNull(int columnIndex) throws SQLException {
    resultSet.updateNull(columnIndex);
}
@Override
public void updateBoolean(int columnIndex, boolean x) throws SQLException {
    resultSet.updateBoolean(columnIndex, x);
}
@Override
public void updateByte(int columnIndex, byte x) throws SQLException {
    resultSet.updateByte(columnIndex, x);
}
@Override
public void updateShort(int columnIndex, short x) throws SQLException {
    resultSet.updateShort(columnIndex, x);
}
@Override
public void updateInt(int columnIndex, int x) throws SQLException {
    resultSet.updateInt(columnIndex, x);
}
@Override
public void updateLong(int columnIndex, long x) throws SQLException {
    resultSet.updateLong(columnIndex, x);
}
@Override
public void updateFloat(int columnIndex, float x) throws SQLException {
    resultSet.updateFloat(columnIndex, x);
}
@Override
public void updateDouble(int columnIndex, double x) throws SQLException {
    resultSet.updateDouble(columnIndex, x);
}
@Override
public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException {
    resultSet.updateBigDecimal(columnIndex, x);
}
@Override
public void updateString(int columnIndex, String x) throws SQLException {
    resultSet.updateString(columnIndex, x);
}
@Override
public void updateBytes(int columnIndex, byte[] x) throws SQLException {
    resultSet.updateBytes(columnIndex, x);
}
@Override
public void updateDate(int columnIndex, Date x) throws SQLException {
    resultSet.updateDate(columnIndex, x);
}
@Override
public void updateTime(int columnIndex, Time x) throws SQLException {
    resultSet.updateTime(columnIndex, x);
}
@Override
public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException {
    resultSet.updateTimestamp(columnIndex, x);
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException {
    resultSet.updateAsciiStream(columnIndex, x, length);
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException {
    resultSet.updateBinaryStream(columnIndex, x, length);
}
@Override
public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException {
    resultSet.updateCharacterStream(columnIndex, x, length);
}
@Override
public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException {
    resultSet.updateObject(columnIndex, x, scaleOrLength);
}
@Override
public void updateObject(int columnIndex, Object x) throws SQLException {
    resultSet.updateObject(columnIndex, x);
}
// --- Column updaters addressed by label, plus row-level operations. ---
// Mirrors the index-based updaters above; all calls delegate directly
// to the wrapped ResultSet.
@Override
public void updateNull(String columnLabel) throws SQLException {
    resultSet.updateNull(columnLabel);
}
@Override
public void updateBoolean(String columnLabel, boolean x) throws SQLException {
    resultSet.updateBoolean(columnLabel, x);
}
@Override
public void updateByte(String columnLabel, byte x) throws SQLException {
    resultSet.updateByte(columnLabel, x);
}
@Override
public void updateShort(String columnLabel, short x) throws SQLException {
    resultSet.updateShort(columnLabel, x);
}
@Override
public void updateInt(String columnLabel, int x) throws SQLException {
    resultSet.updateInt(columnLabel, x);
}
@Override
public void updateLong(String columnLabel, long x) throws SQLException {
    resultSet.updateLong(columnLabel, x);
}
@Override
public void updateFloat(String columnLabel, float x) throws SQLException {
    resultSet.updateFloat(columnLabel, x);
}
@Override
public void updateDouble(String columnLabel, double x) throws SQLException {
    resultSet.updateDouble(columnLabel, x);
}
@Override
public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException {
    resultSet.updateBigDecimal(columnLabel, x);
}
@Override
public void updateString(String columnLabel, String x) throws SQLException {
    resultSet.updateString(columnLabel, x);
}
@Override
public void updateBytes(String columnLabel, byte[] x) throws SQLException {
    resultSet.updateBytes(columnLabel, x);
}
@Override
public void updateDate(String columnLabel, Date x) throws SQLException {
    resultSet.updateDate(columnLabel, x);
}
@Override
public void updateTime(String columnLabel, Time x) throws SQLException {
    resultSet.updateTime(columnLabel, x);
}
@Override
public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException {
    resultSet.updateTimestamp(columnLabel, x);
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException {
    resultSet.updateAsciiStream(columnLabel, x, length);
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException {
    resultSet.updateBinaryStream(columnLabel, x, length);
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException {
    resultSet.updateCharacterStream(columnLabel, reader, length);
}
@Override
public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException {
    resultSet.updateObject(columnLabel, x, scaleOrLength);
}
@Override
public void updateObject(String columnLabel, Object x) throws SQLException {
    resultSet.updateObject(columnLabel, x);
}
// Row-level mutations and cursor mode switches, delegated unchanged.
@Override
public void insertRow() throws SQLException {
    resultSet.insertRow();
}
@Override
public void updateRow() throws SQLException {
    resultSet.updateRow();
}
@Override
public void deleteRow() throws SQLException {
    resultSet.deleteRow();
}
@Override
public void refreshRow() throws SQLException {
    resultSet.refreshRow();
}
@Override
public void cancelRowUpdates() throws SQLException {
    resultSet.cancelRowUpdates();
}
@Override
public void moveToInsertRow() throws SQLException {
    resultSet.moveToInsertRow();
}
@Override
public void moveToCurrentRow() throws SQLException {
    resultSet.moveToCurrentRow();
}
/**
 * Returns the statement that produced this result set.
 *
 * <p>Unlike every other method in this wrapper, this does NOT delegate to
 * the wrapped ResultSet: it returns {@code originalStatement}, i.e. the
 * statement object the caller originally executed, so the proxy chain is
 * preserved rather than exposing the underlying driver's statement.
 */
@Override
public Statement getStatement() throws SQLException {
    return this.originalStatement;
}
// --- SQL-type getters (Ref/Blob/Clob/Array/URL) and Calendar-aware
// temporal getters, by index and by label. All pure delegations. ---
@Override
public Object getObject(int columnIndex, Map<String, Class<?>> map) throws SQLException {
    return resultSet.getObject(columnIndex, map);
}
@Override
public Ref getRef(int columnIndex) throws SQLException {
    return resultSet.getRef(columnIndex);
}
@Override
public Blob getBlob(int columnIndex) throws SQLException {
    return resultSet.getBlob(columnIndex);
}
@Override
public Clob getClob(int columnIndex) throws SQLException {
    return resultSet.getClob(columnIndex);
}
@Override
public Array getArray(int columnIndex) throws SQLException {
    return resultSet.getArray(columnIndex);
}
@Override
public Object getObject(String columnLabel, Map<String, Class<?>> map) throws SQLException {
    return resultSet.getObject(columnLabel, map);
}
@Override
public Ref getRef(String columnLabel) throws SQLException {
    return resultSet.getRef(columnLabel);
}
@Override
public Blob getBlob(String columnLabel) throws SQLException {
    return resultSet.getBlob(columnLabel);
}
@Override
public Clob getClob(String columnLabel) throws SQLException {
    return resultSet.getClob(columnLabel);
}
@Override
public Array getArray(String columnLabel) throws SQLException {
    return resultSet.getArray(columnLabel);
}
@Override
public Date getDate(int columnIndex, Calendar cal) throws SQLException {
    return resultSet.getDate(columnIndex, cal);
}
@Override
public Date getDate(String columnLabel, Calendar cal) throws SQLException {
    return resultSet.getDate(columnLabel, cal);
}
@Override
public Time getTime(int columnIndex, Calendar cal) throws SQLException {
    return resultSet.getTime(columnIndex, cal);
}
@Override
public Time getTime(String columnLabel, Calendar cal) throws SQLException {
    return resultSet.getTime(columnLabel, cal);
}
@Override
public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException {
    return resultSet.getTimestamp(columnIndex, cal);
}
@Override
public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException {
    return resultSet.getTimestamp(columnLabel, cal);
}
@Override
public URL getURL(int columnIndex) throws SQLException {
    return resultSet.getURL(columnIndex);
}
@Override
public URL getURL(String columnLabel) throws SQLException {
    return resultSet.getURL(columnLabel);
}
// --- SQL-type updaters (Ref/Blob/Clob/Array/RowId) plus holdability
// and closed-state queries. All pure delegations. ---
@Override
public void updateRef(int columnIndex, Ref x) throws SQLException {
    resultSet.updateRef(columnIndex, x);
}
@Override
public void updateRef(String columnLabel, Ref x) throws SQLException {
    resultSet.updateRef(columnLabel, x);
}
@Override
public void updateBlob(int columnIndex, Blob x) throws SQLException {
    resultSet.updateBlob(columnIndex, x);
}
@Override
public void updateBlob(String columnLabel, Blob x) throws SQLException {
    resultSet.updateBlob(columnLabel, x);
}
@Override
public void updateClob(int columnIndex, Clob x) throws SQLException {
    resultSet.updateClob(columnIndex, x);
}
@Override
public void updateClob(String columnLabel, Clob x) throws SQLException {
    resultSet.updateClob(columnLabel, x);
}
@Override
public void updateArray(int columnIndex, Array x) throws SQLException {
    resultSet.updateArray(columnIndex, x);
}
@Override
public void updateArray(String columnLabel, Array x) throws SQLException {
    resultSet.updateArray(columnLabel, x);
}
@Override
public RowId getRowId(int columnIndex) throws SQLException {
    return resultSet.getRowId(columnIndex);
}
@Override
public RowId getRowId(String columnLabel) throws SQLException {
    return resultSet.getRowId(columnLabel);
}
@Override
public void updateRowId(int columnIndex, RowId x) throws SQLException {
    resultSet.updateRowId(columnIndex, x);
}
@Override
public void updateRowId(String columnLabel, RowId x) throws SQLException {
    resultSet.updateRowId(columnLabel, x);
}
@Override
public int getHoldability() throws SQLException {
    return resultSet.getHoldability();
}
// Reports the wrapped ResultSet's closed state; this wrapper keeps no
// open/closed state of its own.
@Override
public boolean isClosed() throws SQLException {
    return resultSet.isClosed();
}
// --- National-character (NString/NClob/NCharacterStream) and SQLXML
// accessors, by index and by label. All pure delegations. ---
@Override
public void updateNString(int columnIndex, String nString) throws SQLException {
    resultSet.updateNString(columnIndex, nString);
}
@Override
public void updateNString(String columnLabel, String nString) throws SQLException {
    resultSet.updateNString(columnLabel, nString);
}
@Override
public void updateNClob(int columnIndex, NClob nClob) throws SQLException {
    resultSet.updateNClob(columnIndex, nClob);
}
@Override
public void updateNClob(String columnLabel, NClob nClob) throws SQLException {
    resultSet.updateNClob(columnLabel, nClob);
}
@Override
public NClob getNClob(int columnIndex) throws SQLException {
    return resultSet.getNClob(columnIndex);
}
@Override
public NClob getNClob(String columnLabel) throws SQLException {
    return resultSet.getNClob(columnLabel);
}
@Override
public SQLXML getSQLXML(int columnIndex) throws SQLException {
    return resultSet.getSQLXML(columnIndex);
}
@Override
public SQLXML getSQLXML(String columnLabel) throws SQLException {
    return resultSet.getSQLXML(columnLabel);
}
@Override
public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException {
    resultSet.updateSQLXML(columnIndex, xmlObject);
}
@Override
public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException {
    resultSet.updateSQLXML(columnLabel, xmlObject);
}
@Override
public String getNString(int columnIndex) throws SQLException {
    return resultSet.getNString(columnIndex);
}
@Override
public String getNString(String columnLabel) throws SQLException {
    return resultSet.getNString(columnLabel);
}
@Override
public Reader getNCharacterStream(int columnIndex) throws SQLException {
    return resultSet.getNCharacterStream(columnIndex);
}
@Override
public Reader getNCharacterStream(String columnLabel) throws SQLException {
    return resultSet.getNCharacterStream(columnLabel);
}
// --- Stream-based updaters (long-length variants, then the unbounded
// variants) and the typed getObject overloads. All pure delegations. ---
@Override
public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
    resultSet.updateNCharacterStream(columnIndex, x, length);
}
@Override
public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException {
    resultSet.updateNCharacterStream(columnLabel, reader, length);
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException {
    resultSet.updateAsciiStream(columnIndex, x, length);
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException {
    resultSet.updateBinaryStream(columnIndex, x, length);
}
@Override
public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
    resultSet.updateCharacterStream(columnIndex, x, length);
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException {
    resultSet.updateAsciiStream(columnLabel, x, length);
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException {
    resultSet.updateBinaryStream(columnLabel, x, length);
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException {
    resultSet.updateCharacterStream(columnLabel, reader, length);
}
@Override
public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException {
    resultSet.updateBlob(columnIndex, inputStream, length);
}
@Override
public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException {
    resultSet.updateBlob(columnLabel, inputStream, length);
}
@Override
public void updateClob(int columnIndex, Reader reader, long length) throws SQLException {
    resultSet.updateClob(columnIndex, reader, length);
}
@Override
public void updateClob(String columnLabel, Reader reader, long length) throws SQLException {
    resultSet.updateClob(columnLabel, reader, length);
}
@Override
public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException {
    resultSet.updateNClob(columnIndex, reader, length);
}
@Override
public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException {
    resultSet.updateNClob(columnLabel, reader, length);
}
// Length-unbounded stream updaters (JDBC 4.0+), delegated unchanged.
@Override
public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException {
    resultSet.updateNCharacterStream(columnIndex, x);
}
@Override
public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException {
    resultSet.updateNCharacterStream(columnLabel, reader);
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException {
    resultSet.updateAsciiStream(columnIndex, x);
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException {
    resultSet.updateBinaryStream(columnIndex, x);
}
@Override
public void updateCharacterStream(int columnIndex, Reader x) throws SQLException {
    resultSet.updateCharacterStream(columnIndex, x);
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException {
    resultSet.updateAsciiStream(columnLabel, x);
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException {
    resultSet.updateBinaryStream(columnLabel, x);
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException {
    resultSet.updateCharacterStream(columnLabel, reader);
}
@Override
public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException {
    resultSet.updateBlob(columnIndex, inputStream);
}
@Override
public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException {
    resultSet.updateBlob(columnLabel, inputStream);
}
@Override
public void updateClob(int columnIndex, Reader reader) throws SQLException {
    resultSet.updateClob(columnIndex, reader);
}
@Override
public void updateClob(String columnLabel, Reader reader) throws SQLException {
    resultSet.updateClob(columnLabel, reader);
}
@Override
public void updateNClob(int columnIndex, Reader reader) throws SQLException {
    resultSet.updateNClob(columnIndex, reader);
}
@Override
public void updateNClob(String columnLabel, Reader reader) throws SQLException {
    resultSet.updateNClob(columnLabel, reader);
}
// Typed getObject (JDBC 4.1); conversion semantics are whatever the
// wrapped driver implements.
@Override
public <T> T getObject(int columnIndex, Class<T> type) throws SQLException {
    return resultSet.getObject(columnIndex, type);
}
@Override
public <T> T getObject(String columnLabel, Class<T> type) throws SQLException {
    return resultSet.getObject(columnLabel, type);
}
}
| |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skylarkbuildapi.android;
import com.google.devtools.build.lib.skylarkbuildapi.FileApi;
import com.google.devtools.build.lib.skylarkbuildapi.FilesToRunProviderApi;
import com.google.devtools.build.lib.skylarkbuildapi.ProviderApi;
import com.google.devtools.build.lib.skylarkbuildapi.StructApi;
import com.google.devtools.build.lib.skylarkbuildapi.TransitiveInfoCollectionApi;
import com.google.devtools.build.lib.skylarkinterface.Param;
import com.google.devtools.build.lib.skylarkinterface.SkylarkCallable;
import com.google.devtools.build.lib.skylarkinterface.SkylarkConstructor;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.lib.syntax.EvalException;
import javax.annotation.Nullable;
/**
 * Configured targets implementing this provider can contribute Android Sdk information to the
 * compilation.
 *
 * <p>All accessors are exposed to Starlark as struct fields; getters annotated with
 * {@code allowReturnNones = true} and {@code @Nullable} may legitimately return null.
 */
@SkylarkModule(
    name = "AndroidSdkInfo",
    doc =
        "Do not use this module. It is intended for migration purposes only. If you depend on it, "
            + "you will be broken when it is removed.",
    documented = false)
public interface AndroidSdkProviderApi<
        FileT extends FileApi,
        FilesToRunProviderT extends FilesToRunProviderApi<FileT>,
        TransT extends TransitiveInfoCollectionApi>
    extends StructApi {

  /** Name of this info object. */
  String NAME = "AndroidSdkInfo";

  /**
   * The value of build_tools_version. May be null or empty.
   *
   * <p>NOTE(review): the javadoc says this may be null, yet the callable is not marked
   * {@code allowReturnNones = true} / {@code @Nullable} like the other nullable getters
   * below — confirm against the implementation before relying on non-null here.
   */
  @SkylarkCallable(name = "build_tools_version", structField = true, doc = "", documented = false)
  String getBuildToolsVersion();

  /** The framework AIDL artifact, or null. */
  @SkylarkCallable(
      name = "framework_aidl",
      structField = true,
      doc = "",
      documented = false,
      allowReturnNones = true)
  FileT getFrameworkAidl();

  /** The AIDL lib target, or null. */
  @SkylarkCallable(
      name = "aidl_lib",
      structField = true,
      doc = "",
      documented = false,
      allowReturnNones = true)
  @Nullable
  TransT getAidlLib();

  /** The android.jar artifact. */
  @SkylarkCallable(name = "android_jar", structField = true, doc = "", documented = false)
  FileT getAndroidJar();

  /** The source.properties artifact, or null. */
  @SkylarkCallable(
      name = "source_properties",
      structField = true,
      doc = "",
      documented = false,
      allowReturnNones = true)
  @Nullable
  FileT getSourceProperties();

  /** The shrunk android.jar artifact. */
  @SkylarkCallable(name = "shrinked_android_jar", structField = true, doc = "", documented = false)
  FileT getShrinkedAndroidJar();

  /** The main dex classes artifact. */
  @SkylarkCallable(name = "main_dex_classes", structField = true, doc = "", documented = false)
  FileT getMainDexClasses();

  // Executable tools bundled with the SDK, each exposed as a FilesToRunProvider.
  @SkylarkCallable(name = "adb", structField = true, doc = "", documented = false)
  FilesToRunProviderT getAdb();

  @SkylarkCallable(name = "dx", structField = true, doc = "", documented = false)
  FilesToRunProviderT getDx();

  @SkylarkCallable(name = "main_dex_list_creator", structField = true, doc = "", documented = false)
  FilesToRunProviderT getMainDexListCreator();

  @SkylarkCallable(name = "aidl", structField = true, doc = "", documented = false)
  FilesToRunProviderT getAidl();

  @SkylarkCallable(name = "aapt", structField = true, doc = "", documented = false)
  FilesToRunProviderT getAapt();

  @SkylarkCallable(
      name = "aapt2",
      structField = true,
      doc = "",
      documented = false,
      allowReturnNones = true)
  @Nullable
  FilesToRunProviderT getAapt2();

  @SkylarkCallable(
      name = "apk_builder",
      structField = true,
      doc = "",
      documented = false,
      allowReturnNones = true)
  @Nullable
  FilesToRunProviderT getApkBuilder();

  @SkylarkCallable(name = "apk_signer", structField = true, doc = "", documented = false)
  FilesToRunProviderT getApkSigner();

  @SkylarkCallable(name = "proguard", structField = true, doc = "", documented = false)
  FilesToRunProviderT getProguard();

  // NOTE(review): Starlark field name "zip_align" differs from the constructor
  // parameter "zipalign" below — kept as-is since both are established API surface.
  @SkylarkCallable(name = "zip_align", structField = true, doc = "", documented = false)
  FilesToRunProviderT getZipalign();

  /** The provider implementing this can construct the AndroidSdkInfo provider. */
  @SkylarkModule(
      name = "Provider",
      doc =
          "Do not use this module. It is intended for migration purposes only. If you depend on "
              + "it, you will be broken when it is removed.",
      documented = false)
  interface Provider<
          FileT extends FileApi,
          FilesToRunProviderT extends FilesToRunProviderApi<FileT>,
          TransT extends TransitiveInfoCollectionApi>
      extends ProviderApi {

    /**
     * Constructs an AndroidSdkInfo from positional arguments; parameters marked
     * noneable accept Starlark None and arrive here as {@code Object}.
     *
     * @throws EvalException if any argument fails validation
     */
    @SkylarkCallable(
        name = NAME,
        doc = "The <code>AndroidSdkInfo</code> constructor.",
        documented = false,
        parameters = {
          @Param(
              name = "build_tools_version",
              doc = "A string of the build tools version.",
              positional = true,
              named = false,
              type = String.class),
          @Param(
              name = "framework_aidl",
              doc = "An artifact of the AIDL framework.",
              positional = true,
              named = false,
              type = FileApi.class),
          @Param(
              name = "aidl_lib",
              doc = "A transitive info collection of the AIDL lib.",
              positional = true,
              named = false,
              type = TransitiveInfoCollectionApi.class,
              noneable = true),
          @Param(
              name = "android_jar",
              doc = "An artifact of the Android Jar.",
              positional = true,
              named = false,
              type = FileApi.class),
          // NOTE(review): this parameter's name is camelCase while every sibling is
          // snake_case; it is positional-only (named = false), so the name is not
          // caller-visible, but consider aligning it with the others.
          @Param(
              name = "sourceProperties",
              // Fixed copy-paste error: previously described as "An artifact of the AIDL lib."
              doc = "An artifact of the source properties file.",
              positional = true,
              named = false,
              type = FileApi.class,
              noneable = true),
          @Param(
              name = "shrinked_android_jar",
              doc = "An artifact of the shrunk Android Jar.",
              positional = true,
              named = false,
              type = FileApi.class),
          @Param(
              name = "main_dex_classes",
              doc = "An artifact of the main dex classes.",
              positional = true,
              named = false,
              type = FileApi.class),
          @Param(
              name = "adb",
              doc = "A files to run provider of ADB.",
              positional = true,
              named = false,
              type = FilesToRunProviderApi.class),
          @Param(
              name = "dx",
              doc = "A files to run provider of Dx.",
              positional = true,
              named = false,
              type = FilesToRunProviderApi.class),
          @Param(
              name = "main_dex_list_creator",
              doc = "A files to run provider of the main dex list creator.",
              positional = true,
              named = false,
              type = FilesToRunProviderApi.class),
          @Param(
              name = "aidl",
              doc = "A files to run provider of AIDL.",
              positional = true,
              named = false,
              type = FilesToRunProviderApi.class),
          @Param(
              name = "aapt",
              doc = "A files to run provider of AAPT.",
              positional = true,
              named = false,
              type = FilesToRunProviderApi.class),
          @Param(
              name = "aapt2",
              doc = "A files to run provider of AAPT2.",
              positional = true,
              named = false,
              type = FilesToRunProviderApi.class,
              noneable = true),
          @Param(
              name = "apk_builder",
              doc = "A files to run provider of the Apk builder.",
              positional = true,
              named = false,
              type = FilesToRunProviderApi.class,
              noneable = true),
          @Param(
              name = "apk_signer",
              doc = "A files to run provider of the Apk signer.",
              positional = true,
              named = false,
              type = FilesToRunProviderApi.class),
          @Param(
              name = "proguard",
              doc = "A files to run provider of Proguard.",
              positional = true,
              named = false,
              type = FilesToRunProviderApi.class),
          @Param(
              name = "zipalign",
              doc = "A files to run provider of Zipalign.",
              positional = true,
              named = false,
              type = FilesToRunProviderApi.class),
        },
        selfCall = true)
    @SkylarkConstructor(objectType = AndroidSdkProviderApi.class)
    AndroidSdkProviderApi<FileT, FilesToRunProviderT, TransT> createInfo(
        String buildToolsVersion,
        FileT frameworkAidl,
        /*noneable*/ Object aidlLib,
        FileT androidJar,
        /*noneable*/ Object sourceProperties,
        FileT shrinkedAndroidJar,
        FileT mainDexClasses,
        FilesToRunProviderT adb,
        FilesToRunProviderT dx,
        FilesToRunProviderT mainDexListCreator,
        FilesToRunProviderT aidl,
        FilesToRunProviderT aapt,
        /*noneable*/ Object aapt2,
        /*noneable*/ Object apkBuilder,
        FilesToRunProviderT apkSigner,
        FilesToRunProviderT proguard,
        FilesToRunProviderT zipalign)
        throws EvalException;
  }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.support;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.analysis.common.CommonAnalysisPlugin;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.reindex.ReindexPlugin;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.license.LicenseService;
import org.elasticsearch.persistent.PersistentTasksClusterService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.MockHttpTransport;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.action.util.QueryPage;
import org.elasticsearch.xpack.core.ilm.LifecycleSettings;
import org.elasticsearch.xpack.core.ml.MachineLearningField;
import org.elasticsearch.xpack.core.ml.action.CloseJobAction;
import org.elasticsearch.xpack.core.ml.action.DeleteDataFrameAnalyticsAction;
import org.elasticsearch.xpack.core.ml.action.DeleteDatafeedAction;
import org.elasticsearch.xpack.core.ml.action.DeleteJobAction;
import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsAction;
import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsStatsAction;
import org.elasticsearch.xpack.core.ml.action.GetDatafeedsAction;
import org.elasticsearch.xpack.core.ml.action.GetDatafeedsStatsAction;
import org.elasticsearch.xpack.core.ml.action.GetJobsAction;
import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction;
import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsState;
import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.JobState;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.ilm.IndexLifecycle;
import org.elasticsearch.xpack.ml.LocalStateMachineLearning;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.monitoring.MonitoringService;
import org.junit.After;
import org.junit.Before;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import static org.hamcrest.Matchers.equalTo;
/**
 * A base class for testing datafeed and job lifecycle specifics.
 *
 * Note: for other types of integration tests you should use the external test cluster created by the Gradle
 * integTest task. Tests extending this base class run with the non-native autodetect process
 * (the native process is disabled via {@code MachineLearningField.AUTODETECT_PROCESS}).
 */
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0, supportsDedicatedMasters = false)
public abstract class BaseMlIntegTestCase extends ESIntegTestCase {

    @Override
    protected boolean ignoreExternalCluster() {
        // These tests always run against the internal test cluster.
        return true;
    }

    /**
     * Per-node settings: ML enabled but with the native autodetect process turned off, a self-generated
     * trial license, and security/watcher/graph/monitoring/ILM-history disabled to keep the cluster lean.
     */
    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal));
        settings.put(MachineLearningField.AUTODETECT_PROCESS.getKey(), false);
        settings.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), true);
        settings.put(XPackSettings.SECURITY_ENABLED.getKey(), false);
        settings.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial");
        settings.put(XPackSettings.WATCHER_ENABLED.getKey(), false);
        settings.put(XPackSettings.GRAPH_ENABLED.getKey(), false);
        settings.put(MonitoringService.ENABLED.getKey(), false);
        settings.put(MonitoringService.ELASTICSEARCH_COLLECTION_ENABLED.getKey(), false);
        settings.put(LifecycleSettings.LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING.getKey(), false);
        return settings.build();
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Arrays.asList(
                LocalStateMachineLearning.class,
                CommonAnalysisPlugin.class,
                ReindexPlugin.class,
                // ILM is required for .ml-state template index settings
                IndexLifecycle.class);
    }

    @Override
    protected Collection<Class<? extends Plugin>> getMockPlugins() {
        return Arrays.asList(TestSeedPlugin.class, MockHttpTransport.TestPlugin.class);
    }

    /**
     * Waits (up to 20 seconds) until all ML index templates have been installed in the cluster state,
     * so tests don't race against asynchronous template installation.
     */
    @Before
    public void ensureTemplatesArePresent() throws Exception {
        assertBusy(() -> {
            ClusterState state = client().admin().cluster().prepareState().get().getState();
            assertTrue("Timed out waiting for the ML templates to be installed",
                    MachineLearning.allTemplatesInstalled(state));
        }, 20, TimeUnit.SECONDS);
    }

    /** Creates a simple count-detector job builder with the given id and no explicit memory limit. */
    protected Job.Builder createJob(String id) {
        return createJob(id, null);
    }

    /** Creates a simple count-detector job builder with the given id and optional model memory limit. */
    protected Job.Builder createJob(String id, ByteSizeValue modelMemoryLimit) {
        return createJob(id, modelMemoryLimit, false);
    }

    /**
     * Creates a simple count-detector job builder.
     *
     * @param id               the job id
     * @param modelMemoryLimit optional model memory limit; {@code null} means use the default
     * @param allowLazyOpen    whether the job may be opened lazily when no capacity is available
     */
    protected Job.Builder createJob(String id, ByteSizeValue modelMemoryLimit, boolean allowLazyOpen) {
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setFormat(DataDescription.DataFormat.XCONTENT);
        dataDescription.setTimeFormat(DataDescription.EPOCH_MS);

        Detector.Builder d = new Detector.Builder("count", null);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build()));

        Job.Builder builder = new Job.Builder();
        builder.setId(id);
        if (modelMemoryLimit != null) {
            builder.setAnalysisLimits(new AnalysisLimits(modelMemoryLimit.getMb(), null));
        }
        builder.setAnalysisConfig(analysisConfig);
        builder.setDataDescription(dataDescription);
        builder.setAllowLazyOpen(allowLazyOpen);
        return builder;
    }

    /** Creates a "fare quote" style job (metric detector on responsetime) with no explicit memory limit. */
    public static Job.Builder createFareQuoteJob(String id) {
        return createFareQuoteJob(id, null);
    }

    /**
     * Creates a "fare quote" style job: a metric detector on the {@code responsetime} field, split by
     * {@code by_field_name}, with a 1 hour bucket span and an epoch-seconds {@code time} field.
     */
    public static Job.Builder createFareQuoteJob(String id, ByteSizeValue modelMemoryLimit) {
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setFormat(DataDescription.DataFormat.XCONTENT);
        dataDescription.setTimeFormat(DataDescription.EPOCH);
        dataDescription.setTimeField("time");

        Detector.Builder d = new Detector.Builder("metric", "responsetime");
        d.setByFieldName("by_field_name");
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build()));
        analysisConfig.setBucketSpan(TimeValue.timeValueHours(1));

        Job.Builder builder = new Job.Builder();
        builder.setId(id);
        if (modelMemoryLimit != null) {
            builder.setAnalysisLimits(new AnalysisLimits(modelMemoryLimit.getMb(), null));
        }
        builder.setAnalysisConfig(analysisConfig);
        builder.setDataDescription(dataDescription);
        return builder;
    }

    /**
     * Creates a count-detector job suitable for use with a datafeed: 1 hour bucket span and a
     * {@code yyyy-MM-dd HH:mm:ss} time format.
     */
    public static Job.Builder createScheduledJob(String jobId) {
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setFormat(DataDescription.DataFormat.XCONTENT);
        dataDescription.setTimeFormat("yyyy-MM-dd HH:mm:ss");

        Detector.Builder d = new Detector.Builder("count", null);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build()));
        analysisConfig.setBucketSpan(TimeValue.timeValueHours(1));

        Job.Builder builder = new Job.Builder();
        builder.setId(jobId);
        builder.setAnalysisConfig(analysisConfig);
        builder.setDataDescription(dataDescription);
        return builder;
    }

    /** Creates a datafeed config with a default 1 second frequency. */
    public static DatafeedConfig createDatafeed(String datafeedId, String jobId, List<String> indices) {
        return createDatafeed(datafeedId, jobId, indices, TimeValue.timeValueSeconds(1));
    }

    /** Creates a datafeed config for the given job over the given indices with the given frequency. */
    public static DatafeedConfig createDatafeed(String datafeedId, String jobId, List<String> indices, TimeValue frequency) {
        return createDatafeedBuilder(datafeedId, jobId, indices, frequency).build();
    }

    /** Creates a datafeed builder with a default 1 second frequency. */
    public static DatafeedConfig.Builder createDatafeedBuilder(String datafeedId, String jobId, List<String> indices) {
        return createDatafeedBuilder(datafeedId, jobId, indices, TimeValue.timeValueSeconds(1));
    }

    /** Creates a datafeed builder with a 1 second query delay and the given frequency over the given indices. */
    public static DatafeedConfig.Builder createDatafeedBuilder(String datafeedId, String jobId, List<String> indices, TimeValue frequency) {
        DatafeedConfig.Builder builder = new DatafeedConfig.Builder(datafeedId, jobId);
        builder.setQueryDelay(TimeValue.timeValueSeconds(1));
        builder.setFrequency(frequency);
        builder.setIndices(indices);
        return builder;
    }

    /**
     * Cleans up datafeeds, jobs and data frame analytics after each test, then waits until no shard
     * recoveries are still active so the next test starts from a quiet cluster.
     */
    @After
    public void cleanupWorkaround() throws Exception {
        logger.info("[{}#{}]: Cleaning up datafeeds and jobs after test", getTestClass().getSimpleName(), getTestName());
        deleteAllDatafeeds(logger, client());
        deleteAllJobs(logger, client());
        deleteAllDataFrameAnalytics(client());
        assertBusy(() -> {
            RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries()
                    .setActiveOnly(true)
                    .get();
            for (List<RecoveryState> recoveryStates : recoveryResponse.shardRecoveryStates().values()) {
                assertThat(recoveryStates.size(), equalTo(0));
            }
        });
    }

    /**
     * Bulk-indexes {@code numDocs} documents into {@code index}, each with a random timestamp in
     * {@code [start, end)} stored in both the {@code time} and {@code @timestamp} fields.
     * Fails the test if any bulk item fails.
     */
    public static void indexDocs(Logger logger, String index, long numDocs, long start, long end) {
        int maxDelta = (int) (end - start - 1);
        BulkRequestBuilder bulkRequestBuilder = client().prepareBulk();
        for (int i = 0; i < numDocs; i++) {
            IndexRequest indexRequest = new IndexRequest(index);
            long timestamp = start + randomIntBetween(0, maxDelta);
            assert timestamp >= start && timestamp < end;
            indexRequest.source("time", timestamp, "@timestamp", timestamp).opType(DocWriteRequest.OpType.CREATE);
            bulkRequestBuilder.add(indexRequest);
        }
        BulkResponse bulkResponse = bulkRequestBuilder
                .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
                .get();
        if (bulkResponse.hasFailures()) {
            int failures = 0;
            for (BulkItemResponse itemResponse : bulkResponse) {
                if (itemResponse.isFailed()) {
                    failures++;
                    logger.error("Item response failure [{}]", itemResponse.getFailureMessage());
                }
            }
            fail("Bulk response contained " + failures + " failures");
        }
        logger.info("Indexed [{}] documents", numDocs);
    }

    /** Returns the stats for the given job, or {@code null} if the job stats response is empty. */
    public static GetJobsStatsAction.Response.JobStats getJobStats(String jobId) {
        GetJobsStatsAction.Request request = new GetJobsStatsAction.Request(jobId);
        GetJobsStatsAction.Response response = client().execute(GetJobsStatsAction.INSTANCE, request).actionGet();
        if (response.getResponse().results().isEmpty()) {
            return null;
        } else {
            return response.getResponse().results().get(0);
        }
    }

    /** Returns the data counts for the given job, or empty counts if no stats are available. */
    public static DataCounts getDataCounts(String jobId) {
        GetJobsStatsAction.Response.JobStats jobStats = getJobStats(jobId);
        if (jobStats != null) {
            return jobStats.getDataCounts();
        } else {
            return new DataCounts(jobId);
        }
    }

    /** Returns the stats for the given datafeed, or {@code null} if the stats response is empty. */
    public static GetDatafeedsStatsAction.Response.DatafeedStats getDatafeedStats(String datafeedId) {
        GetDatafeedsStatsAction.Request request = new GetDatafeedsStatsAction.Request(datafeedId);
        GetDatafeedsStatsAction.Response response = client().execute(GetDatafeedsStatsAction.INSTANCE, request).actionGet();
        if (response.getResponse().results().isEmpty()) {
            return null;
        } else {
            return response.getResponse().results().get(0);
        }
    }

    /**
     * Stops and deletes all datafeeds using the given client. Tries a normal stop first; on failure,
     * attempts a force-stop and then throws, since having to force-stop indicates something went wrong.
     */
    public static void deleteAllDatafeeds(Logger logger, Client client) throws Exception {
        final QueryPage<DatafeedConfig> datafeeds =
            client.execute(GetDatafeedsAction.INSTANCE, new GetDatafeedsAction.Request(GetDatafeedsAction.ALL)).actionGet().getResponse();
        try {
            logger.info("Closing all datafeeds (using _all)");
            StopDatafeedAction.Response stopResponse = client
                    .execute(StopDatafeedAction.INSTANCE, new StopDatafeedAction.Request("_all"))
                    .get();
            assertTrue(stopResponse.isStopped());
        } catch (ExecutionException e1) {
            try {
                StopDatafeedAction.Request request = new StopDatafeedAction.Request("_all");
                request.setForce(true);
                StopDatafeedAction.Response stopResponse = client
                        .execute(StopDatafeedAction.INSTANCE, request).get();
                assertTrue(stopResponse.isStopped());
            } catch (ExecutionException e2) {
                logger.warn("Force-stopping datafeed with _all failed.", e2);
            }
            throw new RuntimeException(
                    "Had to resort to force-stopping datafeed, something went wrong?", e1);
        }

        for (final DatafeedConfig datafeed : datafeeds.results()) {
            assertBusy(() -> {
                try {
                    GetDatafeedsStatsAction.Request request = new GetDatafeedsStatsAction.Request(datafeed.getId());
                    GetDatafeedsStatsAction.Response r = client.execute(GetDatafeedsStatsAction.INSTANCE, request).get();
                    assertThat(r.getResponse().results().get(0).getDatafeedState(), equalTo(DatafeedState.STOPPED));
                } catch (InterruptedException | ExecutionException e) {
                    throw new RuntimeException(e);
                }
            });
            AcknowledgedResponse deleteResponse =
                    client.execute(DeleteDatafeedAction.INSTANCE, new DeleteDatafeedAction.Request(datafeed.getId())).get();
            assertTrue(deleteResponse.isAcknowledged());
        }
    }

    /**
     * Closes and deletes all jobs using the given client. Tries a normal close first; on failure,
     * attempts a force-close and then throws, since having to force-close indicates something went wrong.
     */
    public static void deleteAllJobs(Logger logger, Client client) throws Exception {
        final QueryPage<Job> jobs =
            client.execute(GetJobsAction.INSTANCE, new GetJobsAction.Request(Metadata.ALL)).actionGet().getResponse();

        try {
            CloseJobAction.Request closeRequest = new CloseJobAction.Request(Metadata.ALL);
            // This usually takes a lot less than 90 seconds, but has been observed to be very slow occasionally
            // in CI and a 90 second timeout will avoid the cost of investigating these intermittent failures.
            // See https://github.com/elastic/elasticsearch/issues/48511
            closeRequest.setCloseTimeout(TimeValue.timeValueSeconds(90L));
            logger.info("Closing jobs using [{}]", Metadata.ALL);
            CloseJobAction.Response response = client.execute(CloseJobAction.INSTANCE, closeRequest).get();
            assertTrue(response.isClosed());
        } catch (Exception e1) {
            try {
                CloseJobAction.Request closeRequest = new CloseJobAction.Request(Metadata.ALL);
                closeRequest.setForce(true);
                closeRequest.setCloseTimeout(TimeValue.timeValueSeconds(30L));
                CloseJobAction.Response response = client.execute(CloseJobAction.INSTANCE, closeRequest).get();
                assertTrue(response.isClosed());
            } catch (Exception e2) {
                logger.warn("Force-closing jobs failed.", e2);
            }
            throw new RuntimeException("Had to resort to force-closing job, something went wrong?", e1);
        }

        for (final Job job : jobs.results()) {
            assertBusy(() -> {
                // Use the supplied client, consistent with the rest of this method (was the static client()).
                GetJobsStatsAction.Response statsResponse =
                    client.execute(GetJobsStatsAction.INSTANCE, new GetJobsStatsAction.Request(job.getId())).actionGet();
                assertEquals(JobState.CLOSED, statsResponse.getResponse().results().get(0).getState());
            });
            AcknowledgedResponse response =
                    client.execute(DeleteJobAction.INSTANCE, new DeleteJobAction.Request(job.getId())).get();
            assertTrue(response.isAcknowledged());
        }
    }

    /**
     * Deletes all data frame analytics using the given client, after waiting for them all to be STOPPED.
     */
    public static void deleteAllDataFrameAnalytics(Client client) throws Exception {
        final QueryPage<DataFrameAnalyticsConfig> analytics =
            client.execute(GetDataFrameAnalyticsAction.INSTANCE,
                new GetDataFrameAnalyticsAction.Request("_all")).get().getResources();

        assertBusy(() -> {
            // Use the supplied client, consistent with the rest of this method (was the static client()).
            GetDataFrameAnalyticsStatsAction.Response statsResponse =
                client.execute(GetDataFrameAnalyticsStatsAction.INSTANCE, new GetDataFrameAnalyticsStatsAction.Request("_all")).get();
            assertTrue(statsResponse.getResponse().results().stream().allMatch(s -> s.getState().equals(DataFrameAnalyticsState.STOPPED)));
        });
        for (final DataFrameAnalyticsConfig config : analytics.results()) {
            client.execute(DeleteDataFrameAnalyticsAction.INSTANCE, new DeleteDataFrameAnalyticsAction.Request(config.getId())).actionGet();
        }
    }

    /**
     * Invokes an async action and blocks until its listener fires, storing the result (or the failure)
     * in the supplied references.
     *
     * @param function the async call to make, given the listener to notify
     * @param response receives the successful result, if any
     * @param error    receives the failure, if any
     */
    protected static <T> void blockingCall(Consumer<ActionListener<T>> function,
                                           AtomicReference<T> response,
                                           AtomicReference<Exception> error) throws InterruptedException {
        CountDownLatch latch = new CountDownLatch(1);
        ActionListener<T> listener = ActionListener.wrap(
            r -> {
                response.set(r);
                latch.countDown();
            },
            e -> {
                error.set(e);
                latch.countDown();
            }
        );

        function.accept(listener);
        latch.await();
    }

    /**
     * Waits for the given job to be OPENED and assigned to a node, and returns the name of that node.
     * Stats are queried via the node named {@code queryNode}.
     */
    protected String awaitJobOpenedAndAssigned(String jobId, String queryNode) throws Exception {

        PersistentTasksClusterService persistentTasksClusterService =
            internalCluster().getInstance(PersistentTasksClusterService.class, internalCluster().getMasterName(queryNode));
        // Speed up rechecks to a rate that is quicker than what settings would allow.
        // The check would work eventually without doing this, but the assertBusy() below
        // would need to wait 30 seconds, which would make the test run very slowly.
        // The 1 second refresh puts a greater burden on the master node to recheck
        // persistent tasks, but it will cope in these tests as it's not doing much
        // else.
        persistentTasksClusterService.setRecheckInterval(TimeValue.timeValueSeconds(1));

        AtomicReference<String> jobNode = new AtomicReference<>();
        assertBusy(() -> {
            GetJobsStatsAction.Response statsResponse =
                client(queryNode).execute(GetJobsStatsAction.INSTANCE, new GetJobsStatsAction.Request(jobId)).actionGet();
            GetJobsStatsAction.Response.JobStats jobStats = statsResponse.getResponse().results().get(0);
            assertEquals(JobState.OPENED, jobStats.getState());
            assertNotNull(jobStats.getNode());
            jobNode.set(jobStats.getNode().getName());
        });
        return jobNode.get();
    }

    /**
     * Sets delayed allocation to 0 on all ML indices to make sure tests are not delayed
     * waiting for shards to be reallocated after a node leaves.
     */
    protected void setMlIndicesDelayedNodeLeftTimeoutToZero() {
        client().admin().indices().updateSettings(new UpdateSettingsRequest(".ml-*")
                .settings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0).build()))
                .actionGet();
    }
}
| |
package gov.nih.nci.cagrid.gridgrouper.common;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.GrantPrivilegeFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupAddFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupDeleteFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupModifyFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.MemberAddFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.MemberDeleteFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.RevokePrivilegeFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.SchemaFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.StemAddFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.StemDeleteFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.StemModifyFault;
import gov.nih.nci.cagrid.gridgrouper.stubs.types.StemNotFoundFault;
import java.rmi.RemoteException;
/**
* GridGrouper Service
*
* This class is autogenerated, DO NOT EDIT.
*
* @created by Introduce Toolkit version 1.0
*/
public interface GridGrouperI {
public gov.nih.nci.cagrid.metadata.security.ServiceSecurityMetadata getServiceSecurityMetadata() throws RemoteException ;
/**
* Obtains a specified stem.
*
* @param stem
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws StemNotFoundFault
* The stem specified could not be found.
*/
public gov.nih.nci.cagrid.gridgrouper.bean.StemDescriptor getStem(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier stem) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemNotFoundFault ;
/**
* Obtains a list of child stems for a specified parent stem.
*
* @param parentStem
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws StemNotFoundFault
* The stem specified could not be found.
*/
public gov.nih.nci.cagrid.gridgrouper.bean.StemDescriptor[] getChildStems(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier parentStem) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemNotFoundFault ;
/**
* Obtains the parent stem for a specified stem.
*
* @param childStem
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws StemNotFoundFault
* The stem specified could not be found.
*/
public gov.nih.nci.cagrid.gridgrouper.bean.StemDescriptor getParentStem(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier childStem) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemNotFoundFault ;
/**
* Updates the information for a specified stem.
*
* @param stem
* @param update
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws InsufficientPrivilegeFault
* The client does not have the required privileges needed to perform the request.
* @throws StemModifyFault
* An error occurred updating the specified stem.
*/
public gov.nih.nci.cagrid.gridgrouper.bean.StemDescriptor updateStem(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier stem,gov.nih.nci.cagrid.gridgrouper.bean.StemUpdate update) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemModifyFault ;
/**
* Obtains a list of subjects with a specified privilege on a specified stem.
*
* @param stem
* @param privilege
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws StemNotFoundFault
* The stem specified could not be found.
*/
public java.lang.String[] getSubjectsWithStemPrivilege(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier stem,gov.nih.nci.cagrid.gridgrouper.bean.StemPrivilegeType privilege) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemNotFoundFault ;
/**
* Obtains a list of privileges for a specified subject on a specified stem.
*
* @param stem
* @param subject
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws StemNotFoundFault
* The stem specified could not be found.
*/
public gov.nih.nci.cagrid.gridgrouper.bean.StemPrivilege[] getStemPrivileges(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier stem,java.lang.String subject) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemNotFoundFault ;
/**
* Returns whether or not a specified subject has a specified privilege on a specified stem.
*
* @param stem
* @param subject
* @param privilege
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws StemNotFoundFault
* The stem specified could not be found.
*/
public boolean hasStemPrivilege(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier stem,java.lang.String subject,gov.nih.nci.cagrid.gridgrouper.bean.StemPrivilegeType privilege) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemNotFoundFault ;
/**
* Grants a specified privilege to a specified subject on a specified stem.
*
* @param stem
* @param subject
* @param privilege
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws StemNotFoundFault
* The stem specified could not be found.
* @throws GrantPrivilegeFault
* An error occurred in granting the privilege.
* @throws InsufficientPrivilegeFault
* The client does not have the required privileges needed to perform the request.
* @throws SchemaFault
* An unexpected schema error occurred in performing the request.
*/
public void grantStemPrivilege(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier stem,java.lang.String subject,gov.nih.nci.cagrid.gridgrouper.bean.StemPrivilegeType privilege) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemNotFoundFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GrantPrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.SchemaFault ;
/**
* Revokes a specified privilege for specified subject on a specified stem.
*
* @param stem
* @param subject
* @param privilege
* @throws StemNotFoundFault
* The stem specified could not be found.
* @throws InsufficientPrivilegeFault
* The client does not have the required privileges needed to perform the request.
* @throws RevokePrivilegeFault
* An error occurred in revoking the privilege.
* @throws SchemaFault
* An unexpected schema error occurred in performing the request.
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
*/
public void revokeStemPrivilege(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier stem,java.lang.String subject,gov.nih.nci.cagrid.gridgrouper.bean.StemPrivilegeType privilege) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemNotFoundFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.RevokePrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.SchemaFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault ;
/**
* Adds a child stem to a specified stem.
*
* @param stem
* @param extension
* @param displayExtension
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws InsufficientPrivilegeFault
* The client does not have the required privileges needed to perform the request.
* @throws StemAddFault
* An error occurred in adding the specified stem.
* @throws StemNotFoundFault
* The stem specified could not be found.
*/
public gov.nih.nci.cagrid.gridgrouper.bean.StemDescriptor addChildStem(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier stem,java.lang.String extension,java.lang.String displayExtension) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemAddFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemNotFoundFault ;
/**
* Deletes a specified stem.
*
* @param stem
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws InsufficientPrivilegeFault
* The client does not have the required privileges needed to perform the request.
* @throws StemDeleteFault
* An error occurred in deleting the specified stem.
* @throws StemNotFoundFault
* The stem specified could not be found.
*/
public void deleteStem(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier stem) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemDeleteFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemNotFoundFault ;
/**
* Gets a specified group.
*
* @param group
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws GroupNotFoundFault
* The specified group cannot be found.
*/
public gov.nih.nci.cagrid.gridgrouper.bean.GroupDescriptor getGroup(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault ;
/**
* Gets the child groups for a specified stem.
*
* @param stem
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws StemNotFoundFault
* The stem specified could not be found.
*/
public gov.nih.nci.cagrid.gridgrouper.bean.GroupDescriptor[] getChildGroups(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier stem) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.StemNotFoundFault ;
/**
* Add a child group to a specified stem.
*
* @param stem
* @param extension
* @param displayExtension
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
 * @throws GroupAddFault
 *             An error occurred in adding the specified group.
* @throws InsufficientPrivilegeFault
* The client does not have the required privileges needed to perform the request.
*/
public gov.nih.nci.cagrid.gridgrouper.bean.GroupDescriptor addChildGroup(gov.nih.nci.cagrid.gridgrouper.bean.StemIdentifier stem,java.lang.String extension,java.lang.String displayExtension) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupAddFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault ;
/**
* Delete a specified group.
*
* @param group
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws GroupNotFoundFault
* The specified group cannot be found.
* @throws GroupDeleteFault
* An error occurred in deleting the specified group.
* @throws InsufficientPrivilegeFault
* The client does not have the required privileges needed to perform the request.
*/
public void deleteGroup(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupDeleteFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault ;
/**
* Update the information for a specified group.
*
* @param group
* @param update
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws GroupNotFoundFault
* The specified group cannot be found.
* @throws GroupModifyFault
* An error occurred in modifying the specified group.
* @throws InsufficientPrivilegeFault
* The client does not have the required privileges needed to perform the request.
*/
public gov.nih.nci.cagrid.gridgrouper.bean.GroupDescriptor updateGroup(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,gov.nih.nci.cagrid.gridgrouper.bean.GroupUpdate update) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupModifyFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault ;
/**
* Add a member to a specified group.
*
* @param group
* @param subject
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws GroupNotFoundFault
* The specified group cannot be found.
* @throws InsufficientPrivilegeFault
* The client does not have the required privileges needed to perform the request.
* @throws MemberAddFault
* An error occurred adding a member to the specified group.
*/
public void addMember(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,java.lang.String subject) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.MemberAddFault ;
/**
* Gets the members of a specified group based on a specified member type (All, Immediate, Effective, or Composite)
*
* @param group
* @param filter
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws GroupNotFoundFault
* The specified group cannot be found.
*/
public gov.nih.nci.cagrid.gridgrouper.bean.MemberDescriptor[] getMembers(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,gov.nih.nci.cagrid.gridgrouper.bean.MemberFilter filter) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault ;
/**
 * Determines whether a specified subject is a member of a specified group, based on a specified member type (All, Immediate, Effective, or Composite).
*
* @param group
* @param member
* @param filter
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws GroupNotFoundFault
* The specified group cannot be found.
*/
public boolean isMemberOf(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,java.lang.String member,gov.nih.nci.cagrid.gridgrouper.bean.MemberFilter filter) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault ;
/**
 * Gets the memberships of a specified group based on a specified member type (All, Immediate, Effective, or Composite)
*
* @param group
* @param filter
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws GroupNotFoundFault
* The specified group cannot be found.
*/
public gov.nih.nci.cagrid.gridgrouper.bean.MembershipDescriptor[] getMemberships(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,gov.nih.nci.cagrid.gridgrouper.bean.MemberFilter filter) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault ;
/**
* Deletes a member of a specified group
*
* @param group
* @param member
* @throws GridGrouperRuntimeFault
* An unexpected Grouper/Grid Grouper error occurred.
* @throws InsufficientPrivilegeFault
* The client does not have the required privileges needed to perform the request.
* @throws GroupNotFoundFault
* The specified group cannot be found.
* @throws MemberDeleteFault
* An error occurred in deleting a member from a specified group.
*/
public void deleteMember(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,java.lang.String member) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.MemberDeleteFault ;
/**
 * Adds a composite member to a specified group
 *
 * @param type
 *            the kind of composite to create from the two operand groups.
 * @param composite
 *            identifier of the group that receives the composite member.
 * @param left
 *            identifier of the left operand group of the composite.
 * @param right
 *            identifier of the right operand group of the composite.
 * @return descriptor of the group after the composite member has been added.
 * @throws GridGrouperRuntimeFault
 *             An unexpected Grouper/Grid Grouper error occurred.
 * @throws GroupNotFoundFault
 *             The specified group cannot be found.
 * @throws MemberAddFault
 *             An error occurred in adding a composite member to a specified group.
 * @throws InsufficientPrivilegeFault
 *             The client does not have the required privileges needed to perform the request.
 */
public gov.nih.nci.cagrid.gridgrouper.bean.GroupDescriptor addCompositeMember(gov.nih.nci.cagrid.gridgrouper.bean.GroupCompositeType type,gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier composite,gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier left,gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier right) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.MemberAddFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault ;
/**
 * Deletes a composite member from a specified group.
 *
 * @param group
 *            identifier of the group whose composite member is to be removed.
 * @return descriptor of the group after the composite member has been removed.
 * @throws GridGrouperRuntimeFault
 *             An unexpected Grouper/Grid Grouper error occurred.
 * @throws GroupNotFoundFault
 *             The specified group cannot be found.
 * @throws InsufficientPrivilegeFault
 *             The client does not have the required privileges needed to perform the request.
 * @throws MemberDeleteFault
 *             An error occurred in deleting a composite member from a specified group.
 */
public gov.nih.nci.cagrid.gridgrouper.bean.GroupDescriptor deleteCompositeMember(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.MemberDeleteFault ;
/**
 * Grant a specified privilege for a specified subject on a specified group.
 *
 * @param group
 *            identifier of the group on which the privilege is granted.
 * @param subject
 *            id of the subject receiving the privilege.
 * @param privilege
 *            the group privilege to grant.
 * @throws GridGrouperRuntimeFault
 *             An unexpected Grouper/Grid Grouper error occurred.
 * @throws GroupNotFoundFault
 *             The specified group cannot be found.
 * @throws GrantPrivilegeFault
 *             An error occurred in granting the privilege.
 * @throws InsufficientPrivilegeFault
 *             The client does not have the required privileges needed to perform the request.
 */
public void grantGroupPrivilege(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,java.lang.String subject,gov.nih.nci.cagrid.gridgrouper.bean.GroupPrivilegeType privilege) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GrantPrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault ;
/**
 * Revoke a specified privilege for a specified subject on a specified group.
 *
 * @param group
 *            identifier of the group on which the privilege is revoked.
 * @param subject
 *            id of the subject whose privilege is being revoked.
 * @param privilege
 *            the group privilege to revoke.
 * @throws GridGrouperRuntimeFault
 *             An unexpected Grouper/Grid Grouper error occurred.
 * @throws GroupNotFoundFault
 *             The specified group cannot be found.
 * @throws RevokePrivilegeFault
 *             An error occurred in revoking the privilege.
 * @throws InsufficientPrivilegeFault
 *             The client does not have the required privileges needed to perform the request.
 * @throws SchemaFault
 *             An unexpected schema error occurred in performing the request.
 */
public void revokeGroupPrivilege(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,java.lang.String subject,gov.nih.nci.cagrid.gridgrouper.bean.GroupPrivilegeType privilege) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.RevokePrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.SchemaFault ;
/**
 * Gets a list of subjects with a specified privilege on a specified group.
 *
 * @param group
 *            identifier of the group to inspect.
 * @param privilege
 *            the group privilege the returned subjects must hold.
 * @return ids of the subjects holding the privilege on the group.
 * @throws GridGrouperRuntimeFault
 *             An unexpected Grouper/Grid Grouper error occurred.
 * @throws GroupNotFoundFault
 *             The specified group cannot be found.
 */
public java.lang.String[] getSubjectsWithGroupPrivilege(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,gov.nih.nci.cagrid.gridgrouper.bean.GroupPrivilegeType privilege) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault ;
/**
 * Gets a list of privileges for a specified subject on a specified group.
 *
 * @param group
 *            identifier of the group to inspect.
 * @param subject
 *            id of the subject whose privileges are requested.
 * @return the privileges the subject holds on the group.
 * @throws GridGrouperRuntimeFault
 *             An unexpected Grouper/Grid Grouper error occurred.
 * @throws GroupNotFoundFault
 *             The specified group cannot be found.
 */
public gov.nih.nci.cagrid.gridgrouper.bean.GroupPrivilege[] getGroupPrivileges(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,java.lang.String subject) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault ;
/**
 * Returns whether or not a specified subject has a specified privilege on a specified group.
 *
 * @param group
 *            identifier of the group to inspect.
 * @param subject
 *            id of the subject to check.
 * @param privilege
 *            the group privilege to check for.
 * @return true if the subject holds the privilege on the group.
 * @throws GridGrouperRuntimeFault
 *             An unexpected Grouper/Grid Grouper error occurred.
 * @throws GroupNotFoundFault
 *             The specified group cannot be found.
 */
public boolean hasGroupPrivilege(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,java.lang.String subject,gov.nih.nci.cagrid.gridgrouper.bean.GroupPrivilegeType privilege) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.GroupNotFoundFault ;
/**
 * Returns whether or not a specified subject satisfies a specified membership expression.
 * (Note: unlike {@code isMemberOf}, this variant takes an expression rather than a single
 * group identifier.)
 *
 * @param member
 *            id of the subject to evaluate.
 * @param expression
 *            the membership expression to evaluate the subject against.
 * @return true if the subject satisfies the membership expression.
 * @throws GridGrouperRuntimeFault
 *             An unexpected Grouper/Grid Grouper error occurred.
 */
public boolean isMember(java.lang.String member,gov.nih.nci.cagrid.gridgrouper.bean.MembershipExpression expression) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault ;
/**
 * Get information about a member.
 *
 * @param member
 *            id of the member to look up.
 * @return descriptor carrying the member's information.
 * @throws GridGrouperRuntimeFault
 *             An unexpected Grouper/Grid Grouper error occurred.
 * @throws InsufficientPrivilegeFault
 *             The client does not have the required privileges needed to perform the request.
 */
public gov.nih.nci.cagrid.gridgrouper.bean.MemberDescriptor getMember(java.lang.String member) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault ;
/**
 * Gets the groups that a specified member belongs to, restricted by membership type.
 *
 * @param member
 *            id of the member whose groups are requested.
 * @param type
 *            restricts which membership types are considered.
 * @return descriptors of the groups the member belongs to.
 * @throws GridGrouperRuntimeFault
 *             An unexpected Grouper/Grid Grouper error occurred.
 * @throws InsufficientPrivilegeFault
 *             The client does not have the required privileges needed to perform the request.
 */
public gov.nih.nci.cagrid.gridgrouper.bean.GroupDescriptor[] getMembersGroups(java.lang.String member,gov.nih.nci.cagrid.gridgrouper.bean.MembershipType type) throws RemoteException, gov.nih.nci.cagrid.gridgrouper.stubs.types.GridGrouperRuntimeFault, gov.nih.nci.cagrid.gridgrouper.stubs.types.InsufficientPrivilegeFault ;
/**
 * Standard WS-ResourceProperties operation: retrieves multiple resource properties.
 */
public org.oasis.wsrf.properties.GetMultipleResourcePropertiesResponse getMultipleResourceProperties(org.oasis.wsrf.properties.GetMultipleResourceProperties_Element params) throws RemoteException ;
/**
 * Standard WS-ResourceProperties operation: retrieves a single resource property by QName.
 */
public org.oasis.wsrf.properties.GetResourcePropertyResponse getResourceProperty(javax.xml.namespace.QName params) throws RemoteException ;
/**
 * Standard WS-ResourceProperties operation: queries resource properties with an expression.
 */
public org.oasis.wsrf.properties.QueryResourcePropertiesResponse queryResourceProperties(org.oasis.wsrf.properties.QueryResourceProperties_Element params) throws RemoteException ;
/**
 * Submits a request for membership in the specified group on behalf of the caller.
 *
 * @param group
 *            identifier of the group to request membership in.
 */
public void addMembershipRequest(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group) throws RemoteException ;
/**
 * Update the status of the membership request.
 *
 * @param group
 *            identifier of the group the membership request was made against.
 * @param subject
 *            id of the subject whose membership request is being updated.
 * @param update
 *            the status update to apply to the request.
 * @return descriptor of the membership request after the update.
 */
public gov.nih.nci.cagrid.gridgrouper.bean.MembershipRequestDescriptor updateMembershipRequest(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,java.lang.String subject,gov.nih.nci.cagrid.gridgrouper.bean.MembershipRequestUpdate update) throws RemoteException ;
/**
 * Retrieve the group's membership requests based on request status.
 *
 * @param group
 *            identifier of the group whose membership requests are requested.
 * @param status
 *            only requests with this status are returned.
 * @return descriptors of the matching membership requests.
 */
public gov.nih.nci.cagrid.gridgrouper.bean.MembershipRequestDescriptor[] getMembershipRequests(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group,gov.nih.nci.cagrid.gridgrouper.bean.MembershipRequestStatus status) throws RemoteException ;
/**
 * Enable a group to accept membership requests
 *
 * @param group
 *            identifier of the group that should start accepting membership requests.
 */
public void enableMembershipRequests(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group) throws RemoteException ;
/**
 * Removes the ability of a group to accept membership requests
 *
 * @param group
 *            identifier of the group that should stop accepting membership requests.
 */
public void disableMembershipRequests(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group) throws RemoteException ;
/**
 * Returns whether or not the group allows membership requests
 *
 * @param group
 *            identifier of the group to inspect.
 * @return true if the group currently accepts membership requests.
 */
public boolean isMembershipRequestEnabled(gov.nih.nci.cagrid.gridgrouper.bean.GroupIdentifier group) throws RemoteException ;
}
| |
package com.sudwood.betterhoppers.blocks;
import java.util.List;
import java.util.Random;
import com.sudwood.betterhoppers.BetterHoppers;
import com.sudwood.betterhoppers.tileentities.TileEntityFasterHopper;
import net.minecraft.block.Block;
import net.minecraft.block.BlockContainer;
import net.minecraft.block.BlockHopper;
import net.minecraft.block.material.Material;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.texture.IconRegister;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.Container;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.tileentity.TileEntityHopper;
import com.sudwood.betterhoppers.tileentities.TileEntityFasterHopper;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.util.Facing;
import net.minecraft.util.Icon;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.client.MinecraftForgeClient;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/**
 * A full-cube variant of the vanilla hopper that absorbs item entities on contact
 * and delegates storage to {@link TileEntityFasterHopper}.
 */
public class BlockFasterHopper extends BlockHopper
{
    /** Thickness of the four side walls used for collision boxes. */
    private static final float WALL_THICKNESS = 0.125F;

    /** RNG used to scatter the hopper's inventory into the world when it is broken. */
    private final Random rand = new Random();

    @SideOnly(Side.CLIENT)
    private static Icon hopperIcon;
    @SideOnly(Side.CLIENT)
    private static Icon hopperTopIcon;
    @SideOnly(Side.CLIENT)
    private static Icon hopperInsideIcon;

    public BlockFasterHopper(int par1)
    {
        super(par1);
        // Unlike the vanilla hopper, this block occupies a full 1x1x1 cube.
        this.setBlockBounds(0.0F, 0.0F, 0.0F, 1.0F, 1F, 1.0F);
    }

    /**
     * Updates the blocks bounds based on its current state. Args: world, x, y, z
     */
    public void setBlockBoundsBasedOnState(IBlockAccess par1IBlockAccess, int par2, int par3, int par4)
    {
        this.setBlockBounds(0.0F, 0.0F, 0.0F, 1.0F, 1F, 1.0F);
    }

    /**
     * Absorbs item entities that touch the block by inserting them into the hopper's
     * inventory; the entity is only removed when the insertion succeeded.
     */
    public void onEntityCollidedWithBlock(World par1World, int par2, int par3, int par4, Entity par5Entity)
    {
        if (par5Entity instanceof EntityItem)
        {
            TileEntity tile = par1World.getBlockTileEntity(par2, par3, par4);
            // Guard against a missing or foreign tile entity instead of blindly casting
            // (the original code would NPE if the tile entity was absent).
            if (tile instanceof TileEntityFasterHopper)
            {
                boolean inserted = ((TileEntityFasterHopper) tile)
                        .putItemInThisInventory(((EntityItem) par5Entity).getEntityItem());
                if (inserted)
                {
                    par5Entity.setDead();
                }
            }
        }
    }

    /**
     * Adds all intersecting collision boxes to a list. (Be sure to only add boxes to the list if they intersect the
     * mask.) Parameters: World, X, Y, Z, mask, list, colliding entity
     */
    public void addCollisionBoxesToList(World par1World, int par2, int par3, int par4, AxisAlignedBB par5AxisAlignedBB, List par6List, Entity par7Entity)
    {
        // Bottom slab.
        this.setBlockBounds(0.0F, 0.0F, 0.0F, 1.0F, 0.625F, 1.0F);
        super.addCollisionBoxesToList(par1World, par2, par3, par4, par5AxisAlignedBB, par6List, par7Entity);
        // Four side walls of thickness WALL_THICKNESS.
        this.setBlockBounds(0.0F, 0.0F, 0.0F, WALL_THICKNESS, 1F, 1.0F);
        super.addCollisionBoxesToList(par1World, par2, par3, par4, par5AxisAlignedBB, par6List, par7Entity);
        this.setBlockBounds(0.0F, 0.0F, 0.0F, 1.0F, 1F, WALL_THICKNESS);
        super.addCollisionBoxesToList(par1World, par2, par3, par4, par5AxisAlignedBB, par6List, par7Entity);
        this.setBlockBounds(1.0F - WALL_THICKNESS, 0.0F, 0.0F, 1.0F, 1F, 1.0F);
        super.addCollisionBoxesToList(par1World, par2, par3, par4, par5AxisAlignedBB, par6List, par7Entity);
        this.setBlockBounds(0.0F, 0.0F, 1.0F - WALL_THICKNESS, 1.0F, 1F, 1.0F);
        super.addCollisionBoxesToList(par1World, par2, par3, par4, par5AxisAlignedBB, par6List, par7Entity);
        // Restore the full-cube bounds used for selection/rendering.
        this.setBlockBounds(0.0F, 0.0F, 0.0F, 1.0F, 1F, 1.0F);
    }

    /**
     * Called when a block is placed using its ItemBlock. Args: World, X, Y, Z, side, hitX, hitY, hitZ, block metadata
     *
     * @return metadata encoding the facing: the side opposite the clicked face,
     *         with a would-be "up" facing (1) coerced to "down" (0).
     */
    public int onBlockPlaced(World par1World, int par2, int par3, int par4, int par5, float par6, float par7, float par8, int par9)
    {
        int facing = Facing.oppositeSide[par5];
        if (facing == 1)
        {
            facing = 0;
        }
        return facing;
    }

    /**
     * Returns a new instance of a block's tile entity class. Called on placing the block.
     */
    public TileEntity createNewTileEntity(World par1World)
    {
        return new TileEntityFasterHopper();
    }

    /**
     * Called when the block is placed in the world. No behavior beyond the superclass.
     */
    public void onBlockPlacedBy(World par1World, int par2, int par3, int par4, EntityLivingBase par5EntityLivingBase, ItemStack par6ItemStack)
    {
        super.onBlockPlacedBy(par1World, par2, par3, par4, par5EntityLivingBase, par6ItemStack);
    }

    /**
     * Called whenever the block is added into the world. Args: world, x, y, z
     */
    public void onBlockAdded(World par1World, int par2, int par3, int par4)
    {
        super.onBlockAdded(par1World, par2, par3, par4);
        this.updateMetadata(par1World, par2, par3, par4);
    }

    /**
     * Called upon block activation (right click on the block.) Opens the hopper GUI on
     * the server side when a tile entity is present.
     */
    public boolean onBlockActivated(World par1World, int par2, int par3, int par4, EntityPlayer par5EntityPlayer, int par6, float par7, float par8, float par9)
    {
        if (par1World.isRemote)
        {
            return true;
        }
        else
        {
            TileEntityFasterHopper hopper = (TileEntityFasterHopper) par1World.getBlockTileEntity(par2, par3, par4);
            if (hopper != null)
            {
                par5EntityPlayer.openGui(BetterHoppers.instance, 0, par1World, par2, par3, par4);
            }
            return true;
        }
    }

    /**
     * Lets the block know when one of its neighbor changes. Doesn't know which neighbor changed (coordinates passed are
     * their own) Args: x, y, z, neighbor blockID
     */
    public void onNeighborBlockChange(World par1World, int par2, int par3, int par4, int par5)
    {
        this.updateMetadata(par1World, par2, par3, par4);
    }

    /**
     * Updates the Metadata to include if the Hopper gets powered by Redstone or not.
     * The "not powered" flag lives in bit 3 (0x8) of the metadata; bits 0-2 hold the facing.
     */
    private void updateMetadata(World par1World, int par2, int par3, int par4)
    {
        int meta = par1World.getBlockMetadata(par2, par3, par4);
        int facing = getDirectionFromMetadata(meta);
        boolean notPowered = !par1World.isBlockIndirectlyGettingPowered(par2, par3, par4);
        boolean storedNotPowered = getIsBlockNotPoweredFromMetadata(meta);
        // Only write metadata when the powered state actually changed.
        if (notPowered != storedNotPowered)
        {
            par1World.setBlockMetadataWithNotify(par2, par3, par4, facing | (notPowered ? 0 : 8), 4);
        }
    }

    /**
     * Called on server worlds only when the block has been replaced by a different block ID, or the same block with a
     * different metadata value, but before the new metadata value is set. Args: World, x, y, z, old block ID, old
     * metadata
     */
    public void breakBlock(World par1World, int par2, int par3, int par4, int par5, int par6)
    {
        TileEntityFasterHopper hopper = (TileEntityFasterHopper) par1World.getBlockTileEntity(par2, par3, par4);
        if (hopper != null)
        {
            // Spill every stack in the hopper's inventory into the world as item entities.
            for (int slot = 0; slot < hopper.getSizeInventory(); ++slot)
            {
                ItemStack itemstack = hopper.getStackInSlot(slot);
                if (itemstack != null)
                {
                    float offsetX = this.rand.nextFloat() * 0.8F + 0.1F;
                    float offsetY = this.rand.nextFloat() * 0.8F + 0.1F;
                    float offsetZ = this.rand.nextFloat() * 0.8F + 0.1F;
                    // Break large stacks into randomly sized chunks of 10-30 items.
                    while (itemstack.stackSize > 0)
                    {
                        int chunk = this.rand.nextInt(21) + 10;
                        if (chunk > itemstack.stackSize)
                        {
                            chunk = itemstack.stackSize;
                        }
                        itemstack.stackSize -= chunk;
                        EntityItem entityitem = new EntityItem(par1World, (double)((float)par2 + offsetX), (double)((float)par3 + offsetY), (double)((float)par4 + offsetZ), new ItemStack(itemstack.itemID, chunk, itemstack.getItemDamage()));
                        // Preserve NBT data (enchantments, names, ...) on the dropped items.
                        if (itemstack.hasTagCompound())
                        {
                            entityitem.getEntityItem().setTagCompound((NBTTagCompound)itemstack.getTagCompound().copy());
                        }
                        float speed = 0.05F;
                        entityitem.motionX = (double)((float)this.rand.nextGaussian() * speed);
                        entityitem.motionY = (double)((float)this.rand.nextGaussian() * speed + 0.2F);
                        entityitem.motionZ = (double)((float)this.rand.nextGaussian() * speed);
                        par1World.spawnEntityInWorld(entityitem);
                    }
                }
            }
            // Notify comparators reading this inventory.
            par1World.func_96440_m(par2, par3, par4, par5);
        }
        par1World.removeBlockTileEntity(par2, par3, par4);
    }

    /**
     * The type of render function that is called for this block
     */
    public int getRenderType()
    {
        return 38;
    }

    /**
     * If this block doesn't render as an ordinary block it will return False (examples: signs, buttons, stairs, etc)
     */
    public boolean renderAsNormalBlock()
    {
        return false;
    }

    /**
     * Is this block (a) opaque and (b) a full 1m cube? This determines whether or not to render the shared face of two
     * adjacent blocks and also whether the player can attach torches, redstone wire, etc to this block.
     */
    public boolean isOpaqueCube()
    {
        return false;
    }

    /** Extracts the facing direction from metadata (low three bits). */
    public static int getDirectionFromMetadata(int par0)
    {
        return par0 & 7;
    }

    @SideOnly(Side.CLIENT)
    /**
     * Returns true if the given side of this block type should be rendered, if the adjacent block is at the given
     * coordinates. Args: blockAccess, x, y, z, side
     */
    public boolean shouldSideBeRendered(IBlockAccess par1IBlockAccess, int par2, int par3, int par4, int par5)
    {
        return true;
    }

    @SideOnly(Side.CLIENT)
    /**
     * From the specified side and block metadata retrieves the blocks texture. Args: side, metadata
     */
    public Icon getIcon(int par1, int par2)
    {
        // Side 1 is the top face; everything else uses the outside texture.
        return par1 == 1 ? hopperTopIcon : hopperIcon;
    }

    /** Returns true when the "not powered" metadata flag (bit 3) is clear. */
    public static boolean getIsBlockNotPoweredFromMetadata(int par0)
    {
        return (par0 & 8) != 8;
    }

    /**
     * If this returns true, then comparators facing away from this block will use the value from
     * getComparatorInputOverride instead of the actual redstone signal strength.
     */
    public boolean hasComparatorInputOverride()
    {
        return true;
    }

    /**
     * If hasComparatorInputOverride returns true, the return value from this is used instead of the redstone signal
     * strength when this block inputs to a comparator.
     */
    public int getComparatorInputOverride(World par1World, int par2, int par3, int par4, int par5)
    {
        return Container.calcRedstoneFromInventory(getHopperTile(par1World, par2, par3, par4));
    }

    @SideOnly(Side.CLIENT)
    /**
     * When this method is called, your block should register all the icons it needs with the given IconRegister. This
     * is the only chance you get to register icons.
     */
    public void registerIcons(IconRegister par1IconRegister)
    {
        // The icon fields are static, so qualify with the class rather than "this".
        BlockFasterHopper.hopperIcon = par1IconRegister.registerIcon("BetterHoppers:faster_hopper_outside");
        BlockFasterHopper.hopperTopIcon = par1IconRegister.registerIcon("hopper_top");
        BlockFasterHopper.hopperInsideIcon = par1IconRegister.registerIcon("hopper_inside");
    }

    @SideOnly(Side.CLIENT)
    /** Looks up a registered icon by the vanilla hopper texture name. */
    public static Icon getHopperIcon(String par0Str)
    {
        return par0Str.equals("hopper_outside") ? hopperIcon : (par0Str.equals("hopper_inside") ? hopperInsideIcon : null);
    }

    @SideOnly(Side.CLIENT)
    /**
     * Gets the icon name of the ItemBlock corresponding to this block. Used by hoppers.
     */
    public String getItemIconName()
    {
        return "BetterHoppers:faster_hopper";
    }

    /** Fetches the hopper tile entity at the given coordinates (may be null). */
    public static TileEntityHopper getHopperTile(IBlockAccess par0IBlockAccess, int par1, int par2, int par3)
    {
        return (TileEntityHopper)par0IBlockAccess.getBlockTileEntity(par1, par2, par3);
    }
}
| |
/*-
* #%L
* utils-commons
* %%
* Copyright (C) 2016 - 2018 Gilles Landel
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package fr.landel.utils.commons.function;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.Test;
import fr.landel.utils.commons.exception.FunctionException;
/**
 * Check {@link HeptaPredicateThrowable}
 *
 * @since Nov 18, 2017
 * @author Gilles
 *
 */
public class HeptaPredicateThrowableTest {

    private static final String ERROR1 = "The first argument is null";
    private static final String ERROR2 = "The second argument is null";
    private static final String ERROR3 = "Both arguments are null";
    private static final String ERROR4 = "First string is not in upper case";
    private static final String ERROR5 = "Second string is not in upper case";
    private static final String ERROR6 = "Both strings are not in upper case";

    // P1: returns whether the first string is longer than the second; throws when
    // either of the first two arguments is null. Only the first three of the seven
    // arguments are actually inspected.
    // NOTE(review): when both strings are non-null but i <= 0, this falls through to
    // the "s1 != null" branch and reports ERROR2 ("second argument is null"), which
    // is misleading — presumably acceptable for these tests; confirm before reuse.
    private static final HeptaPredicateThrowable<String, String, Integer, String, String, String, String, IllegalArgumentException> P1 = (
            s1, s2, i, s3, s4, s5, s6) -> {
        if (s1 != null && s2 != null && i > 0) {
            return s1.length() > s2.length();
        } else if (s1 != null) {
            throw new IllegalArgumentException(ERROR2);
        } else if (s2 != null) {
            throw new IllegalArgumentException(ERROR1);
        }
        throw new IllegalArgumentException(ERROR3);
    };

    // P2: returns whether either upper-case string contains the other; throws when a
    // string is not already upper case. Assumes s1 and s2 are non-null (NPE otherwise).
    // The trailing ERROR6 throw is unreachable from the visible branches (one of the
    // two "else if" arms always fires when the first condition is false).
    private static final HeptaPredicateThrowable<String, String, Integer, String, String, String, String, IllegalArgumentException> P2 = (
            s1, s2, i, s3, s4, s5, s6) -> {
        String s1u = s1.toUpperCase();
        String s2u = s2.toUpperCase();
        if (s1u.equals(s1) && s2u.equals(s2)) {
            return s1u.contains(s2u) || s2u.contains(s1u);
        } else if (!s1u.equals(s1)) {
            throw new IllegalArgumentException(ERROR4);
        } else if (!s2u.equals(s2)) {
            throw new IllegalArgumentException(ERROR5);
        }
        throw new IllegalArgumentException(ERROR6);
    };

    /**
     * Test method for
     * {@link HeptaPredicateThrowable#test(Object, Object, Object, Object, Object, Object, Object)}.
     */
    @Test
    public void testTest() {
        try {
            assertTrue(P1.test("v12", "v8", 1, "turbo", "t", "u", "r"));
        } catch (FunctionException e) {
            fail("Predicate failed");
        }

        try {
            P1.test(null, "v2", 1, "turbo", "t", "u", "r");
            fail("Predicate has to fail");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
            assertEquals(ERROR1, e.getMessage());
        }
    }

    /**
     * Test method for
     * {@link HeptaPredicateThrowable#testThrows(Object, Object, Object, Object, Object, Object, Object)}.
     */
    @Test
    public void testTestThrows() {
        try {
            assertTrue(P1.testThrows("v12", "v8", 1, "turbo", "t", "u", "r"));
        } catch (IllegalArgumentException e) {
            fail("Predicate failed");
        }

        try {
            P1.testThrows(null, "v2", 1, "turbo", "t", "u", "r");
            fail("Predicate has to fail");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
            assertEquals(ERROR1, e.getMessage());
        }
    }

    /**
     * Test method for
     * {@link HeptaPredicateThrowable#and(HeptaPredicateThrowable)}.
     */
    @Test
    public void testAnd() {
        final HeptaPredicateThrowable<String, String, Integer, String, String, String, String, IllegalArgumentException> pp = P1.and(P2);

        try {
            assertTrue(pp.testThrows("V12", "V1", 1, "turbo", "t", "u", "r"));
            assertFalse(pp.testThrows("V12", "V8", 1, "turbo", "t", "u", "r"));
            assertFalse(pp.testThrows("V6", "V12", 1, "turbo", "t", "u", "r"));
            assertFalse(pp.testThrows("V6", "V6", 1, "turbo", "t", "u", "r"));
        } catch (IllegalArgumentException e) {
            fail("Predicate failed");
        }

        try {
            pp.testThrows(null, "V8", 1, "turbo", "t", "u", "r");
            fail("Predicate has to fail");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
            assertEquals(ERROR1, e.getMessage());
        }

        try {
            // P1 passes (true), so P2 runs and rejects the lower-case second string.
            pp.testThrows("V12", "v8", 1, "turbo", "t", "u", "r");
            fail("Predicate has to fail");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
            assertEquals(ERROR5, e.getMessage());
        }
    }

    /**
     * Test method for {@link HeptaPredicateThrowable#negateThrows()}.
     */
    @Test
    public void testNegateThrows() {
        final HeptaPredicateThrowable<String, String, Integer, String, String, String, String, IllegalArgumentException> pp = P1
                .negateThrows();

        try {
            assertFalse(pp.testThrows("V12", "V8", 1, "turbo", "t", "u", "r"));
            assertTrue(pp.testThrows("v6", "V8", 2, "turbo", "t", "u", "r"));
        } catch (IllegalArgumentException e) {
            fail("Predicate failed");
        }

        try {
            pp.testThrows("V6", null, 1, "turbo", "t", "u", "r");
            fail("Predicate has to fail");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
            assertEquals(ERROR2, e.getMessage());
        }
    }

    /**
     * Test method for
     * {@link HeptaPredicateThrowable#or(HeptaPredicateThrowable)}.
     */
    @Test
    public void testOr() {
        final HeptaPredicateThrowable<String, String, Integer, String, String, String, String, IllegalArgumentException> pp = P1.or(P2);

        try {
            assertTrue(pp.testThrows("V12", "V1", 1, "turbo", "t", "u", "r"));
            assertTrue(pp.testThrows("V", "V1", 1, "turbo", "t", "u", "r"));
            assertTrue(pp.testThrows("V12", "V12", 1, "turbo", "t", "u", "r"));
            assertFalse(pp.testThrows("V6", "V12", 1, "turbo", "t", "u", "r"));
        } catch (IllegalArgumentException e) {
            fail("Predicate failed");
        }

        try {
            pp.testThrows(null, "V8", 1, "turbo", "t", "u", "r");
            fail("Predicate has to fail");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
            assertEquals(ERROR1, e.getMessage());
        }

        try {
            // first test pass and return true, so the second one is not
            // executed
            assertTrue(pp.testThrows("V12", "v8", 1, "turbo", "t", "u", "r"));
        } catch (IllegalArgumentException e) {
            fail("Predicate failed");
        }

        try {
            // first test pass and return false, so the next is executed
            pp.testThrows("v6", "V8", 1, "turbo", "t", "u", "r");
            fail("Predicate has to fail");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
            assertEquals(ERROR4, e.getMessage());
        }
    }
}
| |
package io.noties.markwon.image;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(RobolectricTestRunner.class)
@Config(manifest = Config.NONE)
/**
 * Unit tests for {@code AsyncDrawable}: dimension handling, result/callback
 * lifecycle, and placeholder bounds resolution.
 */
public class AsyncDrawableTest {

    // Real (non-mock) resolver used by the dimension tests.
    private ImageSizeResolver imageSizeResolver;

    @Before
    public void before() {
        imageSizeResolver = new ImageSizeResolverDef();
    }

    @Test
    public void no_dimensions_await() {
        // when drawable have no known dimensions yet, it will await for them
        final AsyncDrawable drawable = new AsyncDrawable("",
                mock(AsyncDrawableLoader.class),
                imageSizeResolver,
                new ImageSize(new ImageSize.Dimension(100.F, "%"), null));

        final Drawable result = new AbstractDrawable();
        result.setBounds(0, 0, 0, 0);

        assertFalse(drawable.hasResult());

        drawable.setResult(result);

        assertTrue(drawable.hasResult());
        // Bounds stay empty until the canvas dimensions are known.
        assertTrue(result.getBounds().isEmpty());

        drawable.initWithKnownDimensions(100, 1);

        // 100% of the known width (100), height unspecified.
        assertEquals(
                new Rect(0, 0, 100, 0),
                result.getBounds()
        );
    }

    @Test
    public void previous_result_detached() {
        // when result is present it will be detached (setCallback(null))
        final AsyncDrawable drawable = new AsyncDrawable("",
                mock(AsyncDrawableLoader.class),
                imageSizeResolver,
                null);
        drawable.setCallback2(mock(Drawable.Callback.class));
        drawable.initWithKnownDimensions(100, 1);

        final Drawable result1 = new AbstractDrawable();
        final Drawable result2 = new AbstractDrawable();

        drawable.setResult(result1);

        assertNotNull(result1.getCallback());

        // Replacing the result must release the first drawable's callback.
        drawable.setResult(result2);

        assertNull(result1.getCallback());
        assertNotNull(result2.getCallback());
    }

    @Test
    public void placeholder_no_bounds_no_intrinsic_bounds() {
        // when there is a placeholder and its
        //  * bounds are empty
        //  * intrinsic bounds are empty
        // AsyncDrawable.this must have any non-empty bounds (otherwise result won't be rendered,
        //  due to missing invalidation call)

        final Drawable placeholder = new AbstractDrawable() {
            @Override
            public int getIntrinsicWidth() {
                return 0;
            }

            @Override
            public int getIntrinsicHeight() {
                return 0;
            }
        };

        assertTrue(placeholder.getBounds().isEmpty());

        final AsyncDrawableLoader loader = mock(AsyncDrawableLoader.class);
        when(loader.placeholder(any(AsyncDrawable.class))).thenReturn(placeholder);

        final AsyncDrawable drawable = new AsyncDrawable(
                "",
                loader,
                mock(ImageSizeResolver.class),
                null
        );

        final Rect bounds = drawable.getBounds();
        assertFalse(bounds.toShortString(), bounds.isEmpty());
        assertEquals(bounds.toShortString(), bounds, placeholder.getBounds());
    }

    @Test
    public void placeholder_no_bounds_has_intrinsic() {
        // placeholder has no bounds, but instead has intrinsic bounds

        final Drawable placeholder = new AbstractDrawable() {
            @Override
            public int getIntrinsicWidth() {
                return 42;
            }

            @Override
            public int getIntrinsicHeight() {
                return 24;
            }
        };

        assertTrue(placeholder.getBounds().isEmpty());

        final AsyncDrawableLoader loader = mock(AsyncDrawableLoader.class);
        when(loader.placeholder(any(AsyncDrawable.class))).thenReturn(placeholder);

        final AsyncDrawable drawable = new AsyncDrawable(
                "",
                loader,
                mock(ImageSizeResolver.class),
                null
        );

        // Bounds must be derived from the intrinsic dimensions (42x24).
        final Rect bounds = drawable.getBounds();
        assertFalse(bounds.isEmpty());
        assertEquals(0, bounds.left);
        assertEquals(42, bounds.right);
        assertEquals(0, bounds.top);
        assertEquals(24, bounds.bottom);
        assertEquals(bounds, placeholder.getBounds());
    }

    @Test
    public void placeholder_has_bounds() {
        // Explicit placeholder bounds win: intrinsic dimensions must not be consulted.
        final Rect rect = new Rect(0, 0, 12, 99);
        final Drawable placeholder = mock(Drawable.class);
        when(placeholder.getBounds()).thenReturn(rect);

        assertFalse(rect.isEmpty());

        final AsyncDrawableLoader loader = mock(AsyncDrawableLoader.class);
        when(loader.placeholder(any(AsyncDrawable.class))).thenReturn(placeholder);

        final AsyncDrawable drawable = new AsyncDrawable(
                "",
                loader,
                mock(ImageSizeResolver.class),
                null
        );

        final Rect bounds = drawable.getBounds();
        assertEquals(rect, bounds);

        verify(placeholder, times(1)).getBounds();
        verify(placeholder, never()).getIntrinsicWidth();
        verify(placeholder, never()).getIntrinsicHeight();
        verify(placeholder, never()).setBounds(any(Rect.class));
    }

    @Test
    public void no_placeholder_empty_bounds() {
        // when AsyncDrawable has no placeholder, then its bounds must be empty at the start
        final AsyncDrawable drawable = new AsyncDrawable(
                "",
                mock(AsyncDrawableLoader.class),
                mock(ImageSizeResolver.class),
                null
        );
        assertTrue(drawable.getBounds().isEmpty());
    }

    // Minimal concrete Drawable; only the abstract members are stubbed out.
    private static class AbstractDrawable extends Drawable {

        @Override
        public void draw(@NonNull Canvas canvas) {

        }

        @Override
        public void setAlpha(int alpha) {

        }

        @Override
        public void setColorFilter(@Nullable ColorFilter colorFilter) {

        }

        @Override
        public int getOpacity() {
            return 0;
        }
    }
}
| |
package io.github.hidroh.materialistic.data;
import android.content.AsyncQueryHandler;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.CursorWrapper;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Parcel;
import android.support.annotation.NonNull;
import android.support.v4.content.LocalBroadcastManager;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.TextUtils;
import android.text.format.DateUtils;
import java.util.Set;
import io.github.hidroh.materialistic.R;
/**
* Data repository for {@link io.github.hidroh.materialistic.data.FavoriteManager.Favorite}
*/
public class FavoriteManager {

    /** Loader ID for {@link CursorLoader} consumers. */
    public static final int LOADER = 0;
    /**
     * {@link android.content.Intent#getAction()} for broadcasting clearing all favorites
     */
    public static final String ACTION_CLEAR = FavoriteManager.class.getName() + ".ACTION_CLEAR";
    /**
     * {@link android.content.Intent#getAction()} for broadcasting getting favorites matching query
     */
    public static final String ACTION_GET = FavoriteManager.class.getName() + ".ACTION_GET";
    /**
     * {@link android.content.Intent#getAction()} for broadcasting adding favorites
     */
    public static final String ACTION_ADD = FavoriteManager.class.getName() + ".ACTION_ADD";
    /**
     * {@link android.content.Intent#getAction()} for broadcasting removing favorites
     */
    public static final String ACTION_REMOVE = FavoriteManager.class.getName() + ".ACTION_REMOVE";
    /**
     * {@link android.os.Bundle} key for {@link #ACTION_GET} that contains array of
     * {@link io.github.hidroh.materialistic.data.FavoriteManager.Favorite}
     */
    public static final String ACTION_GET_EXTRA_DATA = ACTION_GET + ".EXTRA_DATA";
    /**
     * {@link android.os.Bundle} key for {@link #ACTION_ADD} that contains added favorite item ID string
     */
    public static final String ACTION_ADD_EXTRA_DATA = ACTION_ADD + ".EXTRA_DATA";
    /**
     * {@link android.os.Bundle} key for {@link #ACTION_REMOVE} that contains the removed favorite
     * item ID string.
     * (Doc fix: the extra put by {@link #makeRemoveBroadcastIntent(String)} is a String ID,
     * not a Favorite array as previously documented.)
     */
    public static final String ACTION_REMOVE_EXTRA_DATA = ACTION_REMOVE + ".EXTRA_DATA";

    /**
     * Gets all favorites matched given query, a {@link #ACTION_GET} broadcast will be sent upon
     * completion
     * @param context an instance of {@link android.content.Context}
     * @param query query to filter stories to be retrieved; empty/null matches all
     * @see #makeGetIntentFilter()
     */
    public void get(Context context, String query) {
        final String selection;
        final String[] selectionArgs;
        if (TextUtils.isEmpty(query)) {
            selection = null;
            selectionArgs = null;
        } else {
            selection = MaterialisticProvider.FavoriteEntry.COLUMN_NAME_TITLE + " LIKE ?";
            selectionArgs = new String[]{"%" + query + "%"};
        }
        final LocalBroadcastManager broadcastManager = LocalBroadcastManager.getInstance(context);
        new AsyncQueryHandler(context.getContentResolver()) {
            @Override
            protected void onQueryComplete(int token, Object cookie, android.database.Cursor cursor) {
                super.onQueryComplete(token, cookie, cursor);
                if (cursor == null) {
                    return;
                }
                Favorite[] favorites = new Favorite[cursor.getCount()];
                Cursor favoriteCursor = new Cursor(cursor);
                try {
                    int count = 0;
                    if (favoriteCursor.moveToFirst()) {
                        do {
                            favorites[count++] = favoriteCursor.getFavorite();
                        } while (favoriteCursor.moveToNext());
                    }
                } finally {
                    // Fix: the query cursor was previously never closed (resource leak);
                    // closing the wrapper closes the wrapped cursor as well.
                    favoriteCursor.close();
                }
                broadcastManager.sendBroadcast(makeGetBroadcastIntent(favorites));
            }
        }.startQuery(0, null, MaterialisticProvider.URI_FAVORITE, null, selection, selectionArgs, null);
    }

    /**
     * Adds given story as favorite, a {@link #ACTION_ADD} broadcast will be sent upon completion
     * @param context an instance of {@link android.content.Context}
     * @param story story to be added as favorite
     * @see #makeAddIntentFilter()
     */
    public void add(Context context, final ItemManager.WebItem story) {
        final LocalBroadcastManager broadcastManager = LocalBroadcastManager.getInstance(context);
        final ContentValues contentValues = new ContentValues();
        contentValues.put(MaterialisticProvider.FavoriteEntry.COLUMN_NAME_ITEM_ID, story.getId());
        contentValues.put(MaterialisticProvider.FavoriteEntry.COLUMN_NAME_URL, story.getUrl());
        contentValues.put(MaterialisticProvider.FavoriteEntry.COLUMN_NAME_TITLE, story.getDisplayedTitle());
        contentValues.put(MaterialisticProvider.FavoriteEntry.COLUMN_NAME_TIME, String.valueOf(System.currentTimeMillis()));
        new AsyncQueryHandler(context.getContentResolver()) {
            @Override
            protected void onInsertComplete(int token, Object cookie, Uri uri) {
                super.onInsertComplete(token, cookie, uri);
                // Only broadcast when this completion corresponds to the story we inserted
                // (the story ID was passed through as the cookie).
                if (story.getId() != null && story.getId().equals(cookie)) {
                    broadcastManager.sendBroadcast(makeAddBroadcastIntent(story.getId()));
                }
            }
        }.startInsert(0, story.getId(), MaterialisticProvider.URI_FAVORITE, contentValues);
    }

    /**
     * Clears all stories matched given query from favorites, a {@link #ACTION_CLEAR} broadcast
     * will be sent upon completion
     * @param context an instance of {@link android.content.Context}
     * @param query query to filter stories to be cleared; empty/null clears all
     * @see #makeClearIntentFilter()
     */
    public void clear(Context context, String query) {
        final String selection;
        final String[] selectionArgs;
        if (TextUtils.isEmpty(query)) {
            selection = null;
            selectionArgs = null;
        } else {
            selection = MaterialisticProvider.FavoriteEntry.COLUMN_NAME_TITLE + " LIKE ?";
            selectionArgs = new String[]{"%" + query + "%"};
        }
        final LocalBroadcastManager broadcastManager = LocalBroadcastManager.getInstance(context);
        new AsyncQueryHandler(context.getContentResolver()) {
            @Override
            protected void onDeleteComplete(int token, Object cookie, int result) {
                super.onDeleteComplete(token, cookie, result);
                broadcastManager.sendBroadcast(makeClearBroadcastIntent());
            }
        }.startDelete(0, null, MaterialisticProvider.URI_FAVORITE, selection, selectionArgs);
    }

    /**
     * Checks if a story with given ID is a favorite
     * @param context an instance of {@link android.content.Context}
     * @param itemId story ID to check
     * @param callbacks listener to be informed upon checking completed
     */
    public void check(Context context, final String itemId, final OperationCallbacks callbacks) {
        if (itemId == null || callbacks == null) {
            return;
        }
        new AsyncQueryHandler(context.getContentResolver()) {
            @Override
            protected void onQueryComplete(int token, Object cookie, android.database.Cursor cursor) {
                super.onQueryComplete(token, cookie, cursor);
                // Fix: guard against a null cursor (failed query) before dereferencing it.
                if (cursor == null) {
                    return;
                }
                try {
                    // itemId is non-null (guarded at entry), so equals() is null-safe vs cookie.
                    if (itemId.equals(cookie)) {
                        callbacks.onCheckComplete(cursor.getCount() > 0);
                    }
                } finally {
                    // Fix: the cursor was previously never closed (resource leak).
                    cursor.close();
                }
            }
        }.startQuery(0, itemId, MaterialisticProvider.URI_FAVORITE, null,
                MaterialisticProvider.FavoriteEntry.COLUMN_NAME_ITEM_ID + " = ?",
                new String[]{itemId}, null);
    }

    /**
     * Removes story with given ID from favorites, a {@link #ACTION_REMOVE} broadcast will be sent
     * upon completion
     * @param context an instance of {@link android.content.Context}
     * @param itemId story ID to be removed from favorites
     * @see #makeRemoveIntentFilter()
     */
    public void remove(Context context, final String itemId) {
        if (itemId == null) {
            return;
        }
        final LocalBroadcastManager broadcastManager = LocalBroadcastManager.getInstance(context);
        new AsyncQueryHandler(context.getContentResolver()) {
            @Override
            protected void onDeleteComplete(int token, Object cookie, int result) {
                super.onDeleteComplete(token, cookie, result);
                // itemId is non-null here (guarded at entry); only verify the cookie matches.
                if (itemId.equals(cookie)) {
                    broadcastManager.sendBroadcast(makeRemoveBroadcastIntent(itemId));
                }
            }
        }.startDelete(0, itemId, MaterialisticProvider.URI_FAVORITE,
                MaterialisticProvider.FavoriteEntry.COLUMN_NAME_ITEM_ID + " = ?",
                new String[]{itemId});
    }

    /**
     * Removes multiple stories with given IDs from favorites, a {@link #ACTION_CLEAR} broadcast will
     * be sent upon completion
     * @param context an instance of {@link android.content.Context}
     * @param itemIds set of story IDs to be removed from favorites
     * @see #makeClearIntentFilter()
     */
    public void remove(Context context, Set<String> itemIds) {
        if (itemIds == null || itemIds.isEmpty()) {
            return;
        }
        final ContentResolver contentResolver = context.getContentResolver();
        final LocalBroadcastManager broadcastManager = LocalBroadcastManager.getInstance(context);
        // NOTE(review): one provider round trip per ID; a single delete with an IN () clause
        // would be more efficient — left as-is to preserve provider notification behavior.
        new AsyncTask<String, Integer, Integer>() {
            @Override
            protected Integer doInBackground(String... params) {
                int deleted = 0;
                for (String param : params) {
                    deleted += contentResolver.delete(MaterialisticProvider.URI_FAVORITE,
                            MaterialisticProvider.FavoriteEntry.COLUMN_NAME_ITEM_ID + " = ?",
                            new String[]{param});
                }
                return deleted;
            }
            @Override
            protected void onPostExecute(Integer integer) {
                super.onPostExecute(integer);
                broadcastManager.sendBroadcast(makeClearBroadcastIntent());
            }
        }.execute(itemIds.toArray(new String[itemIds.size()]));
    }

    /**
     * Creates an intent filter for clear action broadcast
     * @return clear intent filter
     * @see #remove(android.content.Context, java.util.Set)
     * @see #clear(android.content.Context, String)
     */
    public static IntentFilter makeClearIntentFilter() {
        return new IntentFilter(ACTION_CLEAR);
    }

    /**
     * Creates an intent filter for get action broadcast
     * @return get intent filter
     * @see #get(android.content.Context, String)
     */
    public static IntentFilter makeGetIntentFilter() {
        return new IntentFilter(ACTION_GET);
    }

    /**
     * Creates an intent filter for add action broadcast
     * @return add intent filter
     * @see #add(android.content.Context, io.github.hidroh.materialistic.data.ItemManager.WebItem)
     */
    public static IntentFilter makeAddIntentFilter() {
        return new IntentFilter(ACTION_ADD);
    }

    /**
     * Creates an intent filter for remove action broadcast
     * @return remove intent filter
     * @see #remove(android.content.Context, String)
     */
    public static IntentFilter makeRemoveIntentFilter() {
        return new IntentFilter(ACTION_REMOVE);
    }

    private static Intent makeClearBroadcastIntent() {
        return new Intent(ACTION_CLEAR);
    }

    private static Intent makeGetBroadcastIntent(Favorite[] favorites) {
        final Intent intent = new Intent(ACTION_GET);
        intent.putExtra(ACTION_GET_EXTRA_DATA, favorites);
        return intent;
    }

    private static Intent makeAddBroadcastIntent(String itemId) {
        final Intent intent = new Intent(ACTION_ADD);
        intent.putExtra(ACTION_ADD_EXTRA_DATA, itemId);
        return intent;
    }

    private static Intent makeRemoveBroadcastIntent(String itemId) {
        final Intent intent = new Intent(ACTION_REMOVE);
        intent.putExtra(ACTION_REMOVE_EXTRA_DATA, itemId);
        return intent;
    }

    /**
     * Represents a favorite item
     */
    public static class Favorite implements ItemManager.WebItem {
        private String itemId;
        private String url;
        private String title;
        private long time;

        public static final Creator<Favorite> CREATOR = new Creator<Favorite>() {
            @Override
            public Favorite createFromParcel(Parcel source) {
                return new Favorite(source);
            }

            @Override
            public Favorite[] newArray(int size) {
                return new Favorite[size];
            }
        };

        private Favorite(String itemId, String url, String title, long time) {
            this.itemId = itemId;
            this.url = url;
            this.title = title;
            this.time = time;
        }

        private Favorite(Parcel source) {
            itemId = source.readString();
            url = source.readString();
            title = source.readString();
            // Fix: restore the saved timestamp. Previously time was neither written nor read,
            // so a parceled Favorite rendered the epoch in getDisplayedTime().
            time = source.readLong();
        }

        @Override
        public String getUrl() {
            return url;
        }

        @Override
        public boolean isShareable() {
            return true;
        }

        @Override
        public String getId() {
            return itemId;
        }

        @Override
        public String getDisplayedTitle() {
            return title;
        }

        @Override
        public Spannable getDisplayedTime(Context context) {
            return new SpannableString(context.getString(R.string.saved,
                    DateUtils.getRelativeDateTimeString(context, time,
                            DateUtils.MINUTE_IN_MILLIS,
                            DateUtils.YEAR_IN_MILLIS,
                            DateUtils.FORMAT_ABBREV_MONTH)));
        }

        @Override
        public String getSource() {
            return TextUtils.isEmpty(url) ? null : Uri.parse(url).getHost();
        }

        @NonNull
        @Override
        public String getType() {
            // TODO treating all saved items as stories for now
            return STORY_TYPE;
        }

        @Override
        public String toString() {
            return String.format("%s - %s", title, url);
        }

        @Override
        public int describeContents() {
            return 0;
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            dest.writeString(itemId);
            dest.writeString(url);
            dest.writeString(title);
            // Fix: persist time so the parcel round trip is lossless (see Favorite(Parcel)).
            dest.writeLong(time);
        }
    }

    /**
     * A cursor wrapper to retrieve associated {@link io.github.hidroh.materialistic.data.FavoriteManager.Favorite}
     */
    public static class Cursor extends CursorWrapper {
        public Cursor(android.database.Cursor cursor) {
            super(cursor);
        }

        /**
         * Builds a {@link Favorite} from the row at the cursor's current position.
         * @return favorite at current position
         */
        public Favorite getFavorite() {
            final String itemId = getString(getColumnIndexOrThrow(MaterialisticProvider.FavoriteEntry.COLUMN_NAME_ITEM_ID));
            final String url = getString(getColumnIndexOrThrow(MaterialisticProvider.FavoriteEntry.COLUMN_NAME_URL));
            final String title = getString(getColumnIndexOrThrow(MaterialisticProvider.FavoriteEntry.COLUMN_NAME_TITLE));
            final String time = getString(getColumnIndexOrThrow(MaterialisticProvider.FavoriteEntry.COLUMN_NAME_TIME));
            // parseLong avoids the needless Long boxing of Long.valueOf
            return new Favorite(itemId, url, title, Long.parseLong(time));
        }
    }

    /**
     * A {@link android.support.v4.content.CursorLoader} to query {@link io.github.hidroh.materialistic.data.FavoriteManager.Favorite}
     */
    public static class CursorLoader extends android.support.v4.content.CursorLoader {
        /**
         * Constructs a cursor loader to query all {@link io.github.hidroh.materialistic.data.FavoriteManager.Favorite}
         * @param context an instance of {@link android.content.Context}
         */
        public CursorLoader(Context context) {
            super(context, MaterialisticProvider.URI_FAVORITE, null, null, null, null);
        }

        /**
         * Constructs a cursor loader to query {@link io.github.hidroh.materialistic.data.FavoriteManager.Favorite}
         * with title matching given query
         * @param context an instance of {@link android.content.Context}
         * @param query query to filter
         */
        public CursorLoader(Context context, String query) {
            super(context, MaterialisticProvider.URI_FAVORITE, null,
                    MaterialisticProvider.FavoriteEntry.COLUMN_NAME_TITLE + " LIKE ?",
                    new String[]{"%" + query + "%"}, null);
        }
    }

    /**
     * Callback interface for asynchronous favorite CRUD operations
     */
    public static abstract class OperationCallbacks {
        /**
         * Fired when checking of favorite status is completed
         * @param isFavorite true if is favorite, false otherwise
         */
        public void onCheckComplete(boolean isFavorite) { }
    }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.projectRoots;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.project.ProjectBundle;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.Consumer;
import com.intellij.util.IconUtil;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
 * Describes a kind of SDK (e.g. a JDK flavor) that the IDE can configure: how to locate,
 * validate and name SDK homes, and how to edit any additional SDK data.
 */
public abstract class SdkType implements SdkTypeId {
  // Fix: the extension point handle is a constant and must not be reassignable.
  public static final ExtensionPointName<SdkType> EP_NAME = ExtensionPointName.create("com.intellij.sdkType");

  private final String myName;

  /**
   * @return path to set up file chooser to or null if not applicable
   */
  @Nullable
  public abstract String suggestHomePath();

  /**
   * @return candidate home paths; defaults to the single result of {@link #suggestHomePath()}
   */
  public Collection<String> suggestHomePaths() {
    String s = suggestHomePath();
    return s == null ? Collections.<String>emptyList() : Collections.singletonList(s);
  }

  /**
   * If a path selected in the file chooser is not a valid SDK home path, returns an adjusted version of the path that is again
   * checked for validity.
   *
   * @param homePath the path selected in the file chooser.
   * @return the path to be used as the SDK home.
   */
  public String adjustSelectedSdkHome(String homePath) {
    return homePath;
  }

  public abstract boolean isValidSdkHome(String path);

  @Override
  @Nullable
  public String getVersionString(@NotNull Sdk sdk) {
    return getVersionString(sdk.getHomePath());
  }

  @Nullable
  public String getVersionString(String sdkHome) {
    return null;
  }

  public abstract String suggestSdkName(String currentSdkName, String sdkHome);

  public void setupSdkPaths(@NotNull Sdk sdk) {}

  public boolean setupSdkPaths(final Sdk sdk, final SdkModel sdkModel) {
    setupSdkPaths(sdk);
    return true;
  }

  /**
   * @return Configurable object for the sdk's additional data or null if not applicable
   */
  @Nullable
  public abstract AdditionalDataConfigurable createAdditionalDataConfigurable(SdkModel sdkModel, SdkModificator sdkModificator);

  @Nullable
  public SdkAdditionalData loadAdditionalData(Element additional) {
    return null;
  }

  @Override
  @Nullable
  public SdkAdditionalData loadAdditionalData(@NotNull Sdk currentSdk, Element additional) {
    return loadAdditionalData(additional);
  }

  public SdkType(@NotNull @NonNls String name) {
    myName = name;
  }

  @NotNull
  @Override
  public String getName() {
    return myName;
  }

  public abstract String getPresentableName();

  public Icon getIcon() {
    return null;
  }

  @NotNull
  @NonNls
  public String getHelpTopic() {
    return "preferences.jdks";
  }

  public Icon getIconForAddAction() {
    return IconUtil.getAddIcon();
  }

  // Identity is defined solely by the SDK type name.
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof SdkType)) return false;
    return myName.equals(((SdkType)o).myName);
  }

  @Override
  public int hashCode() {
    return myName.hashCode();
  }

  @Override
  public String toString() {
    return getName();
  }

  /**
   * @return a directory chooser that rejects selections which are not valid SDK homes,
   * trying {@link #adjustSelectedSdkHome(String)} before giving up.
   */
  public FileChooserDescriptor getHomeChooserDescriptor() {
    final FileChooserDescriptor descriptor = new FileChooserDescriptor(false, true, false, false, false, false) {
      @Override
      public void validateSelectedFiles(VirtualFile[] files) throws Exception {
        if (files.length != 0) {
          final String selectedPath = files[0].getPath();
          boolean valid = isValidSdkHome(selectedPath);
          if (!valid) {
            valid = isValidSdkHome(adjustSelectedSdkHome(selectedPath));
            if (!valid) {
              String message = files[0].isDirectory()
                               ? ProjectBundle.message("sdk.configure.home.invalid.error", getPresentableName())
                               : ProjectBundle.message("sdk.configure.home.file.invalid.error", getPresentableName());
              throw new Exception(message);
            }
          }
        }
      }
    };
    descriptor.setTitle(ProjectBundle.message("sdk.configure.home.title", getPresentableName()));
    return descriptor;
  }

  public String getHomeFieldLabel() {
    return ProjectBundle.message("sdk.configure.type.home.path", getPresentableName());
  }

  @Nullable
  public String getDefaultDocumentationUrl(@NotNull final Sdk sdk) {
    return null;
  }

  /**
   * @return all registered SDK types: application components plus extension-point contributions.
   */
  public static SdkType[] getAllTypes() {
    List<SdkType> allTypes = new ArrayList<SdkType>();
    Collections.addAll(allTypes, ApplicationManager.getApplication().getComponents(SdkType.class));
    Collections.addAll(allTypes, Extensions.getExtensions(EP_NAME));
    return allTypes.toArray(new SdkType[allTypes.size()]);
  }

  /**
   * Finds the registered SDK type instance of exactly the given class.
   * Asserts (and returns null with assertions disabled) when no such instance is registered.
   */
  public static <T extends SdkType> T findInstance(final Class<T> sdkTypeClass) {
    for (SdkType sdkType : Extensions.getExtensions(EP_NAME)) {
      if (sdkTypeClass.equals(sdkType.getClass())) {
        //noinspection unchecked
        return (T)sdkType;
      }
    }
    assert false;
    return null;
  }

  public boolean isRootTypeApplicable(final OrderRootType type) {
    return true;
  }

  /**
   * If this method returns true, instead of showing the standard file path chooser when a new SDK of the type is created,
   * the {@link #showCustomCreateUI} method is called.
   *
   * @return true if the custom create UI is supported, false otherwise.
   * @since 12.0
   */
  public boolean supportsCustomCreateUI() {
    return false;
  }

  /**
   * Shows the custom SDK create UI. The returned SDK needs to have the correct name and home path; the framework will call
   * setupSdkPaths() on the returned SDK.
   *
   * @param sdkModel the list of SDKs currently displayed in the configuration dialog.
   * @param parentComponent the parent component for showing the dialog.
   * @param sdkCreatedCallback the callback to which the created SDK is passed.
   * @since 12.0
   */
  public void showCustomCreateUI(SdkModel sdkModel, JComponent parentComponent, Consumer<Sdk> sdkCreatedCallback) {
  }

  /**
   * Checks if the home directory of the specified SDK is valid. By default, checks that the directory points to a valid local
   * path. Can be overridden for remote SDKs.
   *
   * @param sdk the SDK to validate the path for.
   * @return true if the home path is valid, false otherwise.
   * @since 12.1
   */
  public boolean sdkHasValidPath(@NotNull Sdk sdk) {
    VirtualFile homeDir = sdk.getHomeDirectory();
    return homeDir != null && homeDir.isValid();
  }

  public String sdkPath(VirtualFile homePath) {
    return homePath.getPath();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.yarn.server.federation.store.impl;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Calendar;
import java.util.List;
import java.util.TimeZone;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.federation.store.FederationStateStore;
import org.apache.hadoop.yarn.server.federation.store.exception.FederationStateStoreException;
import org.apache.hadoop.yarn.server.federation.store.records.AddApplicationHomeSubClusterRequest;
import org.apache.hadoop.yarn.server.federation.store.records.AddApplicationHomeSubClusterResponse;
import org.apache.hadoop.yarn.server.federation.store.records.ApplicationHomeSubCluster;
import org.apache.hadoop.yarn.server.federation.store.records.DeleteApplicationHomeSubClusterRequest;
import org.apache.hadoop.yarn.server.federation.store.records.DeleteApplicationHomeSubClusterResponse;
import org.apache.hadoop.yarn.server.federation.store.records.GetApplicationHomeSubClusterRequest;
import org.apache.hadoop.yarn.server.federation.store.records.GetApplicationHomeSubClusterResponse;
import org.apache.hadoop.yarn.server.federation.store.records.GetApplicationsHomeSubClusterRequest;
import org.apache.hadoop.yarn.server.federation.store.records.GetApplicationsHomeSubClusterResponse;
import org.apache.hadoop.yarn.server.federation.store.records.GetSubClusterInfoRequest;
import org.apache.hadoop.yarn.server.federation.store.records.GetSubClusterInfoResponse;
import org.apache.hadoop.yarn.server.federation.store.records.GetSubClusterPoliciesConfigurationsRequest;
import org.apache.hadoop.yarn.server.federation.store.records.GetSubClusterPoliciesConfigurationsResponse;
import org.apache.hadoop.yarn.server.federation.store.records.GetSubClusterPolicyConfigurationRequest;
import org.apache.hadoop.yarn.server.federation.store.records.GetSubClusterPolicyConfigurationResponse;
import org.apache.hadoop.yarn.server.federation.store.records.GetSubClustersInfoRequest;
import org.apache.hadoop.yarn.server.federation.store.records.SetSubClusterPolicyConfigurationRequest;
import org.apache.hadoop.yarn.server.federation.store.records.SetSubClusterPolicyConfigurationResponse;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterDeregisterRequest;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterHeartbeatRequest;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterId;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterInfo;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterPolicyConfiguration;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterRegisterRequest;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterRegisterResponse;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterState;
import org.apache.hadoop.yarn.server.federation.store.records.UpdateApplicationHomeSubClusterRequest;
import org.apache.hadoop.yarn.server.federation.store.records.UpdateApplicationHomeSubClusterResponse;
import org.apache.hadoop.yarn.util.MonotonicClock;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* Base class for FederationMembershipStateStore implementations.
*/
public abstract class FederationStateStoreBaseTest {
  // NOTE(review): CLOCK is unused in the visible portion of this class — confirm it is
  // used further down or remove it.
  private static final MonotonicClock CLOCK = new MonotonicClock();
  // NOTE(review): field initializer invokes the overridable createStateStore() — subclasses
  // must not depend on their own constructor state inside that factory method.
  private FederationStateStore stateStore = createStateStore();
  // Factory hook: each concrete test supplies the store implementation under test.
  protected abstract FederationStateStore createStateStore();
  // NOTE(review): conf is never assigned in the visible code, so before() passes null to
  // init() — confirm a setter/initializer exists elsewhere in this class.
  private Configuration conf;
@Before
public void before() throws IOException, YarnException {
stateStore.init(conf);
}
  // Releases store resources after each case.
  @After
  public void after() throws Exception {
    stateStore.close();
  }
// Test FederationMembershipStateStore
  // Registering a subcluster stores its info and stamps a heartbeat between the
  // timestamps taken just before and just after the call.
  @Test
  public void testRegisterSubCluster() throws Exception {
    SubClusterId subClusterId = SubClusterId.newInstance("SC");
    SubClusterInfo subClusterInfo = createSubClusterInfo(subClusterId);
    long previousTimeStamp =
        Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTimeInMillis();
    SubClusterRegisterResponse result = stateStore.registerSubCluster(
        SubClusterRegisterRequest.newInstance(subClusterInfo));
    long currentTimeStamp =
        Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTimeInMillis();
    Assert.assertNotNull(result);
    Assert.assertEquals(subClusterInfo, querySubClusterInfo(subClusterId));
    // The saved heartbeat is between the old one and the current timestamp
    Assert.assertTrue(querySubClusterInfo(subClusterId)
        .getLastHeartBeat() <= currentTimeStamp);
    Assert.assertTrue(querySubClusterInfo(subClusterId)
        .getLastHeartBeat() >= previousTimeStamp);
  }
  // Deregistering a known subcluster transitions its state to SC_UNREGISTERED.
  @Test
  public void testDeregisterSubCluster() throws Exception {
    SubClusterId subClusterId = SubClusterId.newInstance("SC");
    registerSubCluster(createSubClusterInfo(subClusterId));
    SubClusterDeregisterRequest deregisterRequest = SubClusterDeregisterRequest
        .newInstance(subClusterId, SubClusterState.SC_UNREGISTERED);
    stateStore.deregisterSubCluster(deregisterRequest);
    Assert.assertEquals(SubClusterState.SC_UNREGISTERED,
        querySubClusterInfo(subClusterId).getState());
  }
  // Deregistering an unknown subcluster must fail with a "not found" store exception.
  @Test
  public void testDeregisterSubClusterUnknownSubCluster() throws Exception {
    SubClusterId subClusterId = SubClusterId.newInstance("SC");
    SubClusterDeregisterRequest deregisterRequest = SubClusterDeregisterRequest
        .newInstance(subClusterId, SubClusterState.SC_UNREGISTERED);
    try {
      stateStore.deregisterSubCluster(deregisterRequest);
      Assert.fail();
    } catch (FederationStateStoreException e) {
      Assert.assertTrue(e.getMessage().startsWith("SubCluster SC not found"));
    }
  }
  // getSubCluster returns exactly the info that was registered.
  @Test
  public void testGetSubClusterInfo() throws Exception {
    SubClusterId subClusterId = SubClusterId.newInstance("SC");
    SubClusterInfo subClusterInfo = createSubClusterInfo(subClusterId);
    registerSubCluster(subClusterInfo);
    GetSubClusterInfoRequest request =
        GetSubClusterInfoRequest.newInstance(subClusterId);
    Assert.assertEquals(subClusterInfo,
        stateStore.getSubCluster(request).getSubClusterInfo());
  }
  // Looking up an unregistered subcluster yields a null response (not an exception).
  @Test
  public void testGetSubClusterInfoUnknownSubCluster() throws Exception {
    SubClusterId subClusterId = SubClusterId.newInstance("SC");
    GetSubClusterInfoRequest request =
        GetSubClusterInfoRequest.newInstance(subClusterId);
    GetSubClusterInfoResponse response = stateStore.getSubCluster(request);
    Assert.assertNull(response);
  }
  // With one RUNNING and one UNHEALTHY subcluster, the active-only query returns just
  // the running one while the unfiltered query returns both.
  @Test
  public void testGetAllSubClustersInfo() throws Exception {
    SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
    SubClusterInfo subClusterInfo1 = createSubClusterInfo(subClusterId1);
    SubClusterId subClusterId2 = SubClusterId.newInstance("SC2");
    SubClusterInfo subClusterInfo2 = createSubClusterInfo(subClusterId2);
    stateStore.registerSubCluster(
        SubClusterRegisterRequest.newInstance(subClusterInfo1));
    stateStore.registerSubCluster(
        SubClusterRegisterRequest.newInstance(subClusterInfo2));
    stateStore.subClusterHeartbeat(SubClusterHeartbeatRequest
        .newInstance(subClusterId1, SubClusterState.SC_RUNNING, "capability"));
    stateStore.subClusterHeartbeat(SubClusterHeartbeatRequest.newInstance(
        subClusterId2, SubClusterState.SC_UNHEALTHY, "capability"));
    List<SubClusterInfo> subClustersActive =
        stateStore.getSubClusters(GetSubClustersInfoRequest.newInstance(true))
            .getSubClusters();
    List<SubClusterInfo> subClustersAll =
        stateStore.getSubClusters(GetSubClustersInfoRequest.newInstance(false))
            .getSubClusters();
    // SC1 is the only active
    Assert.assertEquals(1, subClustersActive.size());
    SubClusterInfo sc1 = subClustersActive.get(0);
    Assert.assertEquals(subClusterId1, sc1.getSubClusterId());
    // SC1 and SC2 are the SubCluster present into the StateStore
    Assert.assertEquals(2, subClustersAll.size());
    Assert.assertTrue(subClustersAll.contains(sc1));
    subClustersAll.remove(sc1);
    SubClusterInfo sc2 = subClustersAll.get(0);
    Assert.assertEquals(subClusterId2, sc2.getSubClusterId());
  }
  // A heartbeat moves the subcluster to SC_RUNNING and advances its heartbeat timestamp
  // to a value between the previous heartbeat and "now".
  @Test
  public void testSubClusterHeartbeat() throws Exception {
    SubClusterId subClusterId = SubClusterId.newInstance("SC");
    registerSubCluster(createSubClusterInfo(subClusterId));
    long previousHeartBeat =
        querySubClusterInfo(subClusterId).getLastHeartBeat();
    SubClusterHeartbeatRequest heartbeatRequest = SubClusterHeartbeatRequest
        .newInstance(subClusterId, SubClusterState.SC_RUNNING, "capability");
    stateStore.subClusterHeartbeat(heartbeatRequest);
    long currentTimeStamp =
        Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTimeInMillis();
    Assert.assertEquals(SubClusterState.SC_RUNNING,
        querySubClusterInfo(subClusterId).getState());
    // The saved heartbeat is between the old one and the current timestamp
    Assert.assertTrue(querySubClusterInfo(subClusterId)
        .getLastHeartBeat() <= currentTimeStamp);
    Assert.assertTrue(querySubClusterInfo(subClusterId)
        .getLastHeartBeat() >= previousHeartBeat);
  }
  // Heartbeating an unregistered subcluster must fail with a descriptive store exception.
  @Test
  public void testSubClusterHeartbeatUnknownSubCluster() throws Exception {
    SubClusterId subClusterId = SubClusterId.newInstance("SC");
    SubClusterHeartbeatRequest heartbeatRequest = SubClusterHeartbeatRequest
        .newInstance(subClusterId, SubClusterState.SC_RUNNING, "capability");
    try {
      stateStore.subClusterHeartbeat(heartbeatRequest);
      Assert.fail();
    } catch (FederationStateStoreException e) {
      Assert.assertTrue(e.getMessage()
          .startsWith("SubCluster SC does not exist; cannot heartbeat"));
    }
  }
// Test FederationApplicationHomeSubClusterStore
  // Adding an application home mapping stores it and echoes the home subcluster back.
  @Test
  public void testAddApplicationHomeSubCluster() throws Exception {
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    SubClusterId subClusterId = SubClusterId.newInstance("SC");
    ApplicationHomeSubCluster ahsc =
        ApplicationHomeSubCluster.newInstance(appId, subClusterId);
    AddApplicationHomeSubClusterRequest request =
        AddApplicationHomeSubClusterRequest.newInstance(ahsc);
    AddApplicationHomeSubClusterResponse response =
        stateStore.addApplicationHomeSubCluster(request);
    Assert.assertEquals(subClusterId, response.getHomeSubCluster());
    Assert.assertEquals(subClusterId, queryApplicationHomeSC(appId));
  }
  // Adding a mapping for an app that already has one must keep the original mapping
  // and return the existing home subcluster (first-writer-wins semantics).
  @Test
  public void testAddApplicationHomeSubClusterAppAlreadyExists()
      throws Exception {
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
    addApplicationHomeSC(appId, subClusterId1);
    SubClusterId subClusterId2 = SubClusterId.newInstance("SC2");
    ApplicationHomeSubCluster ahsc2 =
        ApplicationHomeSubCluster.newInstance(appId, subClusterId2);
    AddApplicationHomeSubClusterResponse response =
        stateStore.addApplicationHomeSubCluster(
            AddApplicationHomeSubClusterRequest.newInstance(ahsc2));
    Assert.assertEquals(subClusterId1, response.getHomeSubCluster());
    Assert.assertEquals(subClusterId1, queryApplicationHomeSC(appId));
  }
  // Deleting an existing mapping succeeds, and a subsequent lookup raises "does not exist".
  @Test
  public void testDeleteApplicationHomeSubCluster() throws Exception {
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    SubClusterId subClusterId = SubClusterId.newInstance("SC");
    addApplicationHomeSC(appId, subClusterId);
    DeleteApplicationHomeSubClusterRequest delRequest =
        DeleteApplicationHomeSubClusterRequest.newInstance(appId);
    DeleteApplicationHomeSubClusterResponse response =
        stateStore.deleteApplicationHomeSubCluster(delRequest);
    Assert.assertNotNull(response);
    try {
      queryApplicationHomeSC(appId);
      Assert.fail();
    } catch (FederationStateStoreException e) {
      Assert.assertTrue(e.getMessage()
          .startsWith("Application " + appId + " does not exist"));
    }
  }
  // Deleting a mapping for an unknown application must fail with "does not exist".
  @Test
  public void testDeleteApplicationHomeSubClusterUnknownApp() throws Exception {
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    DeleteApplicationHomeSubClusterRequest delRequest =
        DeleteApplicationHomeSubClusterRequest.newInstance(appId);
    try {
      stateStore.deleteApplicationHomeSubCluster(delRequest);
      Assert.fail();
    } catch (FederationStateStoreException e) {
      Assert.assertTrue(e.getMessage()
          .startsWith("Application " + appId.toString() + " does not exist"));
    }
  }
@Test
public void testGetApplicationHomeSubCluster() throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId = SubClusterId.newInstance("SC");
addApplicationHomeSC(appId, subClusterId);
GetApplicationHomeSubClusterRequest getRequest =
GetApplicationHomeSubClusterRequest.newInstance(appId);
GetApplicationHomeSubClusterResponse result =
stateStore.getApplicationHomeSubCluster(getRequest);
Assert.assertEquals(appId,
result.getApplicationHomeSubCluster().getApplicationId());
Assert.assertEquals(subClusterId,
result.getApplicationHomeSubCluster().getHomeSubCluster());
}
@Test
public void testGetApplicationHomeSubClusterUnknownApp() throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
GetApplicationHomeSubClusterRequest request =
GetApplicationHomeSubClusterRequest.newInstance(appId);
try {
stateStore.getApplicationHomeSubCluster(request);
Assert.fail();
} catch (FederationStateStoreException e) {
Assert.assertTrue(e.getMessage()
.startsWith("Application " + appId.toString() + " does not exist"));
}
}
@Test
public void testGetApplicationsHomeSubCluster() throws Exception {
ApplicationId appId1 = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
ApplicationHomeSubCluster ahsc1 =
ApplicationHomeSubCluster.newInstance(appId1, subClusterId1);
ApplicationId appId2 = ApplicationId.newInstance(1, 2);
SubClusterId subClusterId2 = SubClusterId.newInstance("SC2");
ApplicationHomeSubCluster ahsc2 =
ApplicationHomeSubCluster.newInstance(appId2, subClusterId2);
addApplicationHomeSC(appId1, subClusterId1);
addApplicationHomeSC(appId2, subClusterId2);
GetApplicationsHomeSubClusterRequest getRequest =
GetApplicationsHomeSubClusterRequest.newInstance();
GetApplicationsHomeSubClusterResponse result =
stateStore.getApplicationsHomeSubCluster(getRequest);
Assert.assertEquals(2, result.getAppsHomeSubClusters().size());
Assert.assertTrue(result.getAppsHomeSubClusters().contains(ahsc1));
Assert.assertTrue(result.getAppsHomeSubClusters().contains(ahsc2));
}
@Test
public void testUpdateApplicationHomeSubCluster() throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
addApplicationHomeSC(appId, subClusterId1);
SubClusterId subClusterId2 = SubClusterId.newInstance("SC2");
ApplicationHomeSubCluster ahscUpdate =
ApplicationHomeSubCluster.newInstance(appId, subClusterId2);
UpdateApplicationHomeSubClusterRequest updateRequest =
UpdateApplicationHomeSubClusterRequest.newInstance(ahscUpdate);
UpdateApplicationHomeSubClusterResponse response =
stateStore.updateApplicationHomeSubCluster(updateRequest);
Assert.assertNotNull(response);
Assert.assertEquals(subClusterId2, queryApplicationHomeSC(appId));
}
@Test
public void testUpdateApplicationHomeSubClusterUnknownApp() throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
ApplicationHomeSubCluster ahsc =
ApplicationHomeSubCluster.newInstance(appId, subClusterId1);
UpdateApplicationHomeSubClusterRequest updateRequest =
UpdateApplicationHomeSubClusterRequest.newInstance(ahsc);
try {
stateStore.updateApplicationHomeSubCluster((updateRequest));
Assert.fail();
} catch (FederationStateStoreException e) {
Assert.assertTrue(e.getMessage()
.startsWith("Application " + appId.toString() + " does not exist"));
}
}
// Test FederationPolicyStore
@Test
public void testSetPolicyConfiguration() throws Exception {
SetSubClusterPolicyConfigurationRequest request =
SetSubClusterPolicyConfigurationRequest
.newInstance(createSCPolicyConf("Queue", "PolicyType"));
SetSubClusterPolicyConfigurationResponse result =
stateStore.setPolicyConfiguration(request);
Assert.assertNotNull(result);
Assert.assertEquals(createSCPolicyConf("Queue", "PolicyType"),
queryPolicy("Queue"));
}
@Test
public void testSetPolicyConfigurationUpdateExisting() throws Exception {
setPolicyConf("Queue", "PolicyType1");
SetSubClusterPolicyConfigurationRequest request2 =
SetSubClusterPolicyConfigurationRequest
.newInstance(createSCPolicyConf("Queue", "PolicyType2"));
SetSubClusterPolicyConfigurationResponse result =
stateStore.setPolicyConfiguration(request2);
Assert.assertNotNull(result);
Assert.assertEquals(createSCPolicyConf("Queue", "PolicyType2"),
queryPolicy("Queue"));
}
@Test
public void testGetPolicyConfiguration() throws Exception {
setPolicyConf("Queue", "PolicyType");
GetSubClusterPolicyConfigurationRequest getRequest =
GetSubClusterPolicyConfigurationRequest.newInstance("Queue");
GetSubClusterPolicyConfigurationResponse result =
stateStore.getPolicyConfiguration(getRequest);
Assert.assertNotNull(result);
Assert.assertEquals(createSCPolicyConf("Queue", "PolicyType"),
result.getPolicyConfiguration());
}
@Test
public void testGetPolicyConfigurationUnknownQueue() throws Exception {
GetSubClusterPolicyConfigurationRequest request =
GetSubClusterPolicyConfigurationRequest.newInstance("Queue");
GetSubClusterPolicyConfigurationResponse response =
stateStore.getPolicyConfiguration(request);
Assert.assertNull(response);
}
@Test
public void testGetPoliciesConfigurations() throws Exception {
setPolicyConf("Queue1", "PolicyType1");
setPolicyConf("Queue2", "PolicyType2");
GetSubClusterPoliciesConfigurationsResponse response =
stateStore.getPoliciesConfigurations(
GetSubClusterPoliciesConfigurationsRequest.newInstance());
Assert.assertNotNull(response);
Assert.assertNotNull(response.getPoliciesConfigs());
Assert.assertEquals(2, response.getPoliciesConfigs().size());
Assert.assertTrue(response.getPoliciesConfigs()
.contains(createSCPolicyConf("Queue1", "PolicyType1")));
Assert.assertTrue(response.getPoliciesConfigs()
.contains(createSCPolicyConf("Queue2", "PolicyType2")));
}
// Convenience methods
private SubClusterInfo createSubClusterInfo(SubClusterId subClusterId) {
String amRMAddress = "1.2.3.4:1";
String clientRMAddress = "1.2.3.4:2";
String rmAdminAddress = "1.2.3.4:3";
String webAppAddress = "1.2.3.4:4";
return SubClusterInfo.newInstance(subClusterId, amRMAddress,
clientRMAddress, rmAdminAddress, webAppAddress, SubClusterState.SC_NEW,
CLOCK.getTime(), "capability");
}
private SubClusterPolicyConfiguration createSCPolicyConf(String queueName,
String policyType) {
ByteBuffer bb = ByteBuffer.allocate(100);
bb.put((byte) 0x02);
return SubClusterPolicyConfiguration.newInstance(queueName, policyType, bb);
}
private void addApplicationHomeSC(ApplicationId appId,
SubClusterId subClusterId) throws YarnException {
ApplicationHomeSubCluster ahsc =
ApplicationHomeSubCluster.newInstance(appId, subClusterId);
AddApplicationHomeSubClusterRequest request =
AddApplicationHomeSubClusterRequest.newInstance(ahsc);
stateStore.addApplicationHomeSubCluster(request);
}
private void setPolicyConf(String queue, String policyType)
throws YarnException {
SetSubClusterPolicyConfigurationRequest request =
SetSubClusterPolicyConfigurationRequest
.newInstance(createSCPolicyConf(queue, policyType));
stateStore.setPolicyConfiguration(request);
}
private void registerSubCluster(SubClusterInfo subClusterInfo)
throws YarnException {
stateStore.registerSubCluster(
SubClusterRegisterRequest.newInstance(subClusterInfo));
}
private SubClusterInfo querySubClusterInfo(SubClusterId subClusterId)
throws YarnException {
GetSubClusterInfoRequest request =
GetSubClusterInfoRequest.newInstance(subClusterId);
return stateStore.getSubCluster(request).getSubClusterInfo();
}
private SubClusterId queryApplicationHomeSC(ApplicationId appId)
throws YarnException {
GetApplicationHomeSubClusterRequest request =
GetApplicationHomeSubClusterRequest.newInstance(appId);
GetApplicationHomeSubClusterResponse response =
stateStore.getApplicationHomeSubCluster(request);
return response.getApplicationHomeSubCluster().getHomeSubCluster();
}
private SubClusterPolicyConfiguration queryPolicy(String queue)
throws YarnException {
GetSubClusterPolicyConfigurationRequest request =
GetSubClusterPolicyConfigurationRequest.newInstance(queue);
GetSubClusterPolicyConfigurationResponse result =
stateStore.getPolicyConfiguration(request);
return result.getPolicyConfiguration();
}
  /** Injects the configuration used by concrete state-store test subclasses. */
  protected void setConf(Configuration conf) {
    this.conf = conf;
  }
  /** @return the configuration previously supplied via {@link #setConf}. */
  protected Configuration getConf() {
    return conf;
  }
}
| |
package org.apache.lucene.analysis.shingle;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.Random;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.KeywordTokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.tokenattributes.*;
public class ShingleFilterTest extends BaseTokenStreamTestCase {
public class TestTokenStream extends TokenStream {
protected int index = 0;
protected Token[] testToken;
private CharTermAttribute termAtt;
private OffsetAttribute offsetAtt;
private PositionIncrementAttribute posIncrAtt;
private TypeAttribute typeAtt;
public TestTokenStream(Token[] testToken) {
super();
this.testToken = testToken;
this.termAtt = addAttribute(CharTermAttribute.class);
this.offsetAtt = addAttribute(OffsetAttribute.class);
this.posIncrAtt = addAttribute(PositionIncrementAttribute.class);
this.typeAtt = addAttribute(TypeAttribute.class);
}
@Override
public final boolean incrementToken() {
clearAttributes();
if (index < testToken.length) {
Token t = testToken[index++];
termAtt.copyBuffer(t.buffer(), 0, t.length());
offsetAtt.setOffset(t.startOffset(), t.endOffset());
posIncrAtt.setPositionIncrement(t.getPositionIncrement());
typeAtt.setType(TypeAttribute.DEFAULT_TYPE);
return true;
} else {
return false;
}
}
}
  // Input: six contiguous tokens of "please divide this sentence into shingles".
  public static final Token[] TEST_TOKEN = new Token[] {
    createToken("please", 0, 6),
    createToken("divide", 7, 13),
    createToken("this", 14, 18),
    createToken("sentence", 19, 27),
    createToken("into", 28, 32),
    createToken("shingles", 33, 39),
  };
  // Expected position increments when only unigrams are emitted.
  public static final int[] UNIGRAM_ONLY_POSITION_INCREMENTS = new int[] {
    1, 1, 1, 1, 1, 1
  };
  // Expected token types when only unigrams are emitted.
  public static final String[] UNIGRAM_ONLY_TYPES = new String[] {
    "word", "word", "word", "word", "word", "word"
  };
  // Input with position-increment gaps ("holes"); populated by the tests.
  public static Token[] testTokenWithHoles;
  // Expected bigram+unigram output for TEST_TOKEN; each shingle shares the
  // start offset of its leading unigram.
  public static final Token[] BI_GRAM_TOKENS = new Token[] {
    createToken("please", 0, 6),
    createToken("please divide", 0, 13),
    createToken("divide", 7, 13),
    createToken("divide this", 7, 18),
    createToken("this", 14, 18),
    createToken("this sentence", 14, 27),
    createToken("sentence", 19, 27),
    createToken("sentence into", 19, 32),
    createToken("into", 28, 32),
    createToken("into shingles", 28, 39),
    createToken("shingles", 33, 39),
  };
  // Shingles are stacked on the position of their leading unigram (increment 0).
  public static final int[] BI_GRAM_POSITION_INCREMENTS = new int[] {
    1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1
  };
  public static final String[] BI_GRAM_TYPES = new String[] {
    "word", "shingle", "word", "shingle", "word", "shingle", "word",
    "shingle", "word", "shingle", "word"
  };
  // Expected output when the input contains position-increment holes;
  // missing positions surface as the "_" filler token inside shingles.
  public static final Token[] BI_GRAM_TOKENS_WITH_HOLES = new Token[] {
    createToken("please", 0, 6),
    createToken("please divide", 0, 13),
    createToken("divide", 7, 13),
    createToken("divide _", 7, 19),
    createToken("_ sentence", 19, 27),
    createToken("sentence", 19, 27),
    createToken("sentence _", 19, 33),
    createToken("_ shingles", 33, 39),
    createToken("shingles", 33, 39),
  };
  public static final int[] BI_GRAM_POSITION_INCREMENTS_WITH_HOLES = new int[] {
    1, 0, 1, 0, 1, 1, 0, 1, 1
  };
  private static final String[] BI_GRAM_TYPES_WITH_HOLES = {
    "word", "shingle",
    "word", "shingle", "shingle", "word", "shingle", "shingle", "word"
  };
  // Same expectations with unigram output disabled.
  public static final Token[] BI_GRAM_TOKENS_WITHOUT_UNIGRAMS = new Token[] {
    createToken("please divide", 0, 13),
    createToken("divide this", 7, 18),
    createToken("this sentence", 14, 27),
    createToken("sentence into", 19, 32),
    createToken("into shingles", 28, 39),
  };
  public static final int[] BI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS = new int[] {
    1, 1, 1, 1, 1
  };
  public static final String[] BI_GRAM_TYPES_WITHOUT_UNIGRAMS = new String[] {
    "shingle", "shingle", "shingle", "shingle", "shingle"
  };
  public static final Token[] BI_GRAM_TOKENS_WITH_HOLES_WITHOUT_UNIGRAMS = new Token[] {
    createToken("please divide", 0, 13),
    createToken("divide _", 7, 19),
    createToken("_ sentence", 19, 27),
    createToken("sentence _", 19, 33),
    createToken("_ shingles", 33, 39),
  };
  // NOTE(review): six increments for the five expected tokens above — the
  // last entry looks unused; verify against the consuming assertion.
  public static final int[] BI_GRAM_POSITION_INCREMENTS_WITH_HOLES_WITHOUT_UNIGRAMS = new int[] {
    1, 1, 1, 1, 1, 1
  };
  // Single-token input and its expected pass-through output.
  public static final Token[] TEST_SINGLE_TOKEN = new Token[] {
    createToken("please", 0, 6)
  };
  public static final Token[] SINGLE_TOKEN = new Token[] {
    createToken("please", 0, 6)
  };
  public static final int[] SINGLE_TOKEN_INCREMENTS = new int[] {
    1
  };
  public static final String[] SINGLE_TOKEN_TYPES = new String[] {
    "word"
  };
  // Empty-input expectations.
  public static final Token[] EMPTY_TOKEN_ARRAY = new Token[] {
  };
  public static final int[] EMPTY_TOKEN_INCREMENTS_ARRAY = new int[] {
  };
  public static final String[] EMPTY_TOKEN_TYPES_ARRAY = new String[] {
  };
  // Expected output with max shingle size 3 (unigrams + bigrams + trigrams).
  public static final Token[] TRI_GRAM_TOKENS = new Token[] {
    createToken("please", 0, 6),
    createToken("please divide", 0, 13),
    createToken("please divide this", 0, 18),
    createToken("divide", 7, 13),
    createToken("divide this", 7, 18),
    createToken("divide this sentence", 7, 27),
    createToken("this", 14, 18),
    createToken("this sentence", 14, 27),
    createToken("this sentence into", 14, 32),
    createToken("sentence", 19, 27),
    createToken("sentence into", 19, 32),
    createToken("sentence into shingles", 19, 39),
    createToken("into", 28, 32),
    createToken("into shingles", 28, 39),
    createToken("shingles", 33, 39)
  };
  public static final int[] TRI_GRAM_POSITION_INCREMENTS = new int[] {
    1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1
  };
  public static final String[] TRI_GRAM_TYPES = new String[] {
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle",
    "word"
  };
  // Same expectations with unigram output disabled.
  public static final Token[] TRI_GRAM_TOKENS_WITHOUT_UNIGRAMS = new Token[] {
    createToken("please divide", 0, 13),
    createToken("please divide this", 0, 18),
    createToken("divide this", 7, 18),
    createToken("divide this sentence", 7, 27),
    createToken("this sentence", 14, 27),
    createToken("this sentence into", 14, 32),
    createToken("sentence into", 19, 32),
    createToken("sentence into shingles", 19, 39),
    createToken("into shingles", 28, 39),
  };
  public static final int[] TRI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS = new int[] {
    1, 0, 1, 0, 1, 0, 1, 0, 1
  };
  public static final String[] TRI_GRAM_TYPES_WITHOUT_UNIGRAMS = new String[] {
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle",
  };
  // Expected output with max shingle size 4.
  public static final Token[] FOUR_GRAM_TOKENS = new Token[] {
    createToken("please", 0, 6),
    createToken("please divide", 0, 13),
    createToken("please divide this", 0, 18),
    createToken("please divide this sentence", 0, 27),
    createToken("divide", 7, 13),
    createToken("divide this", 7, 18),
    createToken("divide this sentence", 7, 27),
    createToken("divide this sentence into", 7, 32),
    createToken("this", 14, 18),
    createToken("this sentence", 14, 27),
    createToken("this sentence into", 14, 32),
    createToken("this sentence into shingles", 14, 39),
    createToken("sentence", 19, 27),
    createToken("sentence into", 19, 32),
    createToken("sentence into shingles", 19, 39),
    createToken("into", 28, 32),
    createToken("into shingles", 28, 39),
    createToken("shingles", 33, 39)
  };
  public static final int[] FOUR_GRAM_POSITION_INCREMENTS = new int[] {
    1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1
  };
  public static final String[] FOUR_GRAM_TYPES = new String[] {
    "word", "shingle", "shingle", "shingle",
    "word", "shingle", "shingle", "shingle",
    "word", "shingle", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle",
    "word"
  };
  // Same expectations with unigram output disabled.
  public static final Token[] FOUR_GRAM_TOKENS_WITHOUT_UNIGRAMS = new Token[] {
    createToken("please divide", 0, 13),
    createToken("please divide this", 0, 18),
    createToken("please divide this sentence", 0, 27),
    createToken("divide this", 7, 18),
    createToken("divide this sentence", 7, 27),
    createToken("divide this sentence into", 7, 32),
    createToken("this sentence", 14, 27),
    createToken("this sentence into", 14, 32),
    createToken("this sentence into shingles", 14, 39),
    createToken("sentence into", 19, 32),
    createToken("sentence into shingles", 19, 39),
    createToken("into shingles", 28, 39),
  };
  public static final int[] FOUR_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS = new int[] {
    1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1
  };
  public static final String[] FOUR_GRAM_TYPES_WITHOUT_UNIGRAMS = new String[] {
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle",
  };
  // Expected output with min AND max shingle size 3: only trigrams plus
  // unigrams (no bigrams); trailing words too short to shingle stay unigrams.
  public static final Token[] TRI_GRAM_TOKENS_MIN_TRI_GRAM = new Token[] {
    createToken("please", 0, 6),
    createToken("please divide this", 0, 18),
    createToken("divide", 7, 13),
    createToken("divide this sentence", 7, 27),
    createToken("this", 14, 18),
    createToken("this sentence into", 14, 32),
    createToken("sentence", 19, 27),
    createToken("sentence into shingles", 19, 39),
    createToken("into", 28, 32),
    createToken("shingles", 33, 39)
  };
  public static final int[] TRI_GRAM_POSITION_INCREMENTS_MIN_TRI_GRAM = new int[] {
    1, 0, 1, 0, 1, 0, 1, 0, 1, 1
  };
  public static final String[] TRI_GRAM_TYPES_MIN_TRI_GRAM = new String[] {
    "word", "shingle",
    "word", "shingle",
    "word", "shingle",
    "word", "shingle",
    "word",
    "word"
  };
  // Same, with unigram output disabled: trigrams only.
  public static final Token[] TRI_GRAM_TOKENS_WITHOUT_UNIGRAMS_MIN_TRI_GRAM = new Token[] {
    createToken("please divide this", 0, 18),
    createToken("divide this sentence", 7, 27),
    createToken("this sentence into", 14, 32),
    createToken("sentence into shingles", 19, 39)
  };
  public static final int[] TRI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_MIN_TRI_GRAM = new int[] {
    1, 1, 1, 1
  };
  public static final String[] TRI_GRAM_TYPES_WITHOUT_UNIGRAMS_MIN_TRI_GRAM = new String[] {
    "shingle",
    "shingle",
    "shingle",
    "shingle"
  };
  // Expected output with min shingle size 3, max shingle size 4.
  public static final Token[] FOUR_GRAM_TOKENS_MIN_TRI_GRAM = new Token[] {
    createToken("please", 0, 6),
    createToken("please divide this", 0, 18),
    createToken("please divide this sentence", 0, 27),
    createToken("divide", 7, 13),
    createToken("divide this sentence", 7, 27),
    createToken("divide this sentence into", 7, 32),
    createToken("this", 14, 18),
    createToken("this sentence into", 14, 32),
    createToken("this sentence into shingles", 14, 39),
    createToken("sentence", 19, 27),
    createToken("sentence into shingles", 19, 39),
    createToken("into", 28, 32),
    createToken("shingles", 33, 39)
  };
  public static final int[] FOUR_GRAM_POSITION_INCREMENTS_MIN_TRI_GRAM = new int[] {
    1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1
  };
  public static final String[] FOUR_GRAM_TYPES_MIN_TRI_GRAM = new String[] {
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle",
    "word",
    "word"
  };
  // Same, with unigram output disabled.
  public static final Token[] FOUR_GRAM_TOKENS_WITHOUT_UNIGRAMS_MIN_TRI_GRAM = new Token[] {
    createToken("please divide this", 0, 18),
    createToken("please divide this sentence", 0, 27),
    createToken("divide this sentence", 7, 27),
    createToken("divide this sentence into", 7, 32),
    createToken("this sentence into", 14, 32),
    createToken("this sentence into shingles", 14, 39),
    createToken("sentence into shingles", 19, 39),
  };
  public static final int[] FOUR_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_MIN_TRI_GRAM = new int[] {
    1, 0, 1, 0, 1, 0, 1
  };
  public static final String[] FOUR_GRAM_TYPES_WITHOUT_UNIGRAMS_MIN_TRI_GRAM = new String[] {
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle"
  };
  // Expected output with min AND max shingle size 4: four-grams plus unigrams.
  public static final Token[] FOUR_GRAM_TOKENS_MIN_FOUR_GRAM = new Token[] {
    createToken("please", 0, 6),
    createToken("please divide this sentence", 0, 27),
    createToken("divide", 7, 13),
    createToken("divide this sentence into", 7, 32),
    createToken("this", 14, 18),
    createToken("this sentence into shingles", 14, 39),
    createToken("sentence", 19, 27),
    createToken("into", 28, 32),
    createToken("shingles", 33, 39)
  };
  public static final int[] FOUR_GRAM_POSITION_INCREMENTS_MIN_FOUR_GRAM = new int[] {
    1, 0, 1, 0, 1, 0, 1, 1, 1
  };
  public static final String[] FOUR_GRAM_TYPES_MIN_FOUR_GRAM = new String[] {
    "word", "shingle",
    "word", "shingle",
    "word", "shingle",
    "word",
    "word",
    "word"
  };
  // Same, with unigram output disabled: four-grams only.
  public static final Token[] FOUR_GRAM_TOKENS_WITHOUT_UNIGRAMS_MIN_FOUR_GRAM = new Token[] {
    createToken("please divide this sentence", 0, 27),
    createToken("divide this sentence into", 7, 32),
    createToken("this sentence into shingles", 14, 39),
  };
  public static final int[] FOUR_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_MIN_FOUR_GRAM = new int[] {
    1, 1, 1
  };
  public static final String[] FOUR_GRAM_TYPES_WITHOUT_UNIGRAMS_MIN_FOUR_GRAM = new String[] {
    "shingle",
    "shingle",
    "shingle"
  };
  // Expected output when the token separator is the empty string: shingle
  // parts are concatenated directly.
  public static final Token[] BI_GRAM_TOKENS_NO_SEPARATOR = new Token[] {
    createToken("please", 0, 6),
    createToken("pleasedivide", 0, 13),
    createToken("divide", 7, 13),
    createToken("dividethis", 7, 18),
    createToken("this", 14, 18),
    createToken("thissentence", 14, 27),
    createToken("sentence", 19, 27),
    createToken("sentenceinto", 19, 32),
    createToken("into", 28, 32),
    createToken("intoshingles", 28, 39),
    createToken("shingles", 33, 39),
  };
  public static final int[] BI_GRAM_POSITION_INCREMENTS_NO_SEPARATOR = new int[] {
    1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1
  };
  public static final String[] BI_GRAM_TYPES_NO_SEPARATOR = new String[] {
    "word", "shingle", "word", "shingle", "word", "shingle", "word",
    "shingle", "word", "shingle", "word"
  };
  // Same, with unigram output disabled.
  public static final Token[] BI_GRAM_TOKENS_WITHOUT_UNIGRAMS_NO_SEPARATOR = new Token[] {
    createToken("pleasedivide", 0, 13),
    createToken("dividethis", 7, 18),
    createToken("thissentence", 14, 27),
    createToken("sentenceinto", 19, 32),
    createToken("intoshingles", 28, 39),
  };
  public static final int[] BI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_NO_SEPARATOR = new int[] {
    1, 1, 1, 1, 1
  };
  public static final String[] BI_GRAM_TYPES_WITHOUT_UNIGRAMS_NO_SEPARATOR = new String[] {
    "shingle", "shingle", "shingle", "shingle", "shingle"
  };
  // Trigram variant of the empty-separator expectations.
  public static final Token[] TRI_GRAM_TOKENS_NO_SEPARATOR = new Token[] {
    createToken("please", 0, 6),
    createToken("pleasedivide", 0, 13),
    createToken("pleasedividethis", 0, 18),
    createToken("divide", 7, 13),
    createToken("dividethis", 7, 18),
    createToken("dividethissentence", 7, 27),
    createToken("this", 14, 18),
    createToken("thissentence", 14, 27),
    createToken("thissentenceinto", 14, 32),
    createToken("sentence", 19, 27),
    createToken("sentenceinto", 19, 32),
    createToken("sentenceintoshingles", 19, 39),
    createToken("into", 28, 32),
    createToken("intoshingles", 28, 39),
    createToken("shingles", 33, 39)
  };
  public static final int[] TRI_GRAM_POSITION_INCREMENTS_NO_SEPARATOR = new int[] {
    1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1
  };
  public static final String[] TRI_GRAM_TYPES_NO_SEPARATOR = new String[] {
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle",
    "word"
  };
  // Same, with unigram output disabled.
  public static final Token[] TRI_GRAM_TOKENS_WITHOUT_UNIGRAMS_NO_SEPARATOR = new Token[] {
    createToken("pleasedivide", 0, 13),
    createToken("pleasedividethis", 0, 18),
    createToken("dividethis", 7, 18),
    createToken("dividethissentence", 7, 27),
    createToken("thissentence", 14, 27),
    createToken("thissentenceinto", 14, 32),
    createToken("sentenceinto", 19, 32),
    createToken("sentenceintoshingles", 19, 39),
    createToken("intoshingles", 28, 39),
  };
  public static final int[] TRI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_NO_SEPARATOR = new int[] {
    1, 0, 1, 0, 1, 0, 1, 0, 1
  };
  public static final String[] TRI_GRAM_TYPES_WITHOUT_UNIGRAMS_NO_SEPARATOR = new String[] {
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle",
  };
  // Expected output when the token separator is the literal string "<SEP>".
  public static final Token[] BI_GRAM_TOKENS_ALT_SEPARATOR = new Token[] {
    createToken("please", 0, 6),
    createToken("please<SEP>divide", 0, 13),
    createToken("divide", 7, 13),
    createToken("divide<SEP>this", 7, 18),
    createToken("this", 14, 18),
    createToken("this<SEP>sentence", 14, 27),
    createToken("sentence", 19, 27),
    createToken("sentence<SEP>into", 19, 32),
    createToken("into", 28, 32),
    createToken("into<SEP>shingles", 28, 39),
    createToken("shingles", 33, 39),
  };
  public static final int[] BI_GRAM_POSITION_INCREMENTS_ALT_SEPARATOR = new int[] {
    1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1
  };
  public static final String[] BI_GRAM_TYPES_ALT_SEPARATOR = new String[] {
    "word", "shingle", "word", "shingle", "word", "shingle", "word",
    "shingle", "word", "shingle", "word"
  };
  // Same, with unigram output disabled.
  public static final Token[] BI_GRAM_TOKENS_WITHOUT_UNIGRAMS_ALT_SEPARATOR = new Token[] {
    createToken("please<SEP>divide", 0, 13),
    createToken("divide<SEP>this", 7, 18),
    createToken("this<SEP>sentence", 14, 27),
    createToken("sentence<SEP>into", 19, 32),
    createToken("into<SEP>shingles", 28, 39),
  };
  public static final int[] BI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_ALT_SEPARATOR = new int[] {
    1, 1, 1, 1, 1
  };
  public static final String[] BI_GRAM_TYPES_WITHOUT_UNIGRAMS_ALT_SEPARATOR = new String[] {
    "shingle", "shingle", "shingle", "shingle", "shingle"
  };
  // Trigram variant of the "<SEP>" separator expectations.
  public static final Token[] TRI_GRAM_TOKENS_ALT_SEPARATOR = new Token[] {
    createToken("please", 0, 6),
    createToken("please<SEP>divide", 0, 13),
    createToken("please<SEP>divide<SEP>this", 0, 18),
    createToken("divide", 7, 13),
    createToken("divide<SEP>this", 7, 18),
    createToken("divide<SEP>this<SEP>sentence", 7, 27),
    createToken("this", 14, 18),
    createToken("this<SEP>sentence", 14, 27),
    createToken("this<SEP>sentence<SEP>into", 14, 32),
    createToken("sentence", 19, 27),
    createToken("sentence<SEP>into", 19, 32),
    createToken("sentence<SEP>into<SEP>shingles", 19, 39),
    createToken("into", 28, 32),
    createToken("into<SEP>shingles", 28, 39),
    createToken("shingles", 33, 39)
  };
  public static final int[] TRI_GRAM_POSITION_INCREMENTS_ALT_SEPARATOR = new int[] {
    1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1
  };
  public static final String[] TRI_GRAM_TYPES_ALT_SEPARATOR = new String[] {
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle",
    "word"
  };
  // Same, with unigram output disabled.
  public static final Token[] TRI_GRAM_TOKENS_WITHOUT_UNIGRAMS_ALT_SEPARATOR = new Token[] {
    createToken("please<SEP>divide", 0, 13),
    createToken("please<SEP>divide<SEP>this", 0, 18),
    createToken("divide<SEP>this", 7, 18),
    createToken("divide<SEP>this<SEP>sentence", 7, 27),
    createToken("this<SEP>sentence", 14, 27),
    createToken("this<SEP>sentence<SEP>into", 14, 32),
    createToken("sentence<SEP>into", 19, 32),
    createToken("sentence<SEP>into<SEP>shingles", 19, 39),
    createToken("into<SEP>shingles", 28, 39),
  };
  public static final int[] TRI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_ALT_SEPARATOR = new int[] {
    1, 0, 1, 0, 1, 0, 1, 0, 1
  };
  public static final String[] TRI_GRAM_TYPES_WITHOUT_UNIGRAMS_ALT_SEPARATOR = new String[] {
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle",
  };
  // Expected output when the separator is null; matches the empty-separator
  // case (parts concatenated directly).
  public static final Token[] TRI_GRAM_TOKENS_NULL_SEPARATOR = new Token[] {
    createToken("please", 0, 6),
    createToken("pleasedivide", 0, 13),
    createToken("pleasedividethis", 0, 18),
    createToken("divide", 7, 13),
    createToken("dividethis", 7, 18),
    createToken("dividethissentence", 7, 27),
    createToken("this", 14, 18),
    createToken("thissentence", 14, 27),
    createToken("thissentenceinto", 14, 32),
    createToken("sentence", 19, 27),
    createToken("sentenceinto", 19, 32),
    createToken("sentenceintoshingles", 19, 39),
    createToken("into", 28, 32),
    createToken("intoshingles", 28, 39),
    createToken("shingles", 33, 39)
  };
  public static final int[] TRI_GRAM_POSITION_INCREMENTS_NULL_SEPARATOR = new int[] {
    1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1
  };
  public static final String[] TRI_GRAM_TYPES_NULL_SEPARATOR = new String[] {
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle",
    "word"
  };
  // Input where "sentence" carries a position increment of 3 (equal to the
  // shingle size), creating two missing positions before it.
  public static final Token[] TEST_TOKEN_POS_INCR_EQUAL_TO_N = new Token[] {
    createToken("please", 0, 6),
    createToken("divide", 7, 13),
    createToken("this", 14, 18),
    createToken("sentence", 29, 37, 3),
    createToken("into", 38, 42),
    createToken("shingles", 43, 49),
  };
  // Expected trigram output: the hole positions appear as "_" fillers.
  public static final Token[] TRI_GRAM_TOKENS_POS_INCR_EQUAL_TO_N = new Token[] {
    createToken("please", 0, 6),
    createToken("please divide", 0, 13),
    createToken("please divide this", 0, 18),
    createToken("divide", 7, 13),
    createToken("divide this", 7, 18),
    createToken("divide this _", 7, 29),
    createToken("this", 14, 18),
    createToken("this _", 14, 29),
    createToken("this _ _", 14, 29),
    createToken("_ _ sentence", 29, 37),
    createToken("_ sentence", 29, 37),
    createToken("_ sentence into", 29, 42),
    createToken("sentence", 29, 37),
    createToken("sentence into", 29, 42),
    createToken("sentence into shingles", 29, 49),
    createToken("into", 38, 42),
    createToken("into shingles", 38, 49),
    createToken("shingles", 43, 49)
  };
  public static final int[] TRI_GRAM_POSITION_INCREMENTS_POS_INCR_EQUAL_TO_N = new int[] {
    1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1
  };
  public static final String[] TRI_GRAM_TYPES_POS_INCR_EQUAL_TO_N = new String[] {
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "shingle", "shingle", "shingle", "word", "shingle", "shingle",
    "word", "shingle",
    "word"
  };
  // Same, with unigram output disabled.
  public static final Token[] TRI_GRAM_TOKENS_POS_INCR_EQUAL_TO_N_WITHOUT_UNIGRAMS = new Token[] {
    createToken("please divide", 0, 13),
    createToken("please divide this", 0, 18),
    createToken("divide this", 7, 18),
    createToken("divide this _", 7, 29),
    createToken("this _", 14, 29),
    createToken("this _ _", 14, 29),
    createToken("_ _ sentence", 29, 37),
    createToken("_ sentence", 29, 37),
    createToken("_ sentence into", 29, 42),
    createToken("sentence into", 29, 42),
    createToken("sentence into shingles", 29, 49),
    createToken("into shingles", 38, 49),
  };
  // NOTE(review): thirteen increments for the twelve expected tokens above —
  // the last entry looks unused; verify against the consuming assertion.
  public static final int[] TRI_GRAM_POSITION_INCREMENTS_POS_INCR_EQUAL_TO_N_WITHOUT_UNIGRAMS = new int[] {
    1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1
  };
  public static final String[] TRI_GRAM_TYPES_POS_INCR_EQUAL_TO_N_WITHOUT_UNIGRAMS = new String[] {
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle",
    "shingle", "shingle", "shingle",
    "shingle", "shingle",
    "shingle",
  };
  // Input where "divide" carries a position increment of 8 (greater than the
  // shingle size), so only shingle-size-minus-one filler positions survive.
  public static final Token[] TEST_TOKEN_POS_INCR_GREATER_THAN_N = new Token[] {
    createToken("please", 0, 6),
    createToken("divide", 57, 63, 8),
    createToken("this", 64, 68),
    createToken("sentence", 69, 77),
    createToken("into", 78, 82),
    createToken("shingles", 83, 89),
  };
  // Expected trigram output with "_" fillers around the large gap.
  public static final Token[] TRI_GRAM_TOKENS_POS_INCR_GREATER_THAN_N = new Token[] {
    createToken("please", 0, 6),
    createToken("please _", 0, 57),
    createToken("please _ _", 0, 57),
    createToken("_ _ divide", 57, 63),
    createToken("_ divide", 57, 63),
    createToken("_ divide this", 57, 68),
    createToken("divide", 57, 63),
    createToken("divide this", 57, 68),
    createToken("divide this sentence", 57, 77),
    createToken("this", 64, 68),
    createToken("this sentence", 64, 77),
    createToken("this sentence into", 64, 82),
    createToken("sentence", 69, 77),
    createToken("sentence into", 69, 82),
    createToken("sentence into shingles", 69, 89),
    createToken("into", 78, 82),
    createToken("into shingles", 78, 89),
    createToken("shingles", 83, 89)
  };
  public static final int[] TRI_GRAM_POSITION_INCREMENTS_POS_INCR_GREATER_THAN_N = new int[] {
    1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1
  };
  public static final String[] TRI_GRAM_TYPES_POS_INCR_GREATER_THAN_N = new String[] {
    "word", "shingle", "shingle",
    "shingle",
    "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle", "shingle",
    "word", "shingle",
    "word"
  };
  // Same, with unigram output disabled.
  public static final Token[] TRI_GRAM_TOKENS_POS_INCR_GREATER_THAN_N_WITHOUT_UNIGRAMS = new Token[] {
    createToken("please _", 0, 57),
    createToken("please _ _", 0, 57),
    createToken("_ _ divide", 57, 63),
    createToken("_ divide", 57, 63),
    createToken("_ divide this", 57, 68),
    createToken("divide this", 57, 68),
    createToken("divide this sentence", 57, 77),
    createToken("this sentence", 64, 77),
    createToken("this sentence into", 64, 82),
    createToken("sentence into", 69, 82),
    createToken("sentence into shingles", 69, 89),
    createToken("into shingles", 78, 89),
  };
  public static final int[] TRI_GRAM_POSITION_INCREMENTS_POS_INCR_GREATER_THAN_N_WITHOUT_UNIGRAMS = new int[] {
    1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1
  };
public static final String[] TRI_GRAM_TYPES_POS_INCR_GREATER_THAN_N_WITHOUT_UNIGRAMS = new String[] {
"shingle", "shingle",
"shingle", "shingle",
"shingle", "shingle",
"shingle", "shingle", "shingle", "shingle", "shingle",
"shingle",
};
@Override
public void setUp() throws Exception {
super.setUp();
// Input stream with positional holes: "sentence" and "shingles" each follow
// a skipped position (position increment 2).  Rebuilt here for every test.
testTokenWithHoles = new Token[] {
createToken("please", 0, 6),
createToken("divide", 7, 13),
createToken("sentence", 19, 27, 2),
createToken("shingles", 33, 39, 2),
};
}
/*
* Class under test for void ShingleFilter(TokenStream, int)
*/
// Baseline: bigrams plus unigrams over the standard input.
public void testBiGramFilter() throws IOException {
this.shingleFilterTest(2, TEST_TOKEN, BI_GRAM_TOKENS,
BI_GRAM_POSITION_INCREMENTS, BI_GRAM_TYPES,
true);
}
// Bigrams over an input stream that skips positions (see setUp()).
public void testBiGramFilterWithHoles() throws IOException {
this.shingleFilterTest(2, testTokenWithHoles, BI_GRAM_TOKENS_WITH_HOLES,
BI_GRAM_POSITION_INCREMENTS_WITH_HOLES,
BI_GRAM_TYPES_WITH_HOLES,
true);
}
// Same as testBiGramFilter but with unigram output suppressed.
public void testBiGramFilterWithoutUnigrams() throws IOException {
this.shingleFilterTest(2, TEST_TOKEN, BI_GRAM_TOKENS_WITHOUT_UNIGRAMS,
BI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS, BI_GRAM_TYPES_WITHOUT_UNIGRAMS,
false);
}
// Positional holes combined with suppressed unigrams.
public void testBiGramFilterWithHolesWithoutUnigrams() throws IOException {
this.shingleFilterTest(2, testTokenWithHoles, BI_GRAM_TOKENS_WITH_HOLES_WITHOUT_UNIGRAMS,
BI_GRAM_POSITION_INCREMENTS_WITH_HOLES_WITHOUT_UNIGRAMS, BI_GRAM_TYPES_WITHOUT_UNIGRAMS,
false);
}
// A single input token cannot form a bigram; only the unigram is expected.
public void testBiGramFilterWithSingleToken() throws IOException {
this.shingleFilterTest(2, TEST_SINGLE_TOKEN, SINGLE_TOKEN,
SINGLE_TOKEN_INCREMENTS, SINGLE_TOKEN_TYPES,
true);
}
// With unigrams disabled, a single token is expected to yield an empty stream.
public void testBiGramFilterWithSingleTokenWithoutUnigrams() throws IOException {
this.shingleFilterTest(2, TEST_SINGLE_TOKEN, EMPTY_TOKEN_ARRAY,
EMPTY_TOKEN_INCREMENTS_ARRAY, EMPTY_TOKEN_TYPES_ARRAY,
false);
}
// Empty input stays empty, with and without unigram output.
public void testBiGramFilterWithEmptyTokenStream() throws IOException {
this.shingleFilterTest(2, EMPTY_TOKEN_ARRAY, EMPTY_TOKEN_ARRAY,
EMPTY_TOKEN_INCREMENTS_ARRAY, EMPTY_TOKEN_TYPES_ARRAY,
true);
}
public void testBiGramFilterWithEmptyTokenStreamWithoutUnigrams() throws IOException {
this.shingleFilterTest(2, EMPTY_TOKEN_ARRAY, EMPTY_TOKEN_ARRAY,
EMPTY_TOKEN_INCREMENTS_ARRAY, EMPTY_TOKEN_TYPES_ARRAY,
false);
}
// Tri-gram (max size 3) and four-gram (max size 4) variants.
public void testTriGramFilter() throws IOException {
this.shingleFilterTest(3, TEST_TOKEN, TRI_GRAM_TOKENS,
TRI_GRAM_POSITION_INCREMENTS, TRI_GRAM_TYPES,
true);
}
public void testTriGramFilterWithoutUnigrams() throws IOException {
this.shingleFilterTest(3, TEST_TOKEN, TRI_GRAM_TOKENS_WITHOUT_UNIGRAMS,
TRI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS, TRI_GRAM_TYPES_WITHOUT_UNIGRAMS,
false);
}
public void testFourGramFilter() throws IOException {
this.shingleFilterTest(4, TEST_TOKEN, FOUR_GRAM_TOKENS,
FOUR_GRAM_POSITION_INCREMENTS, FOUR_GRAM_TYPES,
true);
}
public void testFourGramFilterWithoutUnigrams() throws IOException {
this.shingleFilterTest(4, TEST_TOKEN, FOUR_GRAM_TOKENS_WITHOUT_UNIGRAMS,
FOUR_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS,
FOUR_GRAM_TYPES_WITHOUT_UNIGRAMS, false);
}
// Variants that also raise the minimum shingle size (min 3 or min 4).
public void testTriGramFilterMinTriGram() throws IOException {
this.shingleFilterTest(3, 3, TEST_TOKEN, TRI_GRAM_TOKENS_MIN_TRI_GRAM,
TRI_GRAM_POSITION_INCREMENTS_MIN_TRI_GRAM,
TRI_GRAM_TYPES_MIN_TRI_GRAM,
true);
}
public void testTriGramFilterWithoutUnigramsMinTriGram() throws IOException {
this.shingleFilterTest(3, 3, TEST_TOKEN,
TRI_GRAM_TOKENS_WITHOUT_UNIGRAMS_MIN_TRI_GRAM,
TRI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_MIN_TRI_GRAM,
TRI_GRAM_TYPES_WITHOUT_UNIGRAMS_MIN_TRI_GRAM,
false);
}
public void testFourGramFilterMinTriGram() throws IOException {
this.shingleFilterTest(3, 4, TEST_TOKEN, FOUR_GRAM_TOKENS_MIN_TRI_GRAM,
FOUR_GRAM_POSITION_INCREMENTS_MIN_TRI_GRAM,
FOUR_GRAM_TYPES_MIN_TRI_GRAM,
true);
}
public void testFourGramFilterWithoutUnigramsMinTriGram() throws IOException {
this.shingleFilterTest(3, 4, TEST_TOKEN,
FOUR_GRAM_TOKENS_WITHOUT_UNIGRAMS_MIN_TRI_GRAM,
FOUR_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_MIN_TRI_GRAM,
FOUR_GRAM_TYPES_WITHOUT_UNIGRAMS_MIN_TRI_GRAM, false);
}
public void testFourGramFilterMinFourGram() throws IOException {
this.shingleFilterTest(4, 4, TEST_TOKEN, FOUR_GRAM_TOKENS_MIN_FOUR_GRAM,
FOUR_GRAM_POSITION_INCREMENTS_MIN_FOUR_GRAM,
FOUR_GRAM_TYPES_MIN_FOUR_GRAM,
true);
}
public void testFourGramFilterWithoutUnigramsMinFourGram() throws IOException {
this.shingleFilterTest(4, 4, TEST_TOKEN,
FOUR_GRAM_TOKENS_WITHOUT_UNIGRAMS_MIN_FOUR_GRAM,
FOUR_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_MIN_FOUR_GRAM,
FOUR_GRAM_TYPES_WITHOUT_UNIGRAMS_MIN_FOUR_GRAM, false);
}
// Separator variants: "" joins shingle parts directly, "<SEP>" uses a custom
// separator, and null is exercised via the NULL_SEPARATOR expectations.
public void testBiGramFilterNoSeparator() throws IOException {
this.shingleFilterTest("", 2, 2, TEST_TOKEN, BI_GRAM_TOKENS_NO_SEPARATOR,
BI_GRAM_POSITION_INCREMENTS_NO_SEPARATOR,
BI_GRAM_TYPES_NO_SEPARATOR, true);
}
public void testBiGramFilterWithoutUnigramsNoSeparator() throws IOException {
this.shingleFilterTest("", 2, 2, TEST_TOKEN,
BI_GRAM_TOKENS_WITHOUT_UNIGRAMS_NO_SEPARATOR,
BI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_NO_SEPARATOR,
BI_GRAM_TYPES_WITHOUT_UNIGRAMS_NO_SEPARATOR,
false);
}
public void testTriGramFilterNoSeparator() throws IOException {
this.shingleFilterTest("", 2, 3, TEST_TOKEN, TRI_GRAM_TOKENS_NO_SEPARATOR,
TRI_GRAM_POSITION_INCREMENTS_NO_SEPARATOR,
TRI_GRAM_TYPES_NO_SEPARATOR, true);
}
public void testTriGramFilterWithoutUnigramsNoSeparator() throws IOException {
this.shingleFilterTest("", 2, 3, TEST_TOKEN,
TRI_GRAM_TOKENS_WITHOUT_UNIGRAMS_NO_SEPARATOR,
TRI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_NO_SEPARATOR,
TRI_GRAM_TYPES_WITHOUT_UNIGRAMS_NO_SEPARATOR, false);
}
public void testBiGramFilterAltSeparator() throws IOException {
this.shingleFilterTest("<SEP>", 2, 2, TEST_TOKEN, BI_GRAM_TOKENS_ALT_SEPARATOR,
BI_GRAM_POSITION_INCREMENTS_ALT_SEPARATOR,
BI_GRAM_TYPES_ALT_SEPARATOR, true);
}
public void testBiGramFilterWithoutUnigramsAltSeparator() throws IOException {
this.shingleFilterTest("<SEP>", 2, 2, TEST_TOKEN,
BI_GRAM_TOKENS_WITHOUT_UNIGRAMS_ALT_SEPARATOR,
BI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_ALT_SEPARATOR,
BI_GRAM_TYPES_WITHOUT_UNIGRAMS_ALT_SEPARATOR,
false);
}
public void testTriGramFilterAltSeparator() throws IOException {
this.shingleFilterTest("<SEP>", 2, 3, TEST_TOKEN, TRI_GRAM_TOKENS_ALT_SEPARATOR,
TRI_GRAM_POSITION_INCREMENTS_ALT_SEPARATOR,
TRI_GRAM_TYPES_ALT_SEPARATOR, true);
}
public void testTriGramFilterWithoutUnigramsAltSeparator() throws IOException {
this.shingleFilterTest("<SEP>", 2, 3, TEST_TOKEN,
TRI_GRAM_TOKENS_WITHOUT_UNIGRAMS_ALT_SEPARATOR,
TRI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS_ALT_SEPARATOR,
TRI_GRAM_TYPES_WITHOUT_UNIGRAMS_ALT_SEPARATOR, false);
}
public void testTriGramFilterNullSeparator() throws IOException {
this.shingleFilterTest(null, 2, 3, TEST_TOKEN, TRI_GRAM_TOKENS_NULL_SEPARATOR,
TRI_GRAM_POSITION_INCREMENTS_NULL_SEPARATOR,
TRI_GRAM_TYPES_NULL_SEPARATOR, true);
}
// Inputs whose position increments equal or exceed the max shingle size.
public void testPositionIncrementEqualToN() throws IOException {
this.shingleFilterTest(2, 3, TEST_TOKEN_POS_INCR_EQUAL_TO_N, TRI_GRAM_TOKENS_POS_INCR_EQUAL_TO_N,
TRI_GRAM_POSITION_INCREMENTS_POS_INCR_EQUAL_TO_N,
TRI_GRAM_TYPES_POS_INCR_EQUAL_TO_N, true);
}
public void testPositionIncrementEqualToNWithoutUnigrams() throws IOException {
this.shingleFilterTest(2, 3, TEST_TOKEN_POS_INCR_EQUAL_TO_N, TRI_GRAM_TOKENS_POS_INCR_EQUAL_TO_N_WITHOUT_UNIGRAMS,
TRI_GRAM_POSITION_INCREMENTS_POS_INCR_EQUAL_TO_N_WITHOUT_UNIGRAMS,
TRI_GRAM_TYPES_POS_INCR_EQUAL_TO_N_WITHOUT_UNIGRAMS, false);
}
public void testPositionIncrementGreaterThanN() throws IOException {
this.shingleFilterTest(2, 3, TEST_TOKEN_POS_INCR_GREATER_THAN_N, TRI_GRAM_TOKENS_POS_INCR_GREATER_THAN_N,
TRI_GRAM_POSITION_INCREMENTS_POS_INCR_GREATER_THAN_N,
TRI_GRAM_TYPES_POS_INCR_GREATER_THAN_N, true);
}
public void testPositionIncrementGreaterThanNWithoutUnigrams() throws IOException {
this.shingleFilterTest(2, 3, TEST_TOKEN_POS_INCR_GREATER_THAN_N, TRI_GRAM_TOKENS_POS_INCR_GREATER_THAN_N_WITHOUT_UNIGRAMS,
TRI_GRAM_POSITION_INCREMENTS_POS_INCR_GREATER_THAN_N_WITHOUT_UNIGRAMS,
TRI_GRAM_TYPES_POS_INCR_GREATER_THAN_N_WITHOUT_UNIGRAMS, false);
}
// Verifies the filter can be reused: after the tokenizer's reader is replaced
// with identical text, a second pass must produce identical output.
public void testReset() throws Exception {
Tokenizer wsTokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader("please divide this sentence"));
TokenStream filter = new ShingleFilter(wsTokenizer, 2);
assertTokenStreamContents(filter,
new String[]{"please","please divide","divide","divide this","this","this sentence","sentence"},
new int[]{0,0,7,7,14,14,19}, new int[]{6,13,13,18,18,27,27},
new String[]{TypeAttribute.DEFAULT_TYPE,"shingle",TypeAttribute.DEFAULT_TYPE,"shingle",TypeAttribute.DEFAULT_TYPE,"shingle",TypeAttribute.DEFAULT_TYPE},
new int[]{1,0,1,0,1,0,1}
);
// Second pass over the same text through the same filter instance.
wsTokenizer.setReader(new StringReader("please divide this sentence"));
assertTokenStreamContents(filter,
new String[]{"please","please divide","divide","divide this","this","this sentence","sentence"},
new int[]{0,0,7,7,14,14,19}, new int[]{6,13,13,18,18,27,27},
new String[]{TypeAttribute.DEFAULT_TYPE,"shingle",TypeAttribute.DEFAULT_TYPE,"shingle",TypeAttribute.DEFAULT_TYPE,"shingle",TypeAttribute.DEFAULT_TYPE},
new int[]{1,0,1,0,1,0,1}
);
}
// outputUnigramsIfNoShingles: emit unigrams only when no shingle can be built.
public void testOutputUnigramsIfNoShinglesSingleTokenCase() throws IOException {
// Single token input with outputUnigrams==false is the primary case where
// enabling this option should alter program behavior.
this.shingleFilterTest(2, 2, TEST_SINGLE_TOKEN, SINGLE_TOKEN,
SINGLE_TOKEN_INCREMENTS, SINGLE_TOKEN_TYPES,
false, true);
}
public void testOutputUnigramsIfNoShinglesWithSimpleBigram() throws IOException {
// Here we expect the same result as with testBiGramFilter().
this.shingleFilterTest(2, 2, TEST_TOKEN, BI_GRAM_TOKENS,
BI_GRAM_POSITION_INCREMENTS, BI_GRAM_TYPES,
true, true);
}
public void testOutputUnigramsIfNoShinglesWithSimpleUnigramlessBigram() throws IOException {
// Here we expect the same result as with testBiGramFilterWithoutUnigrams().
this.shingleFilterTest(2, 2, TEST_TOKEN, BI_GRAM_TOKENS_WITHOUT_UNIGRAMS,
BI_GRAM_POSITION_INCREMENTS_WITHOUT_UNIGRAMS, BI_GRAM_TYPES_WITHOUT_UNIGRAMS,
false, true);
}
public void testOutputUnigramsIfNoShinglesWithMultipleInputTokens() throws IOException {
// Test when the minimum shingle size is greater than the number of input tokens
this.shingleFilterTest(7, 7, TEST_TOKEN, TEST_TOKEN,
UNIGRAM_ONLY_POSITION_INCREMENTS, UNIGRAM_ONLY_TYPES,
false, true);
}
/**
 * Runs a ShingleFilter with the given maximum shingle size over
 * {@code tokensToShingle} and asserts the full expected output.
 */
protected void shingleFilterTest(int maxSize, Token[] tokensToShingle, Token[] tokensToCompare,
                                 int[] positionIncrements, String[] types,
                                 boolean outputUnigrams)
    throws IOException {
  final ShingleFilter shingleFilter =
      new ShingleFilter(new TestTokenStream(tokensToShingle), maxSize);
  shingleFilter.setOutputUnigrams(outputUnigrams);
  shingleFilterTestCommon(shingleFilter, tokensToCompare, positionIncrements, types);
}
/**
 * Runs a ShingleFilter configured with both minimum and maximum shingle sizes
 * and asserts the full expected output.
 */
protected void shingleFilterTest(int minSize, int maxSize, Token[] tokensToShingle,
                                 Token[] tokensToCompare, int[] positionIncrements,
                                 String[] types, boolean outputUnigrams)
    throws IOException {
  final ShingleFilter shingleFilter =
      new ShingleFilter(new TestTokenStream(tokensToShingle), minSize, maxSize);
  shingleFilter.setOutputUnigrams(outputUnigrams);
  shingleFilterTestCommon(shingleFilter, tokensToCompare, positionIncrements, types);
}
/**
 * Runs a ShingleFilter with min/max shingle sizes plus the
 * outputUnigramsIfNoShingles option, then asserts the full expected output.
 */
protected void shingleFilterTest(int minSize, int maxSize, Token[] tokensToShingle,
                                 Token[] tokensToCompare, int[] positionIncrements,
                                 String[] types, boolean outputUnigrams,
                                 boolean outputUnigramsIfNoShingles)
    throws IOException {
  final ShingleFilter shingleFilter =
      new ShingleFilter(new TestTokenStream(tokensToShingle), minSize, maxSize);
  shingleFilter.setOutputUnigrams(outputUnigrams);
  shingleFilter.setOutputUnigramsIfNoShingles(outputUnigramsIfNoShingles);
  shingleFilterTestCommon(shingleFilter, tokensToCompare, positionIncrements, types);
}
/**
 * Runs a ShingleFilter with a custom token separator (may be empty or null)
 * and min/max shingle sizes, then asserts the full expected output.
 */
protected void shingleFilterTest(String tokenSeparator, int minSize, int maxSize, Token[] tokensToShingle,
                                 Token[] tokensToCompare, int[] positionIncrements,
                                 String[] types, boolean outputUnigrams)
    throws IOException {
  final ShingleFilter shingleFilter =
      new ShingleFilter(new TestTokenStream(tokensToShingle), minSize, maxSize);
  shingleFilter.setTokenSeparator(tokenSeparator);
  shingleFilter.setOutputUnigrams(outputUnigrams);
  shingleFilterTestCommon(shingleFilter, tokensToCompare, positionIncrements, types);
}
/**
 * Unpacks the expected tokens into parallel term/offset arrays and delegates
 * to assertTokenStreamContents for the actual comparison.
 */
protected void shingleFilterTestCommon(ShingleFilter filter,
                                       Token[] tokensToCompare,
                                       int[] positionIncrements,
                                       String[] types)
    throws IOException {
  final int count = tokensToCompare.length;
  final String[] expectedTerms = new String[count];
  final int[] expectedStartOffsets = new int[count];
  final int[] expectedEndOffsets = new int[count];
  for (int i = 0; i < count; i++) {
    final Token expected = tokensToCompare[i];
    expectedTerms[i] = new String(expected.buffer(), 0, expected.length());
    expectedStartOffsets[i] = expected.startOffset();
    expectedEndOffsets[i] = expected.endOffset();
  }
  assertTokenStreamContents(filter, expectedTerms, expectedStartOffsets,
      expectedEndOffsets, types, positionIncrements);
}
/**
* Convenience overload of createToken(String, int, int, int) using a position
* increment of 1.
*/
private static Token createToken(String term, int start, int offset) {
return createToken(term, start, offset, 1);
}
/**
* Builds a Token with the given term text and position increment.  Despite
* its name, the third parameter ("offset") is passed to Token(int, int) as
* the token's end offset.
*/
private static Token createToken
(String term, int start, int offset, int positionIncrement)
{
Token token = new Token(start, offset);
token.copyBuffer(term.toCharArray(), 0, term.length());
token.setPositionIncrement(positionIncrement);
return token;
}
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
// Whitespace-tokenized input feeding a default-configured ShingleFilter.
Analyzer a = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
return new TokenStreamComponents(tokenizer, new ShingleFilter(tokenizer));
}
};
// 1000 * RANDOM_MULTIPLIER random documents.
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
}
/** blast some random large strings through the analyzer */
public void testRandomHugeStrings() throws Exception {
Random random = random();
Analyzer a = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
return new TokenStreamComponents(tokenizer, new ShingleFilter(tokenizer));
}
};
// Fewer iterations, larger inputs (the 8192 argument — see checkRandomData).
checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192);
}
public void testEmptyTerm() throws IOException {
// Keyword-tokenized empty input should round-trip as an empty term,
// including on analyzer reuse.
Analyzer a = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
Tokenizer tokenizer = new KeywordTokenizer(reader);
return new TokenStreamComponents(tokenizer, new ShingleFilter(tokenizer));
}
};
checkOneTermReuse(a, "", "");
}
}
| |
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.ext.wires.bpmn.client.commands.impl;
import java.util.Collections;
import org.junit.Test;
import org.uberfire.ext.wires.bpmn.api.model.impl.nodes.EndProcessNode;
import org.uberfire.ext.wires.bpmn.api.model.impl.nodes.ProcessNode;
import org.uberfire.ext.wires.bpmn.api.model.impl.nodes.StartProcessNode;
import org.uberfire.ext.wires.bpmn.api.model.impl.roles.DefaultRoleImpl;
import org.uberfire.ext.wires.bpmn.api.model.impl.rules.CardinalityRuleImpl;
import org.uberfire.ext.wires.bpmn.api.model.rules.Rule;
import org.uberfire.ext.wires.bpmn.client.AbstractBaseRuleTest;
import org.uberfire.ext.wires.bpmn.client.TestDummyNode;
import org.uberfire.ext.wires.bpmn.client.commands.CommandManager;
import org.uberfire.ext.wires.bpmn.client.commands.Results;
import org.uberfire.ext.wires.bpmn.client.rules.RuleManager;
import org.uberfire.ext.wires.bpmn.client.rules.impl.DefaultRuleManagerImpl;
import static junit.framework.Assert.*;
/**
 * Tests for {@link DefaultCommandManagerImpl}: executing AddGraphNodeCommand
 * against a ProcessNode under containment/cardinality rules, and undoing the
 * executed commands in reverse order.
 */
public class CommandManagerTest extends AbstractBaseRuleTest {

    /** A RuleManager pre-loaded with the standard containment and cardinality rules. */
    private RuleManager newDefaultRuleManager() {
        final RuleManager ruleManager = new DefaultRuleManagerImpl();
        for ( Rule rule : getContainmentRules() ) {
            ruleManager.addRule( rule );
        }
        for ( Rule rule : getCardinalityRules() ) {
            ruleManager.addRule( rule );
        }
        return ruleManager;
    }

    @Test
    public void testAddOneNodeToProcess() {
        final ProcessNode process = new ProcessNode();
        final RuleManager ruleManager = newDefaultRuleManager();
        final CommandManager commandManager = new DefaultCommandManagerImpl();
        final StartProcessNode startNode = new StartProcessNode();

        // Adding a start node should succeed with no rule violation messages.
        final Results addResults =
                commandManager.execute( ruleManager,
                                        new AddGraphNodeCommand( process,
                                                                 startNode ) );
        assertEquals( startNode,
                      process.getNode( startNode.getId() ) );
        assertNotNull( addResults );
        assertEquals( 0,
                      addResults.getMessages().size() );
        assertEquals( 1,
                      process.size() );
        assertProcessContainsNodes( process,
                                    startNode );
    }

    @Test
    public void testAddTwoNodesToProcess() {
        final ProcessNode process = new ProcessNode();
        final RuleManager ruleManager = newDefaultRuleManager();
        final CommandManager commandManager = new DefaultCommandManagerImpl();
        final StartProcessNode startNode = new StartProcessNode();
        final EndProcessNode endNode = new EndProcessNode();

        // Add the start node.
        final Results startResults =
                commandManager.execute( ruleManager,
                                        new AddGraphNodeCommand( process,
                                                                 startNode ) );
        assertEquals( startNode,
                      process.getNode( startNode.getId() ) );
        assertNotNull( startResults );
        assertEquals( 0,
                      startResults.getMessages().size() );

        // Add the end node.
        final Results endResults =
                commandManager.execute( ruleManager,
                                        new AddGraphNodeCommand( process,
                                                                 endNode ) );
        assertEquals( endNode,
                      process.getNode( endNode.getId() ) );
        assertNotNull( endResults );
        assertEquals( 0,
                      endResults.getMessages().size() );

        // Both nodes are now present.
        assertEquals( 2,
                      process.size() );
        assertProcessContainsNodes( process,
                                    startNode,
                                    endNode );
    }

    @Test
    public void testAddTwoNodesToProcessThenUndo() {
        final ProcessNode process = new ProcessNode();
        final RuleManager ruleManager = newDefaultRuleManager();
        final CommandManager commandManager = new DefaultCommandManagerImpl();
        final StartProcessNode startNode = new StartProcessNode();
        final EndProcessNode endNode = new EndProcessNode();

        // Add both nodes, checking process state after each command.
        final Results startResults =
                commandManager.execute( ruleManager,
                                        new AddGraphNodeCommand( process,
                                                                 startNode ) );
        assertNotNull( startResults );
        assertEquals( 0,
                      startResults.getMessages().size() );
        assertEquals( 1,
                      process.size() );
        assertProcessContainsNodes( process,
                                    startNode );

        final Results endResults =
                commandManager.execute( ruleManager,
                                        new AddGraphNodeCommand( process,
                                                                 endNode ) );
        assertNotNull( endResults );
        assertEquals( 0,
                      endResults.getMessages().size() );
        assertEquals( 2,
                      process.size() );
        assertProcessContainsNodes( process,
                                    startNode,
                                    endNode );

        // First undo removes the most recently added node (the end node)...
        commandManager.undo( ruleManager );
        assertEquals( 1,
                      process.size() );
        assertProcessContainsNodes( process,
                                    startNode );
        assertProcessNotContainsNodes( process,
                                       endNode );

        // ...and the second undo empties the process.
        commandManager.undo( ruleManager );
        assertEquals( 0,
                      process.size() );
        assertProcessNotContainsNodes( process,
                                       startNode,
                                       endNode );
    }

    @Test
    public void testAddNotPermittedNodesToProcess() {
        final ProcessNode process = new ProcessNode();
        final RuleManager ruleManager = new DefaultRuleManagerImpl();
        // Cardinality rule allowing at most one "dummy" node in the process.
        ruleManager.addRule( new CardinalityRuleImpl( "TestDummyNode Cardinality Rule",
                                                      new DefaultRoleImpl( "dummy" ),
                                                      0,
                                                      1,
                                                      Collections.EMPTY_SET,
                                                      Collections.EMPTY_SET ) );
        final TestDummyNode dummyNode1 = new TestDummyNode();
        final TestDummyNode dummyNode2 = new TestDummyNode();
        final CommandManager commandManager = new DefaultCommandManagerImpl();

        // The first dummy node is within the cardinality limit and is accepted.
        final Results results1 =
                commandManager.execute( ruleManager,
                                        new AddGraphNodeCommand( process,
                                                                 dummyNode1 ) );
        assertNotNull( results1 );
        assertEquals( 0,
                      results1.getMessages().size() );
        assertEquals( 1,
                      process.size() );
        assertProcessContainsNodes( process,
                                    dummyNode1 );

        // The second dummy node exceeds the maximum of 1 and is rejected with
        // one rule-violation message; the process is unchanged.
        final Results results2 =
                commandManager.execute( ruleManager,
                                        new AddGraphNodeCommand( process,
                                                                 dummyNode2 ) );
        assertNotNull( results2 );
        assertEquals( 1,
                      results2.getMessages().size() );
        assertEquals( 1,
                      process.size() );
        assertProcessContainsNodes( process,
                                    dummyNode1 );
        assertProcessNotContainsNodes( process,
                                       dummyNode2 );

        // Undoing the only successful command empties the process again.
        commandManager.undo( ruleManager );
        assertEquals( 0,
                      process.size() );
        assertProcessNotContainsNodes( process,
                                       dummyNode1,
                                       dummyNode2 );
    }
}
| |
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.log4j.Logger;
import azkaban.executor.ConnectorParams;
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableFlowBase;
import azkaban.executor.ExecutableNode;
import azkaban.executor.ExecutionOptions;
import azkaban.executor.ExecutionOptions.FailureAction;
import azkaban.executor.Executor;
import azkaban.executor.ExecutorManagerAdapter;
import azkaban.executor.ExecutorManagerException;
import azkaban.executor.Status;
import azkaban.flow.Flow;
import azkaban.project.Project;
import azkaban.project.ProjectManager;
import azkaban.scheduler.Schedule;
import azkaban.scheduler.ScheduleManager;
import azkaban.scheduler.ScheduleManagerException;
import azkaban.server.HttpRequestUtils;
import azkaban.server.session.Session;
import azkaban.user.Permission;
import azkaban.user.Permission.Type;
import azkaban.user.User;
import azkaban.user.UserManager;
import azkaban.utils.FileIOUtils.LogData;
import azkaban.utils.Pair;
import azkaban.utils.Props;
import azkaban.webapp.AzkabanWebServer;
import azkaban.webapp.plugin.PluginRegistry;
import azkaban.webapp.plugin.ViewerPlugin;
public class ExecutorServlet extends LoginAbstractAzkabanServlet {
private static final Logger LOGGER =
Logger.getLogger(ExecutorServlet.class.getName());
private static final long serialVersionUID = 1L;
// Collaborators are resolved from the AzkabanWebServer in init(), not injected.
private ProjectManager projectManager;
private ExecutorManagerAdapter executorManager;
private ScheduleManager scheduleManager;
private ExecutorVelocityHelper velocityHelper;
private UserManager userManager;
@Override
public void init(ServletConfig config) throws ServletException {
super.init(config);
// Cache the shared managers from the application once at servlet startup.
AzkabanWebServer server = (AzkabanWebServer) getApplication();
userManager = server.getUserManager();
projectManager = server.getProjectManager();
executorManager = server.getExecutorManager();
scheduleManager = server.getScheduleManager();
velocityHelper = new ExecutorVelocityHelper();
}
/**
 * GET dispatch: AJAX requests first, then execution-detail pages (job-level
 * if "job" is present, otherwise the flow page), else the executions listing.
 */
@Override
protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
    Session session) throws ServletException, IOException {
  if (hasParam(req, "ajax")) {
    handleAJAXAction(req, resp, session);
    return;
  }
  if (!hasParam(req, "execid")) {
    handleExecutionsPage(req, resp, session);
    return;
  }
  if (hasParam(req, "job")) {
    handleExecutionJobDetailsPage(req, resp, session);
  } else {
    handleExecutionFlowPage(req, resp, session);
  }
}
/**
 * Routes an AJAX request based on the "ajax" parameter.  Actions carrying an
 * "execid" parameter resolve the ExecutableFlow first and report an error in
 * the result map if it cannot be found; the remaining actions are queue/admin
 * operations or project-scoped lookups.  The accumulated result map is always
 * serialized back to the client as JSON.
 */
private void handleAJAXAction(HttpServletRequest req,
    HttpServletResponse resp, Session session) throws ServletException,
    IOException {
  HashMap<String, Object> ret = new HashMap<String, Object>();
  String ajaxName = getParam(req, "ajax");
  if (hasParam(req, "execid")) {
    // Execution-scoped actions: resolve the flow, fail fast if missing.
    int execid = getIntParam(req, "execid");
    ExecutableFlow exFlow = null;
    try {
      exFlow = executorManager.getExecutableFlow(execid);
    } catch (ExecutorManagerException e) {
      ret.put("error",
          "Error fetching execution '" + execid + "': " + e.getMessage());
    }
    if (exFlow == null) {
      ret.put("error", "Cannot find execution '" + execid + "'");
    } else {
      if (ajaxName.equals("fetchexecflow")) {
        ajaxFetchExecutableFlow(req, resp, ret, session.getUser(), exFlow);
      } else if (ajaxName.equals("fetchexecflowupdate")) {
        ajaxFetchExecutableFlowUpdate(req, resp, ret, session.getUser(),
            exFlow);
      } else if (ajaxName.equals("cancelFlow")) {
        ajaxCancelFlow(req, resp, ret, session.getUser(), exFlow);
      } else if (ajaxName.equals("pauseFlow")) {
        ajaxPauseFlow(req, resp, ret, session.getUser(), exFlow);
      } else if (ajaxName.equals("resumeFlow")) {
        ajaxResumeFlow(req, resp, ret, session.getUser(), exFlow);
      } else if (ajaxName.equals("fetchExecFlowLogs")) {
        ajaxFetchExecFlowLogs(req, resp, ret, session.getUser(), exFlow);
      } else if (ajaxName.equals("fetchExecJobLogs")) {
        ajaxFetchJobLogs(req, resp, ret, session.getUser(), exFlow);
      } else if (ajaxName.equals("fetchExecJobStats")) {
        ajaxFetchJobStats(req, resp, ret, session.getUser(), exFlow);
      } else if (ajaxName.equals("retryFailedJobs")) {
        ajaxRestartFailed(req, resp, ret, session.getUser(), exFlow);
      } else if (ajaxName.equals("flowInfo")) {
        ajaxFetchExecutableFlowInfo(req, resp, ret, session.getUser(), exFlow);
      }
    }
  } else if (ajaxName.equals("reloadExecutors")) {
    ajaxReloadExecutors(req, resp, ret, session.getUser());
  } else if (ajaxName.equals("enableQueueProcessor")) {
    ajaxUpdateQueueProcessor(req, resp, ret, session.getUser(), true);
  } else if (ajaxName.equals("disableQueueProcessor")) {
    ajaxUpdateQueueProcessor(req, resp, ret, session.getUser(), false);
  } else if (ajaxName.equals("getRunning")) {
    String projectName = getParam(req, "project");
    String flowName = getParam(req, "flow");
    ajaxGetFlowRunning(req, resp, ret, session.getUser(), projectName,
        flowName);
  } else if (ajaxName.equals("flowInfo")) {
    String projectName = getParam(req, "project");
    String flowName = getParam(req, "flow");
    ajaxFetchFlowInfo(req, resp, ret, session.getUser(), projectName,
        flowName);
  } else {
    // Fallback: remaining actions are project-scoped ("project" is required).
    String projectName = getParam(req, "project");
    ret.put("project", projectName);
    if (ajaxName.equals("executeFlow")) {
      ajaxAttemptExecuteFlow(req, resp, ret, session.getUser());
    }
  }
  // FIX: ret is allocated above and never reassigned, so the previous
  // "if (ret != null)" guard was dead code; always serialize the response.
  this.writeJSON(resp, ret);
}
/**
 * Enables the queue processor thread if {@code enableQueue} is true,
 * disables it if false.  Admin-only: non-admin callers receive an error
 * entry in {@code returnMap}.  On any failure the status entry is set to
 * {@link ConnectorParams#RESPONSE_ERROR}.
 */
private void ajaxUpdateQueueProcessor(HttpServletRequest req,
HttpServletResponse resp, HashMap<String, Object> returnMap, User user,
boolean enableQueue) {
boolean wasSuccess = false;
if (HttpRequestUtils.hasPermission(userManager, user, Type.ADMIN)) {
try {
if (enableQueue) {
executorManager.enableQueueProcessorThread();
} else {
executorManager.disableQueueProcessorThread();
}
returnMap.put(ConnectorParams.STATUS_PARAM,
ConnectorParams.RESPONSE_SUCCESS);
wasSuccess = true;
} catch (ExecutorManagerException e) {
returnMap.put(ConnectorParams.RESPONSE_ERROR, e.getMessage());
}
} else {
returnMap.put(ConnectorParams.RESPONSE_ERROR,
"Only Admins are allowed to update queue processor");
}
// Any path that did not succeed reports an error status.
if (!wasSuccess) {
returnMap.put(ConnectorParams.STATUS_PARAM,
ConnectorParams.RESPONSE_ERROR);
}
}
/**
 * Reloads executors from DB and azkaban.properties via executorManager.
 * Admin-only; on failure or missing permission the status entry is set to
 * RESPONSE_ERROR alongside an explanatory error message.
 */
private void ajaxReloadExecutors(HttpServletRequest req,
    HttpServletResponse resp, HashMap<String, Object> returnMap, User user) {
  if (!HttpRequestUtils.hasPermission(userManager, user, Type.ADMIN)) {
    returnMap.put(ConnectorParams.RESPONSE_ERROR,
        "Only Admins are allowed to refresh the executors");
    returnMap.put(ConnectorParams.STATUS_PARAM,
        ConnectorParams.RESPONSE_ERROR);
    return;
  }
  try {
    executorManager.setupExecutors();
    returnMap.put(ConnectorParams.STATUS_PARAM,
        ConnectorParams.RESPONSE_SUCCESS);
  } catch (ExecutorManagerException e) {
    returnMap.put(ConnectorParams.RESPONSE_ERROR,
        "Failed to refresh the executors " + e.getMessage());
    returnMap.put(ConnectorParams.STATUS_PARAM,
        ConnectorParams.RESPONSE_ERROR);
  }
}
@Override
protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
Session session) throws ServletException, IOException {
// POST is AJAX-only; non-AJAX POSTs fall through without writing a body.
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
}
}
/**
 * Renders the job-details page for one job of an execution.  Requires the
 * "execid" and "job" request parameters; "attempt" is optional (default 0).
 * Every error path renders the page with an "errorMsg" entry.
 */
private void handleExecutionJobDetailsPage(HttpServletRequest req,
    HttpServletResponse resp, Session session) throws ServletException,
    IOException {
  Page page =
      newPage(req, resp, session,
          "azkaban/webapp/servlet/velocity/jobdetailspage.vm");
  User user = session.getUser();
  int execId = getIntParam(req, "execid");
  String jobId = getParam(req, "job");
  int attempt = getIntParam(req, "attempt", 0);
  page.add("execid", execId);
  page.add("jobid", jobId);
  page.add("attempt", attempt);
  ExecutableFlow flow = null;
  ExecutableNode node = null;
  try {
    flow = executorManager.getExecutableFlow(execId);
    if (flow == null) {
      page.add("errorMsg", "Error loading executing flow " + execId
          + ": not found.");
      page.render();
      return;
    }
    node = flow.getExecutableNodePath(jobId);
    if (node == null) {
      page.add("errorMsg",
          "Job " + jobId + " doesn't exist in " + flow.getExecutionId());
      // FIX: this early exit previously returned without rendering, leaving
      // the response empty; render the error page like the other error paths.
      page.render();
      return;
    }
    List<ViewerPlugin> jobViewerPlugins =
        PluginRegistry.getRegistry().getViewerPluginsForJobType(
            node.getType());
    page.add("jobViewerPlugins", jobViewerPlugins);
  } catch (ExecutorManagerException e) {
    page.add("errorMsg", "Error loading executing flow: " + e.getMessage());
    page.render();
    return;
  }
  int projectId = flow.getProjectId();
  Project project =
      getProjectPageByPermission(page, projectId, user, Type.READ);
  if (project == null) {
    // Presumably getProjectPageByPermission populated an error on the page;
    // render whatever it left there.  TODO confirm against its implementation.
    page.render();
    return;
  }
  page.add("projectName", project.getName());
  page.add("flowid", flow.getId());
  page.add("parentflowid", node.getParentFlow().getFlowId());
  page.add("jobname", node.getId());
  page.render();
}
/**
 * Renders the executions overview page showing currently running flows
 * (with their executors) and recently finished flows.
 */
private void handleExecutionsPage(HttpServletRequest req,
    HttpServletResponse resp, Session session) throws ServletException,
    IOException {
  Page page =
      newPage(req, resp, session,
          "azkaban/webapp/servlet/velocity/executionspage.vm");

  List<Pair<ExecutableFlow, Executor>> running =
      executorManager.getActiveFlowsWithExecutor();
  // The template distinguishes "no data" (null) from an empty list.
  page.add("runningFlows", running.isEmpty() ? null : running);

  List<ExecutableFlow> finished = executorManager.getRecentlyFinishedFlows();
  page.add("recentlyFinished", finished.isEmpty() ? null : finished);

  page.add("vmutils", velocityHelper);
  page.render();
}
/**
 * Renders the "executing flow" page for a single execution ("execid"
 * request parameter). Requires READ permission on the owning project.
 */
private void handleExecutionFlowPage(HttpServletRequest req,
HttpServletResponse resp, Session session) throws ServletException,
IOException {
Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/executingflowpage.vm");
User user = session.getUser();
int execId = getIntParam(req, "execid");
page.add("execid", execId);
// Load the execution; render an error page if it is unknown or loading fails.
ExecutableFlow flow = null;
try {
flow = executorManager.getExecutableFlow(execId);
if (flow == null) {
page.add("errorMsg", "Error loading executing flow " + execId
+ " not found.");
page.render();
return;
}
} catch (ExecutorManagerException e) {
page.add("errorMsg", "Error loading executing flow: " + e.getMessage());
page.render();
return;
}
int projectId = flow.getProjectId();
Project project =
getProjectPageByPermission(page, projectId, user, Type.READ);
if (project == null) {
// Permission helper already attached an errorMsg to the page.
page.render();
return;
}
// Optionally surface a link to an external log-analysis tool, configured
// through server properties and resolved per request.
Props props = getApplication().getServerProps();
String execExternalLinkURL =
ExternalAnalyzerUtils.getExternalAnalyzer(props, req);
if(execExternalLinkURL.length() > 0) {
page.add("executionExternalLinkURL", execExternalLinkURL);
LOGGER.debug("Added an External analyzer to the page");
LOGGER.debug("External analyzer url: " + execExternalLinkURL);
String execExternalLinkLabel =
props.getString(ExternalAnalyzerUtils.EXECUTION_EXTERNAL_LINK_LABEL,
"External Analyzer");
page.add("executionExternalLinkLabel", execExternalLinkLabel);
LOGGER.debug("External analyzer label set to : " + execExternalLinkLabel);
}
page.add("projectId", project.getId());
page.add("projectName", project.getName());
page.add("flowid", flow.getFlowId());
page.render();
}
/**
 * Looks up a project by id and verifies the user holds the given permission.
 *
 * @return the project on success; null otherwise, in which case an
 *         "errorMsg" entry has been added to the page.
 */
protected Project getProjectPageByPermission(Page page, int projectId,
    User user, Permission.Type type) {
  Project project = projectManager.getProject(projectId);
  if (project == null) {
    // BUG FIX: the message previously concatenated the null project
    // reference ("Project null not found."); report the id instead.
    page.add("errorMsg", "Project " + projectId + " not found.");
  } else if (!hasPermission(project, user, type)) {
    page.add("errorMsg",
        "User " + user.getUserId() + " doesn't have " + type.name()
            + " permissions on " + project.getName());
  } else {
    return project;
  }

  return null;
}
/**
 * Looks up a project by name and verifies the user holds the given
 * permission.
 *
 * @return the project on success; null otherwise, in which case an "error"
 *         entry has been added to the ajax response map.
 */
protected Project getProjectAjaxByPermission(Map<String, Object> ret,
    String projectName, User user, Permission.Type type) {
  Project project = projectManager.getProject(projectName);
  if (project == null) {
    // BUG FIX: the message previously concatenated the null project
    // reference ("Project 'null' not found."); report the name instead.
    ret.put("error", "Project '" + projectName + "' not found.");
  } else if (!hasPermission(project, user, type)) {
    ret.put("error",
        "User '" + user.getUserId() + "' doesn't have " + type.name()
            + " permissions on " + project.getName());
  } else {
    return project;
  }

  return null;
}
/**
 * Looks up a project by numeric id and verifies the user holds the given
 * permission.
 *
 * @return the project on success; null otherwise, in which case an "error"
 *         entry has been added to the ajax response map.
 */
protected Project getProjectAjaxByPermission(Map<String, Object> ret,
    int projectId, User user, Permission.Type type) {
  Project project = projectManager.getProject(projectId);
  if (project == null) {
    // BUG FIX: the message previously concatenated the null project
    // reference ("Project 'null' not found."); report the id instead.
    ret.put("error", "Project '" + projectId + "' not found.");
  } else if (!hasPermission(project, user, type)) {
    ret.put("error",
        "User '" + user.getUserId() + "' doesn't have " + type.name()
            + " permissions on " + project.getName());
  } else {
    return project;
  }

  return null;
}
/**
 * Retries all failed jobs of a still-active execution. Requires EXECUTE
 * permission; a flow that already finished must be re-executed instead.
 */
private void ajaxRestartFailed(HttpServletRequest req,
    HttpServletResponse resp, HashMap<String, Object> ret, User user,
    ExecutableFlow exFlow) throws ServletException {
  final Project project =
      getProjectAjaxByPermission(ret, exFlow.getProjectId(), user,
          Type.EXECUTE);
  if (project == null) {
    return;
  }

  final Status status = exFlow.getStatus();
  if (status == Status.FAILED || status == Status.SUCCEEDED) {
    ret.put("error", "Flow has already finished. Please re-execute.");
    return;
  }

  try {
    executorManager.retryFailures(exFlow, user.getUserId());
  } catch (ExecutorManagerException e) {
    ret.put("error", e.getMessage());
  }
}
/**
 * Streams a window of the flow-level log back to the client as plain text
 * to keep memory overhead low.
 * <p>
 * The "offset" and "length" request parameters select the window; the
 * response map echoes them back together with the HTML-escaped log data.
 *
 * @throws ServletException if the log cannot be fetched
 */
private void ajaxFetchExecFlowLogs(HttpServletRequest req,
    HttpServletResponse resp, HashMap<String, Object> ret, User user,
    ExecutableFlow exFlow) throws ServletException {
  Project project =
      getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
  if (project == null) {
    return;
  }

  final int offset = getIntParam(req, "offset");
  final int length = getIntParam(req, "length");

  resp.setCharacterEncoding("utf-8");

  LogData data;
  try {
    data = executorManager.getExecutableFlowLog(exFlow, offset, length);
  } catch (ExecutorManagerException e) {
    throw new ServletException(e);
  }

  if (data == null) {
    ret.put("length", 0);
    ret.put("offset", offset);
    ret.put("data", "");
  } else {
    ret.put("length", data.getLength());
    ret.put("offset", data.getOffset());
    ret.put("data", StringEscapeUtils.escapeHtml(data.getData()));
  }
}
/**
 * Streams a window of a single job's log back to the client as plain text
 * to keep memory overhead low.
 * <p>
 * Request parameters: "jobId", "offset", "length" and optionally "attempt"
 * (defaults to the node's current attempt). The response map echoes offset
 * and length back together with the HTML-escaped log data.
 *
 * @throws ServletException if the log cannot be fetched
 */
private void ajaxFetchJobLogs(HttpServletRequest req,
    HttpServletResponse resp, HashMap<String, Object> ret, User user,
    ExecutableFlow exFlow) throws ServletException {
  Project project =
      getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
  if (project == null) {
    return;
  }

  final int offset = getIntParam(req, "offset");
  final int length = getIntParam(req, "length");
  final String jobId = getParam(req, "jobId");

  resp.setCharacterEncoding("utf-8");

  try {
    ExecutableNode node = exFlow.getExecutableNodePath(jobId);
    if (node == null) {
      ret.put("error",
          "Job " + jobId + " doesn't exist in " + exFlow.getExecutionId());
      return;
    }

    final int attempt = getIntParam(req, "attempt", node.getAttempt());
    LogData data =
        executorManager.getExecutionJobLog(exFlow, jobId, offset, length,
            attempt);
    if (data == null) {
      ret.put("length", 0);
      ret.put("offset", offset);
      ret.put("data", "");
    } else {
      ret.put("length", data.getLength());
      ret.put("offset", data.getOffset());
      ret.put("data", StringEscapeUtils.escapeHtml(data.getData()));
    }
  } catch (ExecutorManagerException e) {
    throw new ServletException(e);
  }
}
/**
 * Returns execution statistics for a single job ("jobid" request parameter)
 * under the "jobStats" key, or an "error" entry if the job is unknown or
 * the stats cannot be retrieved.
 */
private void ajaxFetchJobStats(HttpServletRequest req,
    HttpServletResponse resp, HashMap<String, Object> ret, User user,
    ExecutableFlow exFlow) throws ServletException {
  Project project =
      getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
  if (project == null) {
    return;
  }

  final String jobId = getParam(req, "jobid");
  resp.setCharacterEncoding("utf-8");

  ExecutableNode node = exFlow.getExecutableNodePath(jobId);
  if (node == null) {
    ret.put("error",
        "Job " + jobId + " doesn't exist in " + exFlow.getExecutionId());
    return;
  }

  try {
    List<Object> stats =
        executorManager.getExecutionJobStats(exFlow, jobId,
            node.getAttempt());
    ret.put("jobStats", stats);
  } catch (ExecutorManagerException e) {
    ret.put("error", "Error retrieving stats for job " + jobId);
  }
}
/**
 * Returns notification e-mail lists for a project flow and, if the flow is
 * scheduled, the next scheduled execution time under "scheduled".
 */
private void ajaxFetchFlowInfo(HttpServletRequest req,
    HttpServletResponse resp, HashMap<String, Object> ret, User user,
    String projectName, String flowId) throws ServletException {
  Project project =
      getProjectAjaxByPermission(ret, projectName, user, Type.READ);
  if (project == null) {
    return;
  }

  Flow flow = project.getFlow(flowId);
  if (flow == null) {
    ret.put("error", "Error loading flow. Flow " + flowId
        + " doesn't exist in " + projectName);
    return;
  }

  ret.put("successEmails", flow.getSuccessEmails());
  ret.put("failureEmails", flow.getFailureEmails());

  // Find the schedule (if any) registered for this project/flow pair.
  Schedule matched = null;
  try {
    for (Schedule candidate : scheduleManager.getSchedules()) {
      boolean sameProject = candidate.getProjectId() == project.getId();
      if (sameProject && candidate.getFlowName().equals(flowId)) {
        matched = candidate;
        break;
      }
    }
  } catch (ScheduleManagerException e) {
    throw new ServletException(e);
  }

  if (matched != null) {
    ret.put("scheduled", matched.getNextExecTime());
  }
}
/**
 * Returns the execution options and per-node statuses of an execution:
 * notification e-mails, flow parameters, failure action, pipeline/queue
 * settings, disabled jobs and a node-id -> status map.
 */
private void ajaxFetchExecutableFlowInfo(HttpServletRequest req,
HttpServletResponse resp, HashMap<String, Object> ret, User user,
ExecutableFlow exflow) throws ServletException {
Project project =
getProjectAjaxByPermission(ret, exflow.getProjectId(), user, Type.READ);
if (project == null) {
return;
}
Flow flow = project.getFlow(exflow.getFlowId());
if (flow == null) {
ret.put("error", "Error loading flow. Flow " + exflow.getFlowId()
+ " doesn't exist in " + exflow.getProjectId());
return;
}
ExecutionOptions options = exflow.getExecutionOptions();
ret.put("successEmails", options.getSuccessEmails());
ret.put("failureEmails", options.getFailureEmails());
ret.put("flowParam", options.getFlowParameters());
// Map the FailureAction enum onto the string values the frontend expects.
FailureAction action = options.getFailureAction();
String failureAction = null;
switch (action) {
case FINISH_CURRENTLY_RUNNING:
failureAction = "finishCurrent";
break;
case CANCEL_ALL:
failureAction = "cancelImmediately";
break;
case FINISH_ALL_POSSIBLE:
failureAction = "finishPossible";
break;
}
ret.put("failureAction", failureAction);
ret.put("notifyFailureFirst", options.getNotifyOnFirstFailure());
ret.put("notifyFailureLast", options.getNotifyOnLastFailure());
ret.put("failureEmailsOverride", options.isFailureEmailsOverridden());
ret.put("successEmailsOverride", options.isSuccessEmailsOverridden());
ret.put("concurrentOptions", options.getConcurrentOption());
ret.put("pipelineLevel", options.getPipelineLevel());
ret.put("pipelineExecution", options.getPipelineExecutionId());
ret.put("queueLevel", options.getQueueLevel());
// Snapshot of every node's current status, keyed by node id.
HashMap<String, String> nodeStatus = new HashMap<String, String>();
for (ExecutableNode node : exflow.getExecutableNodes()) {
nodeStatus.put(node.getId(), node.getStatus().toString());
}
ret.put("nodeStatus", nodeStatus);
ret.put("disabled", options.getDisabledJobs());
}
/**
 * Cancels a running execution on behalf of the user. Requires EXECUTE
 * permission; failures are reported under the "error" key.
 */
private void ajaxCancelFlow(HttpServletRequest req, HttpServletResponse resp,
    HashMap<String, Object> ret, User user, ExecutableFlow exFlow)
    throws ServletException {
  final Project project =
      getProjectAjaxByPermission(ret, exFlow.getProjectId(), user,
          Type.EXECUTE);
  if (project == null) {
    return;
  }

  try {
    executorManager.cancelFlow(exFlow, user.getUserId());
  } catch (ExecutorManagerException e) {
    ret.put("error", e.getMessage());
  }
}
/**
 * Returns the execution ids of currently running instances of a flow under
 * the "execIds" key. Nothing is added when no instance is running.
 */
private void ajaxGetFlowRunning(HttpServletRequest req,
    HttpServletResponse resp, HashMap<String, Object> ret, User user,
    String projectId, String flowId) throws ServletException {
  final Project project =
      getProjectAjaxByPermission(ret, projectId, user, Type.EXECUTE);
  if (project == null) {
    return;
  }

  List<Integer> executions =
      executorManager.getRunningFlows(project.getId(), flowId);
  if (executions.isEmpty()) {
    return;
  }
  ret.put("execIds", executions);
}
/**
 * Pauses a running execution on behalf of the user. Requires EXECUTE
 * permission; failures are reported under the "error" key.
 */
private void ajaxPauseFlow(HttpServletRequest req, HttpServletResponse resp,
    HashMap<String, Object> ret, User user, ExecutableFlow exFlow)
    throws ServletException {
  final Project project =
      getProjectAjaxByPermission(ret, exFlow.getProjectId(), user,
          Type.EXECUTE);
  if (project == null) {
    return;
  }

  try {
    executorManager.pauseFlow(exFlow, user.getUserId());
  } catch (ExecutorManagerException e) {
    ret.put("error", e.getMessage());
  }
}
/**
 * Resumes a paused execution on behalf of the user. Requires EXECUTE
 * permission on the flow's project.
 */
private void ajaxResumeFlow(HttpServletRequest req, HttpServletResponse resp,
HashMap<String, Object> ret, User user, ExecutableFlow exFlow)
throws ServletException {
Project project =
getProjectAjaxByPermission(ret, exFlow.getProjectId(), user,
Type.EXECUTE);
if (project == null) {
return;
}
try {
executorManager.resumeFlow(exFlow, user.getUserId());
} catch (ExecutorManagerException e) {
// NOTE(review): the failure is reported under the "resume" key, unlike
// the sibling pause/cancel handlers which use "error". Confirm whether
// the frontend depends on this key before unifying.
ret.put("resume", e.getMessage());
}
}
/**
 * Recursively builds the delta view of a node tree: a node contributes its
 * fields only if it was updated after lastUpdateTime or any of its children
 * contributed. An empty map means "nothing changed in this subtree".
 */
private Map<String, Object> getExecutableFlowUpdateInfo(ExecutableNode node,
    long lastUpdateTime) {
  final HashMap<String, Object> result = new HashMap<String, Object>();

  if (node instanceof ExecutableFlowBase) {
    final ExecutableFlowBase subFlow = (ExecutableFlowBase) node;
    final ArrayList<Map<String, Object>> changedChildren =
        new ArrayList<Map<String, Object>>();
    for (ExecutableNode child : subFlow.getExecutableNodes()) {
      final Map<String, Object> childInfo =
          getExecutableFlowUpdateInfo(child, lastUpdateTime);
      if (!childInfo.isEmpty()) {
        changedChildren.add(childInfo);
      }
    }
    if (!changedChildren.isEmpty()) {
      result.put("flow", subFlow.getFlowId());
      result.put("nodes", changedChildren);
    }
  }

  // Include this node's own fields if it changed, or if a descendant did
  // (the parent must carry identifying fields alongside its "nodes" list).
  if (node.getUpdateTime() > lastUpdateTime || !result.isEmpty()) {
    result.put("id", node.getId());
    result.put("status", node.getStatus());
    result.put("startTime", node.getStartTime());
    result.put("endTime", node.getEndTime());
    result.put("updateTime", node.getUpdateTime());
    result.put("attempt", node.getAttempt());
    if (node.getAttempt() > 0) {
      result.put("pastAttempts", node.getAttemptObjects());
    }
  }

  return result;
}
/**
 * Recursively serializes a node (and, for embedded flows, its whole
 * subtree) into the map shape consumed by the ajax flow views.
 */
private Map<String, Object> getExecutableNodeInfo(ExecutableNode node) {
  final HashMap<String, Object> info = new HashMap<String, Object>();
  info.put("id", node.getId());
  info.put("status", node.getStatus());
  info.put("startTime", node.getStartTime());
  info.put("endTime", node.getEndTime());
  info.put("updateTime", node.getUpdateTime());
  info.put("type", node.getType());
  info.put("nestedId", node.getNestedId());
  info.put("attempt", node.getAttempt());

  if (node.getAttempt() > 0) {
    info.put("pastAttempts", node.getAttemptObjects());
  }
  if (node.getInNodes() != null && !node.getInNodes().isEmpty()) {
    info.put("in", node.getInNodes());
  }

  if (node instanceof ExecutableFlowBase) {
    final ExecutableFlowBase subFlow = (ExecutableFlowBase) node;
    final ArrayList<Map<String, Object>> children =
        new ArrayList<Map<String, Object>>();
    for (ExecutableNode child : subFlow.getExecutableNodes()) {
      final Map<String, Object> childInfo = getExecutableNodeInfo(child);
      if (!childInfo.isEmpty()) {
        children.add(childInfo);
      }
    }
    // "flow" and "flowId" intentionally carry the same value; different
    // frontend consumers read different keys.
    info.put("flow", subFlow.getFlowId());
    info.put("nodes", children);
    info.put("flowId", subFlow.getFlowId());
  }

  return info;
}
/**
 * Ajax handler returning the delta of an execution's node states since the
 * client-supplied "lastUpdateTime" timestamp, plus the flow-level status
 * and timing fields.
 */
private void ajaxFetchExecutableFlowUpdate(HttpServletRequest req,
    HttpServletResponse resp, HashMap<String, Object> ret, User user,
    ExecutableFlow exFlow) throws ServletException {
  // Primitive long instead of boxed Long: no nullability is needed and it
  // avoids pointless autoboxing.
  long lastUpdateTime = Long.parseLong(getParam(req, "lastUpdateTime"));
  // Route through the class logger instead of System.out so the message
  // respects the configured logging pipeline.
  LOGGER.debug("Fetching " + exFlow.getExecutionId());

  Project project =
      getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
  if (project == null) {
    return;
  }

  Map<String, Object> map =
      getExecutableFlowUpdateInfo(exFlow, lastUpdateTime);
  map.put("status", exFlow.getStatus());
  map.put("startTime", exFlow.getStartTime());
  map.put("endTime", exFlow.getEndTime());
  map.put("updateTime", exFlow.getUpdateTime());
  ret.putAll(map);
}
/**
 * Ajax handler returning the full serialized state of an execution:
 * submit metadata plus the recursive node tree from
 * {@code getExecutableNodeInfo}.
 */
private void ajaxFetchExecutableFlow(HttpServletRequest req,
    HttpServletResponse resp, HashMap<String, Object> ret, User user,
    ExecutableFlow exFlow) throws ServletException {
  // Route through the class logger instead of System.out so the message
  // respects the configured logging pipeline.
  LOGGER.debug("Fetching " + exFlow.getExecutionId());

  Project project =
      getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
  if (project == null) {
    return;
  }

  ret.put("submitTime", exFlow.getSubmitTime());
  ret.put("submitUser", exFlow.getSubmitUser());
  ret.put("execid", exFlow.getExecutionId());
  ret.put("projectId", exFlow.getProjectId());
  ret.put("project", project.getName());

  Map<String, Object> flowObj = getExecutableNodeInfo(exFlow);
  ret.putAll(flowObj);
}
/**
 * Validates that the requested project and flow exist and that the user
 * has EXECUTE permission, then delegates to {@link #ajaxExecuteFlow}.
 */
private void ajaxAttemptExecuteFlow(HttpServletRequest req,
    HttpServletResponse resp, HashMap<String, Object> ret, User user)
    throws ServletException {
  final String projectName = getParam(req, "project");
  final String flowId = getParam(req, "flow");

  final Project project =
      getProjectAjaxByPermission(ret, projectName, user, Type.EXECUTE);
  if (project == null) {
    ret.put("error", "Project '" + projectName + "' doesn't exist.");
    return;
  }

  ret.put("flow", flowId);
  final Flow flow = project.getFlow(flowId);
  if (flow == null) {
    ret.put("error", "Flow '" + flowId + "' cannot be found in project "
        + project);
    return;
  }

  ajaxExecuteFlow(req, resp, ret, user);
}
/**
 * Creates and submits a new execution of the requested project flow.
 * Requires EXECUTE permission. On success the response map carries the
 * submission message and the new "execid"; on failure an "error" entry.
 */
private void ajaxExecuteFlow(HttpServletRequest req,
    HttpServletResponse resp, HashMap<String, Object> ret, User user)
    throws ServletException {
  String projectName = getParam(req, "project");
  String flowId = getParam(req, "flow");

  Project project =
      getProjectAjaxByPermission(ret, projectName, user, Type.EXECUTE);
  if (project == null) {
    ret.put("error", "Project '" + projectName + "' doesn't exist.");
    return;
  }

  ret.put("flow", flowId);
  Flow flow = project.getFlow(flowId);
  if (flow == null) {
    ret.put("error", "Flow '" + flowId + "' cannot be found in project "
        + project);
    return;
  }

  ExecutableFlow exflow = new ExecutableFlow(project, flow);
  exflow.setSubmitUser(user.getUserId());
  exflow.addAllProxyUsers(project.getProxyUsers());

  ExecutionOptions options = HttpRequestUtils.parseFlowOptions(req);
  exflow.setExecutionOptions(options);
  // Fall back to the flow-level notification lists unless the request
  // explicitly overrode them.
  if (!options.isFailureEmailsOverridden()) {
    options.setFailureEmails(flow.getFailureEmails());
  }
  if (!options.isSuccessEmailsOverridden()) {
    options.setSuccessEmails(flow.getSuccessEmails());
  }
  options.setMailCreator(flow.getMailCreator());

  try {
    HttpRequestUtils.filterAdminOnlyFlowParams(userManager, options, user);
    String message =
        executorManager.submitExecutableFlow(exflow, user.getUserId());
    ret.put("message", message);
  } catch (Exception e) {
    // BUG FIX: was e.printStackTrace() — log through the class logger so
    // the full stack trace lands in the configured log, not stderr.
    LOGGER.error("Error submitting flow " + exflow.getFlowId(), e);
    ret.put("error",
        "Error submitting flow " + exflow.getFlowId() + ". " + e.getMessage());
  }

  ret.put("execid", exflow.getExecutionId());
}
/**
 * Velocity template helper exposing project-name lookups to the
 * executions pages. Falls back to the stringified id when the project is
 * no longer known to the project manager.
 */
public class ExecutorVelocityHelper {
  public String getProjectName(int id) {
    final Project project = projectManager.getProject(id);
    return project == null ? String.valueOf(id) : project.getName();
  }
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security;
import org.elasticsearch.client.security.user.privileges.IndicesPrivileges;
import org.elasticsearch.client.security.user.privileges.Role;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.equalTo;
/**
 * Tests for {@code GetRolesResponse}: XContent parsing plus the
 * equals/hashCode contract (including mutation-based inequality).
 */
public class GetRolesResponseTests extends ESTestCase {

    /** Parses a canned JSON role document and verifies every parsed field. */
    public void testFromXContent() throws IOException {
        String json =
            "{\n" +
                "  \"my_admin_role\": {\n" +
                "    \"cluster\" : [ \"all\" ],\n" +
                "    \"indices\" : [\n" +
                "      {\n" +
                "        \"names\" : [ \"index1\", \"index2\" ],\n" +
                "        \"privileges\" : [ \"all\" ],\n" +
                "        \"allow_restricted_indices\" : true,\n" +
                "        \"field_security\" : {\n" +
                "          \"grant\" : [ \"title\", \"body\" ]}\n" +
                "      }\n" +
                "    ],\n" +
                "    \"applications\" : [ ],\n" +
                "    \"run_as\" : [ \"other_user\" ],\n" +
                "    \"metadata\" : {\n" +
                "      \"version\" : 1\n" +
                "    },\n" +
                "    \"transient_metadata\" : {\n" +
                "      \"enabled\" : true\n" +
                "    }\n" +
                "  }\n" +
                "}";
        final GetRolesResponse response = GetRolesResponse.fromXContent((XContentType.JSON.xContent().createParser(
            new NamedXContentRegistry(Collections.emptyList()), new DeprecationHandler() {
                @Override
                public void usedDeprecatedName(String usedName, String modernName) {
                }

                @Override
                public void usedDeprecatedField(String usedName, String replacedWith) {
                }

                @Override
                public void deprecated(String message, Object... params) {
                }
            }, json)));
        assertThat(response.getRoles().size(), equalTo(1));
        assertThat(response.getTransientMetadataMap().size(), equalTo(1));
        final Role role = response.getRoles().get(0);
        assertThat(role.getName(), equalTo("my_admin_role"));
        assertThat(role.getClusterPrivileges().size(), equalTo(1));
        IndicesPrivileges expectedIndicesPrivileges = new IndicesPrivileges.Builder()
            .indices("index1", "index2")
            .privileges("all")
            .grantedFields("title", "body")
            .allowRestrictedIndices(true)
            .build();
        assertThat(role.getIndicesPrivileges().contains(expectedIndicesPrivileges), equalTo(true));
        final Map<String, Object> expectedMetadata = new HashMap<>();
        expectedMetadata.put("version", 1);
        final Map<String, Object> expectedTransientMetadata = new HashMap<>();
        expectedTransientMetadata.put("enabled", true);
        assertThat(response.getTransientMetadataMap().get(role.getName()), equalTo(expectedTransientMetadata));
        final Role expectedRole = Role.builder()
            .name("my_admin_role")
            .clusterPrivileges("all")
            .indicesPrivileges(expectedIndicesPrivileges)
            .runAsPrivilege("other_user")
            .metadata(expectedMetadata)
            .build();
        assertThat(role, equalTo(expectedRole));
    }

    /**
     * Checks the equals/hashCode contract for a response containing two
     * distinct roles with distinct transient metadata.
     */
    public void testEqualsHashCode() {
        final List<Role> roles = new ArrayList<>();
        final Map<String, Map<String, Object>> transientMetadataMap = new HashMap<>();
        IndicesPrivileges indicesPrivileges = new IndicesPrivileges.Builder()
            .indices("index1", "index2")
            .privileges("write", "monitor", "delete")
            .grantedFields("field1", "field2")
            .deniedFields("field3", "field4")
            .allowRestrictedIndices(true)
            .build();
        Map<String, Object> metadata = new HashMap<>();
        metadata.put("key", "value");
        final Role role = Role.builder()
            .name("role_name")
            .clusterPrivileges("monitor", "manage", "manage_saml")
            .indicesPrivileges(indicesPrivileges)
            .runAsPrivilege("run_as_user")
            .metadata(metadata)
            .build();
        roles.add(role);
        Map<String, Object> transientMetadata = new HashMap<>();
        transientMetadata.put("transient_key", "transient_value");
        transientMetadataMap.put(role.getName(), transientMetadata);
        IndicesPrivileges indicesPrivileges2 = new IndicesPrivileges.Builder()
            .indices("other_index1", "other_index2")
            .privileges("write", "monitor", "delete")
            .grantedFields("other_field1", "other_field2")
            .deniedFields("other_field3", "other_field4")
            .allowRestrictedIndices(false)
            .build();
        Map<String, Object> metadata2 = new HashMap<>();
        metadata2.put("other_key", "other_value");
        final Role role2 = Role.builder()
            .name("role2_name")
            .clusterPrivileges("monitor", "manage", "manage_saml")
            .indicesPrivileges(indicesPrivileges2)
            .runAsPrivilege("other_run_as_user")
            .metadata(metadata2)
            .build();
        roles.add(role2);
        Map<String, Object> transientMetadata2 = new HashMap<>();
        transientMetadata2.put("other_transient_key", "other_transient_value");
        // BUG FIX: role2 was previously mapped to role1's transientMetadata,
        // leaving transientMetadata2 unused and the fixture inconsistent.
        transientMetadataMap.put(role2.getName(), transientMetadata2);
        final GetRolesResponse getRolesResponse = new GetRolesResponse(roles, transientMetadataMap);
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(getRolesResponse, (original) -> {
            return new GetRolesResponse(original.getRoles(), original.getTransientMetadataMap());
        });
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(getRolesResponse, (original) -> {
            return new GetRolesResponse(original.getRoles(), original.getTransientMetadataMap());
        }, GetRolesResponseTests::mutateTestItem);
    }

    /**
     * Produces a response that differs from the original either by replacing
     * all roles with a fixed one, or by swapping the first role for one with
     * changed indices privileges.
     */
    private static GetRolesResponse mutateTestItem(GetRolesResponse original) {
        final List<Role> roles = new ArrayList<>();
        final Map<String, Map<String, Object>> transientMetadataMap = new HashMap<>();
        if (randomBoolean()) {
            IndicesPrivileges indicesPrivileges = new IndicesPrivileges.Builder()
                .indices("index1", "index2")
                .privileges("write", "monitor", "delete")
                .grantedFields("field1", "field2")
                .deniedFields("field3", "field4")
                .allowRestrictedIndices(true)
                .build();
            Map<String, Object> metadata = new HashMap<String, Object>();
            metadata.put("key", "value");
            final Role role = Role.builder()
                .name("role_name")
                .clusterPrivileges("monitor", "manage", "manage_saml")
                .indicesPrivileges(indicesPrivileges)
                .runAsPrivilege("run_as_user")
                .metadata(metadata)
                .build();
            roles.add(role);
            Map<String, Object> transientMetadata = new HashMap<>();
            transientMetadata.put("transient_key", "transient_value");
            transientMetadataMap.put(role.getName(), transientMetadata);
            return new GetRolesResponse(roles, transientMetadataMap);
        } else {
            IndicesPrivileges indicesPrivileges = new IndicesPrivileges.Builder()
                .indices("index1_changed", "index2")
                .privileges("write", "monitor", "delete")
                .grantedFields("field1", "field2")
                .deniedFields("field3", "field4")
                .allowRestrictedIndices(false)
                .build();
            Map<String, Object> metadata = new HashMap<String, Object>();
            metadata.put("key", "value");
            final Role role = Role.builder()
                .name("role_name")
                .clusterPrivileges("monitor", "manage", "manage_saml")
                .indicesPrivileges(indicesPrivileges)
                .runAsPrivilege("run_as_user")
                .metadata(metadata)
                .build();
            List<Role> newRoles = original.getRoles().stream().collect(Collectors.toList());
            newRoles.remove(0);
            newRoles.add(role);
            Map<String, Object> transientMetadata = new HashMap<>();
            transientMetadata.put("transient_key", "transient_value");
            transientMetadataMap.put(role.getName(), transientMetadata);
            return new GetRolesResponse(newRoles, transientMetadataMap);
        }
    }
}
| |
// Copyright 2015 Ivan Popivanov
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package net.tradelib.misc;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
public class StrategyText {
/**
 * Convenience overload: builds the strategy report without writing a CSV
 * file (null csvPath) and with the default ',' CSV separator.
 */
public static String build(String dbUrl, String strategy, LocalDate date, String sep) throws Exception {
return build(dbUrl, strategy, date, sep, null, ',');
}
/**
 * Builds a sep-delimited, human-readable report of all non-section
 * instruments for the given strategy on the given date, optionally also
 * writing a CSV file via buildList.
 *
 * @param dbUrl    JDBC URL of the strategy database
 * @param strategy strategy name
 * @param date     position date to report
 * @param sep      column separator used in the returned text
 * @param csvPath  path of a CSV file to write, or null to skip CSV output
 * @param csvSep   CSV delimiter character
 * @return the formatted report, one line per instrument
 */
public static String build(String dbUrl, String strategy, LocalDate date, String sep, String csvPath, char csvSep) throws Exception {
   StringBuilder text = new StringBuilder();
   // try-with-resources closes the connection even when buildList throws
   // (it previously leaked on exception); StringBuilder replaces repeated
   // String concatenation in the loop.
   try (Connection connection = DriverManager.getConnection(dbUrl)) {
      List<InstrumentText> lit = StrategyText.buildList(connection, strategy, date, csvPath, csvSep);
      for (InstrumentText it : lit) {
         if (!it.isSection()) {
            text.append(String.format(
                  "\n%20s" + sep + "%4s" + sep + "%10s" + sep + "%s",
                  it.getName(),
                  it.getSymbol(),
                  it.getExpiration(),
                  it.getStatus()));
         }
      }
   }
   return text.toString();
}
// SQLite flavor of the per-strategy positions query: joins strategy
// positions with instrument metadata; dates and epoch-millis timestamps are
// handled with SQLite's strftime/'unixepoch' functions. Parameters:
// 1 = strategy name, 2 = report date as epoch millis.
// NOTE(review): the table name "instrument_visiable" looks like a typo of
// "instrument_visible" — confirm against the actual schema before renaming.
private static final String STRATEGY_QUERY =
" select c.id as cid, c.name as cname, i.comment as name, coalesce(ivar.symbol, spos.symbol) as symbol, " +
" spos.position as position, date(spos.ts/1000.0, 'unixepoch') as date, spos.last_close as close, " +
" spos.last_ts as close_date, " +
" spos.details AS details, strftime('%m''%Y',i.current_contract/1000.0,'unixepoch') as current_contract, " +
" strftime('%m''%Y',i.next_contract/1000.0,'unixepoch') as next_contract, i.trading_days as days, " +
" strftime('%m''%Y',i.current_contract2/1000.0,'unixepoch') as current_contract2, i.roll_today as roll_today " +
" from strategy_positions spos " +
" inner join strategies s on s.id = spos.strategy_id " +
" inner join instrument i on i.symbol = spos.symbol and i.provider = 'csi' " +
" left join instrument_variation ivar on spos.symbol = ivar.original_symbol and ivar.original_provider = 'csi' " +
" left join instrument_visiable iv on iv.instrument_id = i.id " +
" left join categories c on iv.categories_id = c.id " +
" WHERE s.name = ? AND strftime('%Y-%m-%d', spos.last_ts/1000.0, 'unixepoch') = strftime('%Y-%m-%d', ?/1000.0, 'unixepoch') " +
" ORDER BY cid, iv.ord";
// MySQL flavor of the same query: identical column list and joins, but uses
// date_format/DATE instead of SQLite's strftime. Selected at runtime in
// buildList based on the JDBC driver name. Parameters:
// 1 = strategy name, 2 = report date as a timestamp.
private static final String STRATEGY_QUERY_MYSQL =
" select c.id as cid, c.name as cname, i.comment as name, coalesce(ivar.symbol, spos.symbol) as symbol, " +
" spos.position as position, date_format(spos.ts, '%Y-%m-%d') as date, spos.last_close as close, " +
" spos.last_ts as close_date, " +
" spos.details AS details, date_format(i.current_contract,'%b\\'%y') as current_contract, " +
" date_format(i.next_contract,'%b\\'%y') as next_contract, i.trading_days as days, " +
" date_format(i.current_contract2,'%b\\'%y') as current_contract2, i.roll_today as roll_today " +
" from strategy_positions spos " +
" inner join strategies s on s.id = spos.strategy_id " +
" inner join instrument i on i.symbol = spos.symbol and i.provider = 'csi' " +
" left join instrument_variation ivar on spos.symbol = ivar.original_symbol and ivar.original_provider = 'csi' " +
" left join instrument_visiable iv on iv.instrument_id = i.id " +
" left join categories c on iv.categories_id = c.id " +
" WHERE s.name = ? AND DATE(spos.last_ts) = DATE(?) " +
" ORDER BY cid, iv.ord";
public static List<InstrumentText> buildList(Connection con, String strategy, LocalDate date, String csvPath, char csvSep) throws Exception {
// public static List<InstrumentText> buildList(Connection con, String strategy, LocalDate date) throws Exception {
ArrayList<InstrumentText> result = new ArrayList<InstrumentText>();
CSVPrinter printer = null;
if(csvPath != null)
{
// Add withHeader for headers
printer = CSVFormat.DEFAULT.withDelimiter(csvSep).print(new BufferedWriter(new FileWriter(csvPath)));
}
int numCsvColumns = 12;
int rollMethod = 2;
DatabaseMetaData dmd = con.getMetaData();
String driverName = dmd.getDriverName();
String query = "";
if(driverName.startsWith("MySQL")) {
query = STRATEGY_QUERY_MYSQL;
} else {
query = STRATEGY_QUERY;
}
String prevCategory = "";
PreparedStatement pstmt = con.prepareStatement(query);
pstmt.setString(1, strategy);
pstmt.setTimestamp(2, Timestamp.valueOf(date.atStartOfDay()));
ResultSet rs = pstmt.executeQuery();
while(rs.next()) {
String category = rs.getString(2);
if(!category.equals(prevCategory)) {
result.add(InstrumentText.makeSection(category));
prevCategory = category;
if(printer != null) {
printer.print(category);
for(int ii = 1; ii < numCsvColumns; ++ii) {
printer.print("");
}
printer.println();
}
}
String name = rs.getString(3);
String symbol = rs.getString(4);
String contract = "";
if(rollMethod == 1) {
// Uses current_contract and trading_days
int ndays = rs.getInt(12);
if(ndays > 1) {
contract = rs.getString(10);
} else {
contract = "Roll to " + rs.getString(11);
}
} else if(rollMethod == 2) {
// Uses current_contract2 and roll_today
int rollToday = rs.getInt(14);
if(rollToday == 0) {
contract = rs.getString(13);
} else {
contract = "Roll to " + rs.getString(13);
}
}
if(printer != null) {
printer.print(name);
printer.print(symbol);
printer.print(contract);
}
String signal;
long position = (long)rs.getDouble(5);
JsonObject jo = new Gson().fromJson(rs.getString(9), JsonObject.class);
if(position > 0.0) {
BigDecimal entryPrice;
double pnl;
try {
entryPrice = jo.get("entry_price").getAsBigDecimal();
pnl = jo.get("pnl").getAsDouble();
} catch(Exception e) {
entryPrice = BigDecimal.valueOf(Double.MIN_VALUE);
pnl = Double.MIN_VALUE;
}
signal = String.format("Long [%d] since %s [at %s].", position, rs.getString(6), formatBigDecimal(entryPrice));
if(printer != null) printer.print(signal);
String openProfit = String.format("Open equity profit %,d.", (int)Math.floor(pnl));
signal += " " + openProfit;
if(printer != null) printer.print(openProfit);
} else if(position < 0.0) {
BigDecimal entryPrice;
double pnl;
try {
entryPrice = jo.get("entry_price").getAsBigDecimal();
pnl = jo.get("pnl").getAsDouble();
} catch(Exception e) {
entryPrice = BigDecimal.valueOf(-1);
pnl = -1;
}
signal = String.format("Short [%d] since %s [at %s].", Math.abs(position), rs.getString(6), formatBigDecimal(entryPrice));
if(printer != null) printer.print(signal);
String openProfit = String.format("Open equity profit %,d.", (int)Math.floor(pnl));
signal += " " + openProfit;
if(printer != null) printer.print(openProfit);
} else {
signal = "Out.";
if(printer != null)
{
printer.print(signal);
// An empty column follows the status if there is no position - there is no profit.
printer.print("");
}
}
boolean hasOrder = false;
JsonArray ja = jo.get("orders").getAsJsonArray();
double entryRisk;
try {
entryRisk = jo.get("entry_risk").getAsDouble();
} catch(Exception ee) {
entryRisk = Double.NaN;
}
String profitTarget;
Double profitTargetDbl;
try {
profitTarget = formatBigDecimal(jo.get("profit_target").getAsBigDecimal());
profitTargetDbl = jo.get("profit_target").getAsDouble();
} catch(Exception ee) {
profitTarget = null;
profitTargetDbl = null;
}
String stopLoss;
Double stopLossDbl;
try {
stopLoss = formatBigDecimal(jo.get("stop_loss").getAsBigDecimal());
stopLossDbl = jo.get("stop_loss").getAsDouble();
} catch(Exception ee) {
stopLoss = null;
stopLossDbl = null;
}
Double lastClose;
try {
lastClose = jo.get("last_close").getAsDouble();
} catch(Exception ee) {
lastClose = null;
}
// Currently maximum one entry and maximum one exit are supported.
String entryStr = "";
String exitStr = "";
String contractRiskStr = "";
for(int ii = 0; ii < ja.size(); ++ii) {
JsonObject jorder = ja.get(ii).getAsJsonObject();
switch(jorder.get("type").getAsString()) {
case "EXIT_LONG_STOP":
exitStr = "Exit long at stop " + formatBigDecimal(jorder.get("stop_price").getAsBigDecimal()) + ".";
signal += " " + exitStr;
break;
case "EXIT_SHORT_STOP":
exitStr = "Exit short at stop " + formatBigDecimal(jorder.get("stop_price").getAsBigDecimal()) + ".";
signal += " " + exitStr;
break;
case "ENTER_LONG":
if(!Double.isNaN(entryRisk)){
entryStr = String.format("Enter long at open. Contract risk is %s." , formatDouble(entryRisk, 0, 0));
signal += " " + entryStr;
} else {
entryStr = "Enter long at open.";
signal += " " + entryStr;
}
break;
case "ENTER_SHORT":
if(!Double.isNaN(entryRisk)){
entryStr = String.format("Enter short at open. Contract risk is %s." , formatDouble(entryRisk, 0, 0));
signal += " " + entryStr;
} else {
entryStr = "Enter short at open.";
signal += " " + entryStr;
}
break;
case "ENTER_LONG_STOP":
position = jorder.get("quantity").getAsLong();
entryStr = String.format(
"Enter long [%d] at stop %s [%s%%].",
position,
formatBigDecimal(jorder.get("stop_price").getAsBigDecimal()),
formatPercentage(jorder.get("stop_price").getAsDouble()/lastClose*100-100));
signal += " " + entryStr;
if(!Double.isNaN(entryRisk)){
contractRiskStr = String.format(" Contract risk is %s." , formatDouble(entryRisk, 0, 0));
signal += " " + contractRiskStr;
}
break;
case "ENTER_LONG_STOP_LIMIT":
position = jorder.get("quantity").getAsLong();
entryStr = String.format(
"Enter long [%d] at limit %s, stop at %s [%s%%].",
position,
formatBigDecimal(jorder.get("limit_price").getAsBigDecimal()),
formatBigDecimal(jorder.get("stop_price").getAsBigDecimal()),
formatPercentage(jorder.get("stop_price").getAsDouble()/lastClose*100-100));
signal += " " + entryStr;
if(!Double.isNaN(entryRisk)){
contractRiskStr = String.format(" Contract risk is %s." , formatDouble(entryRisk, 0, 0));
signal += contractRiskStr;
}
break;
case "ENTER_SHORT_STOP":
// signal += " Enter short at stop " + formatBigDecimal(jorder.get("stop_price").getAsBigDecimal()) + ".";
position = jorder.get("quantity").getAsLong();
entryStr = String.format(
"Enter short [%d] at stop %s [%s%%].",
Math.abs(position),
formatBigDecimal(jorder.get("stop_price").getAsBigDecimal()),
formatPercentage(jorder.get("stop_price").getAsDouble()/lastClose*100-100));
signal += " " + entryStr;
if(!Double.isNaN(entryRisk)){
contractRiskStr = String.format(" Contract risk is %s." , formatDouble(entryRisk, 0, 0));
signal += " " + contractRiskStr;
}
break;
case "ENTER_SHORT_STOP_LIMIT":
position = jorder.get("quantity").getAsLong();
entryStr = String.format(
"Enter short [%d] at limit %s, stop at %s [%s%%].",
Math.abs(position),
formatBigDecimal(jorder.get("limit_price").getAsBigDecimal()),
formatBigDecimal(jorder.get("stop_price").getAsBigDecimal()),
formatPercentage(jorder.get("stop_price").getAsDouble()/lastClose*100-100));
signal += " " + entryStr;
if(!Double.isNaN(entryRisk)){
contractRiskStr = String.format(" Contract risk is %s." , formatDouble(entryRisk, 0, 0));
signal += " " + contractRiskStr;
}
break;
case "EXIT_LONG":
exitStr = "Exit long at open.";
signal += " " + exitStr;
break;
case "EXIT_SHORT":
exitStr = "Exit short at open.";
signal += " " + exitStr;
break;
case "EXIT_SHORT_STOP_LIMIT":
exitStr = "Exit short at limit " + formatBigDecimal(jorder.get("limit_price").getAsBigDecimal()) +
", stop at " + formatBigDecimal(jorder.get("stop_price").getAsBigDecimal()) +
" [" + formatPercentage(jorder.get("stop_price").getAsDouble()/lastClose*100-100) + "%]" +
".";
signal += " " + exitStr;
break;
case "EXIT_LONG_STOP_LIMIT":
exitStr = "Exit long at limit " + formatBigDecimal(jorder.get("limit_price").getAsBigDecimal()) +
", stop at " + formatBigDecimal(jorder.get("stop_price").getAsBigDecimal()) +
" [" + formatPercentage(jorder.get("stop_price").getAsDouble()/lastClose*100-100) + "%]" +
".";
signal += " " + exitStr;
break;
}
hasOrder = true;
}
String lastCloseStr = "Last close at " + formatBigDecimal(jo.get("last_close").getAsBigDecimal()) + ".";
String stopLossStr = "";
String profitTargetStr = "";
if(hasOrder) {
signal += " " + lastCloseStr;
}
if(stopLoss != null) {
stopLossStr = "Stop loss at " + stopLoss;
if(lastClose != null && stopLossDbl != null) {
stopLossStr += " [" + formatPercentage(stopLossDbl/lastClose*100-100) + "%]";
}
stopLossStr += ".";
signal += " " + stopLossStr;
}
if(profitTarget != null) {
profitTargetStr = "Profit target at about " + profitTarget;
if(profitTargetDbl != null && lastClose != null) {
profitTargetStr += " [" + formatPercentage(profitTargetDbl/lastClose*100-100) + "%]";
}
profitTargetStr += ".";
signal += " " + profitTargetStr;
}
if(printer != null) {
printer.print(exitStr);
printer.print(entryStr);
printer.print(contractRiskStr);
printer.print(lastCloseStr);
printer.print(stopLossStr);
printer.print(profitTargetStr);
printer.println();
}
result.add(InstrumentText.make(name, symbol, contract, signal));
}
rs.close();
pstmt.close();
if(printer != null) printer.flush();
return result;
}
// Per-day position/order snapshot for one strategy (SQLite dialect; uses
// strftime for epoch-millis date handling).  Select-list positions relied on
// by callers:
//   1 cid, 2 cname, 3 name (instrument comment), 4 actual_symbol, 5 position,
//   6 date, 7 close, 8 close_date, 9 details (JSON blob with orders/prices),
//   10 current_contract, 11 next_contract, 12 trading_days, 13 exchange,
//   14 type, 15 current_contract2, 16 roll_today.
private static final String STRATEGY_ORDER_QUERY =
    " select c.id as cid, c.name as cname, i.comment as name, coalesce(ivar.symbol, spos.symbol) as actual_symbol, " +
    " spos.position as position, strftime('%Y-%m-%d', spos.ts/1000.0, 'unixepoch') as date, spos.last_close as close, " +
    " spos.last_ts as close_date, " +
    " spos.details AS details, strftime('%Y%m',i.current_contract/1000.0,'unixepoch') as current_contract, " +
    " strftime('%Y%m',i.next_contract/1000.0,'unixepoch') as next_contract, i.trading_days as days, " +
    " ie.exchange as exchange, i.type as type, " +
    " strftime('%Y%m',i.current_contract2/1000.0,'unixepoch') as current_contract2, i.roll_today as roll_today " +
    " from strategy_positions spos " +
    " inner join strategies s on s.id = spos.strategy_id " +
    " inner join instrument i on i.symbol = spos.symbol and i.provider = 'csi' " +
    " left join instrument_variation ivar on spos.symbol = ivar.original_symbol and ivar.original_provider = 'csi' " +
    " left join instrument_visiable iv on iv.instrument_id = i.id " +
    " left join categories c on iv.categories_id = c.id " +
    " left join instrument_exchange ie on coalesce(ivar.symbol, spos.symbol) = ie.symbol " +
    " WHERE s.name = ? AND strftime('%Y-%m-%d', spos.last_ts/1000.0, 'unixepoch') = strftime('%Y-%m-%d', ?/1000.0, 'unixepoch') " +
    " ORDER BY cid, iv.ord";
// MySQL dialect twin of STRATEGY_ORDER_QUERY: identical select list and
// column positions, but uses date_format()/DATE() instead of SQLite's
// strftime() on epoch-millis values.
private static final String STRATEGY_ORDER_QUERY_MYSQL =
    " select c.id as cid, c.name as cname, i.comment as name, coalesce(ivar.symbol, spos.symbol) as actual_symbol, " +
    " spos.position as position, date_format(spos.ts, '%Y-%m-%d') as date, spos.last_close as close, " +
    " spos.last_ts as close_date, " +
    " spos.details AS details, date_format(i.current_contract,'%Y%m') as current_contract, " +
    " date_format(i.next_contract,'%Y%m') as next_contract, i.trading_days as days, " +
    " ie.exchange as exchange, i.type as type, " +
    " date_format(i.current_contract2,'%Y%m') as current_contract2, i.roll_today as roll_today " +
    " from strategy_positions spos " +
    " inner join strategies s on s.id = spos.strategy_id " +
    " inner join instrument i on i.symbol = spos.symbol and i.provider = 'csi' " +
    " left join instrument_variation ivar on spos.symbol = ivar.original_symbol and ivar.original_provider = 'csi' " +
    " left join instrument_visiable iv on iv.instrument_id = i.id " +
    " left join categories c on iv.categories_id = c.id " +
    " left join instrument_exchange ie on coalesce(ivar.symbol, spos.symbol) = ie.symbol " +
    " WHERE s.name = ? AND DATE(spos.last_ts) = DATE(?) " +
    " ORDER BY cid, iv.ord";
// Column order of the CSV produced by buildOrdersCsv().  The column names
// match Interactive Brokers' basket-trader import fields (presumably — TODO
// confirm against the actual consumer of these files).
private static final String[] CSV_HEADER =
    {
      "Action",
      "Quantity",
      "Symbol",
      "SecType",
      "LastTradingDayOrContractMonth",
      "Exchange",
      "OrderType",
      "LmtPrice",
      "AuxPrice"
    };
/**
 * Exports the pending orders of {@code strategy} for {@code date} to
 * {@code csvPath} as a CSV whose columns follow {@code CSV_HEADER}.  One row
 * is emitted per order found in the {@code details} JSON of each strategy
 * position returned by STRATEGY_ORDER_QUERY (or its MySQL twin, chosen from
 * the JDBC driver name).
 *
 * @param dbUrl    JDBC url of the strategy database
 * @param strategy strategy name (matched against strategies.name)
 * @param date     trading day whose positions/orders are exported
 * @param csvPath  output file; when null the method is a no-op
 */
public static void buildOrdersCsv(String dbUrl, String strategy, LocalDate date, String csvPath) throws Exception {
    if (csvPath == null) {
        // No destination file: nothing to produce.  (The previous version kept
        // going with a null printer and threw a NullPointerException on the
        // first emitted order row.)
        return;
    }
    // Roll method 2 takes the contract from current_contract2 (column 15);
    // method 1 would use current_contract/next_contract plus trading_days.
    final int rollMethod = 2;
    try (Connection con = DriverManager.getConnection(dbUrl);
         CSVPrinter printer = CSVFormat.DEFAULT.withDelimiter(',').withHeader(CSV_HEADER)
             .print(new BufferedWriter(new FileWriter(csvPath)))) {
        String query = con.getMetaData().getDriverName().startsWith("MySQL")
            ? STRATEGY_ORDER_QUERY_MYSQL
            : STRATEGY_ORDER_QUERY;
        try (PreparedStatement pstmt = con.prepareStatement(query)) {
            pstmt.setString(1, strategy);
            pstmt.setTimestamp(2, Timestamp.valueOf(date.atStartOfDay()));
            try (ResultSet rs = pstmt.executeQuery()) {
                while (rs.next()) {
                    // Column 9 holds the position details JSON, including the
                    // list of open orders.
                    JsonObject jo = new Gson().fromJson(rs.getString(9), JsonObject.class);
                    JsonArray ja = jo.get("orders").getAsJsonArray();
                    String contract = "";
                    if (rollMethod == 1) {
                        // Roll when only one trading day remains on the current contract.
                        contract = rs.getInt(12) > 1 ? rs.getString(10) : rs.getString(11);
                    } else if (rollMethod == 2) {
                        contract = rs.getString(15);
                    }
                    for (int ii = 0; ii < ja.size(); ++ii) {
                        JsonObject jorder = ja.get(ii).getAsJsonObject();
                        String type = jorder.get("type").getAsString();
                        // Buying opens longs and closes shorts; selling does the opposite.
                        String action;
                        switch (type) {
                            case "ENTER_LONG":
                            case "ENTER_LONG_STOP":
                            case "ENTER_LONG_STOP_LIMIT":
                            case "EXIT_SHORT":
                            case "EXIT_SHORT_STOP":
                            case "EXIT_SHORT_STOP_LIMIT":
                                action = "BUY";
                                break;
                            case "ENTER_SHORT":
                            case "ENTER_SHORT_STOP":
                            case "ENTER_SHORT_STOP_LIMIT":
                            case "EXIT_LONG":
                            case "EXIT_LONG_STOP":
                            case "EXIT_LONG_STOP_LIMIT":
                                action = "SELL";
                                break;
                            default:
                                // Unknown order type: emit nothing, matching the
                                // previous switch's missing-case behavior.
                                continue;
                        }
                        // Order type and price columns follow from the type suffix.
                        String orderType;
                        String lmtPrice = "";
                        String auxPrice = "";
                        if (type.endsWith("_STOP_LIMIT")) {
                            orderType = "STP LMT";
                            lmtPrice = formatOrderPrice(jorder.get("limit_price").getAsBigDecimal());
                            auxPrice = formatOrderPrice(jorder.get("stop_price").getAsBigDecimal());
                        } else if (type.endsWith("_STOP")) {
                            orderType = "STP";
                            auxPrice = formatOrderPrice(jorder.get("stop_price").getAsBigDecimal());
                        } else {
                            orderType = "MKT";
                        }
                        printOrderRow(printer, action, jorder.get("quantity").getAsLong(),
                            rs.getString(4), rs.getString(14), contract, rs.getString(13),
                            orderType, lmtPrice, auxPrice);
                    }
                    printer.flush();
                }
            }
        }
    }
}

/** Emits one basket CSV row in the CSV_HEADER column order. */
private static void printOrderRow(CSVPrinter printer, String action, long quantity,
        String symbol, String secType, String contract, String exchange,
        String orderType, String lmtPrice, String auxPrice) throws Exception {
    printer.print(action);
    printer.print(quantity);
    printer.print(symbol);
    printer.print(secType);
    printer.print(contract);
    printer.print(exchange);
    printer.print(orderType);
    printer.print(lmtPrice);
    printer.print(auxPrice);
    printer.println();
}
/**
 * Renders {@code bd} with a thousands separator, rounded half-up to at most
 * {@code maxPrecision} decimal places and padded to at least
 * {@code minPrecision}.
 */
static private String formatBigDecimal(BigDecimal bd, int minPrecision, int maxPrecision) {
    BigDecimal rounded = bd.setScale(maxPrecision, RoundingMode.HALF_UP).stripTrailingZeros();
    int decimals = Math.max(rounded.scale(), minPrecision);
    return String.format("%,." + decimals + "f", rounded);
}
// Convenience overload using the house defaults: at least 2 and at most 7
// decimal places.
static private String formatBigDecimal(BigDecimal bd) {
    return formatBigDecimal(bd, 2, 7);
}
/**
 * Formats {@code dd} like {@code formatBigDecimal(BigDecimal, int, int)},
 * with at least {@code minPrecision} and at most {@code maxPrecision}
 * decimal places.
 */
static private String formatDouble(double dd, int minPrecision, int maxPrecision) {
    BigDecimal bd = new BigDecimal(dd, MathContext.DECIMAL128);
    // Forward the caller's precision bounds.  The old code rounded to
    // maxPrecision but then delegated to the (2, 7)-default overload, so
    // minPrecision was ignored and e.g. formatDouble(x, 0, 0) — used for the
    // "Contract risk" strings — still printed two decimal places.
    return formatBigDecimal(bd, minPrecision, maxPrecision);
}
// Convenience overload using the house defaults: at least 2 and at most 7
// decimal places.
static private String formatDouble(double dd) {
    return formatDouble(dd, 2, 7);
}
// Percentages are always rendered with exactly two decimal places.
static private String formatPercentage(double dd) {
    return formatDouble(dd, 2, 2);
}
/**
 * Renders an order price without grouping separators (unlike
 * formatBigDecimal): rounded half-up to at most 7 decimals, padded to at
 * least 2.
 */
static private String formatOrderPrice(BigDecimal bd) {
    BigDecimal rounded = bd.setScale(7, RoundingMode.HALF_UP).stripTrailingZeros();
    int decimals = Math.max(rounded.scale(), 2);
    return String.format("%." + decimals + "f", rounded);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.spark;
import com.google.gson.Gson;
import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteException;
import org.apache.commons.exec.ExecuteResultHandler;
import org.apache.commons.exec.ExecuteWatchdog;
import org.apache.commons.exec.PumpStreamHandler;
import org.apache.commons.exec.environment.EnvironmentUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SQLContext;
import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.InterpreterHookRegistry.HookType;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
import org.apache.zeppelin.interpreter.InterpreterResultMessage;
import org.apache.zeppelin.interpreter.LazyOpenInterpreter;
import org.apache.zeppelin.interpreter.WrappedInterpreter;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.interpreter.util.InterpreterOutputStream;
import org.apache.zeppelin.spark.dep.SparkDependencyContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import py4j.GatewayServer;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.net.MalformedURLException;
import java.net.ServerSocket;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
*
*/
public class PySparkInterpreter extends Interpreter implements ExecuteResultHandler {
private static final Logger LOGGER = LoggerFactory.getLogger(PySparkInterpreter.class);
// py4j gateway that the spawned python process calls back into.
private GatewayServer gatewayServer;
// Runs the external python process (commons-exec).
private DefaultExecutor executor;
// Port the py4j GatewayServer listens on.
private int port;
// Bridges the python process's stdout/stderr into the interpreter output.
private InterpreterOutputStream outputStream;
// Writer side of the pipe feeding the python process's stdin.
private BufferedWriter ins;
// Read end of that pipe, handed to the process's stream handler.
private PipedInputStream in;
// In-memory buffer written once at startup; NOTE(review): it is never wired
// to the python process — see the note in createGatewayServerAndStartScript().
private ByteArrayOutputStream input;
// Path of the temp file holding the zeppelin_pyspark.py bootstrap script.
private String scriptPath;
// True while the external python process is believed to be alive.
boolean pythonscriptRunning = false;
// Seconds interpret() waits for the python script to report initialization.
private static final int MAX_TIMEOUT_SEC = 10;
// PID of the python process; -1 until reported via onPythonScriptInitialized().
private long pythonPid;
// Non-null when delegating to the IPython-based implementation.
private IPySparkInterpreter iPySparkInterpreter;
/**
 * Creates the interpreter and reserves the temp file that will later receive
 * the bundled zeppelin_pyspark.py bootstrap script (see createPythonScript()).
 *
 * @throws RuntimeException when the temp file cannot be created
 */
public PySparkInterpreter(Properties property) {
    super(property);
    // -1 means "pid not yet reported by the python process".
    pythonPid = -1;
    try {
        File scriptFile = File.createTempFile("zeppelin_pyspark-", ".py");
        scriptPath = scriptFile.getAbsolutePath();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Copies the bundled python/zeppelin_pyspark.py bootstrap resource to
 * {@code scriptPath} so it can be handed to the spawned python process.
 *
 * @throws InterpreterException when the target path is a directory or the
 *         copy fails
 */
private void createPythonScript() throws InterpreterException {
    ClassLoader classLoader = getClass().getClassLoader();
    File out = new File(scriptPath);
    if (out.exists() && out.isDirectory()) {
        throw new InterpreterException("Can't create python script " + out.getAbsolutePath());
    }
    // try-with-resources: the previous version leaked the FileOutputStream
    // when IOUtils.copy threw.
    try (FileOutputStream outStream = new FileOutputStream(out)) {
        IOUtils.copy(
            classLoader.getResourceAsStream("python/zeppelin_pyspark.py"),
            outStream);
    } catch (IOException e) {
        throw new InterpreterException(e);
    }
    LOGGER.info("File {} created", scriptPath);
}
/**
 * Opens the interpreter: tries the IPython-based implementation first and,
 * failing that, boots the classic zeppelin_pyspark.py subprocess with any
 * dependency-interpreter / local-repo jars visible on the context classloader.
 */
@Override
public void open() throws InterpreterException {
    // try IPySparkInterpreter first
    iPySparkInterpreter = getIPySparkInterpreter();
    if (getProperty("zeppelin.pyspark.useIPython", "true").equals("true") &&
        StringUtils.isEmpty(
            iPySparkInterpreter.checkIPythonPrerequisite(getPythonExec(getProperties())))) {
        try {
            iPySparkInterpreter.open();
            if (InterpreterContext.get() != null) {
                // don't print it when it is in testing, just for easy output check in test.
                InterpreterContext.get().out.write(("IPython is available, " +
                    "use IPython for PySparkInterpreter\n")
                    .getBytes());
            }
            LOGGER.info("Use IPySparkInterpreter to replace PySparkInterpreter");
            return;
        } catch (Exception e) {
            // Fall through to the classic implementation below.
            LOGGER.warn("Fail to open IPySparkInterpreter", e);
        }
    }
    // IPython unavailable (or failed to open): use the subprocess-based
    // implementation for the rest of this method.
    iPySparkInterpreter = null;
    if (getProperty("zeppelin.pyspark.useIPython", "true").equals("true")) {
        // don't print it when it is in testing, just for easy output check in test.
        try {
            InterpreterContext.get().out.write(("IPython is not available, " +
                "use the native PySparkInterpreter\n")
                .getBytes());
        } catch (IOException e) {
            LOGGER.warn("Fail to write InterpreterOutput", e);
        }
    }
    // Add matplotlib display hook
    InterpreterGroup intpGroup = getInterpreterGroup();
    if (intpGroup != null && intpGroup.getInterpreterHookRegistry() != null) {
        registerHook(HookType.POST_EXEC_DEV, "__zeppelin__._displayhook()");
    }
    DepInterpreter depInterpreter = getDepInterpreter();
    // load libraries from Dependency Interpreter
    URL [] urls = new URL[0];
    List<URL> urlList = new LinkedList<>();
    if (depInterpreter != null) {
        SparkDependencyContext depc = depInterpreter.getDependencyContext();
        if (depc != null) {
            List<File> files = depc.getFiles();
            if (files != null) {
                for (File f : files) {
                    try {
                        urlList.add(f.toURI().toURL());
                    } catch (MalformedURLException e) {
                        // Skip the unparseable file but keep collecting the rest.
                        LOGGER.error("Error", e);
                    }
                }
            }
        }
    }
    // Also expose everything in the interpreter's local repo, if configured.
    String localRepo = getProperty("zeppelin.interpreter.localRepo");
    if (localRepo != null) {
        File localRepoDir = new File(localRepo);
        if (localRepoDir.exists()) {
            File[] files = localRepoDir.listFiles();
            if (files != null) {
                for (File f : files) {
                    try {
                        urlList.add(f.toURI().toURL());
                    } catch (MalformedURLException e) {
                        LOGGER.error("Error", e);
                    }
                }
            }
        }
    }
    urls = urlList.toArray(urls);
    ClassLoader oldCl = Thread.currentThread().getContextClassLoader();
    try {
        // Launch the python process with the collected jars visible on the
        // context classloader; the finally block always restores the old one.
        URLClassLoader newCl = new URLClassLoader(urls, oldCl);
        Thread.currentThread().setContextClassLoader(newCl);
        createGatewayServerAndStartScript();
    } catch (Exception e) {
        LOGGER.error("Error", e);
        throw new InterpreterException(e);
    } finally {
        Thread.currentThread().setContextClassLoader(oldCl);
    }
}
/**
 * Builds the environment for the spawned python process: PYTHONPATH with the
 * pyspark libraries (local / yarn-client only), spark-submit jars, and
 * PYSPARK_PYTHON when configured via spark.pyspark.python.
 */
private Map setupPySparkEnv() throws IOException, InterpreterException {
    Map env = EnvironmentUtils.getProcEnvironment();
    // only set PYTHONPATH in local or yarn-client mode.
    // yarn-cluster will setup PYTHONPATH automatically.
    SparkConf conf = getSparkConf();
    if (!conf.get("spark.submit.deployMode", "client").equals("cluster")) {
        if (!env.containsKey("PYTHONPATH")) {
            env.put("PYTHONPATH", PythonUtils.sparkPythonPath());
        } else {
            // Append to the inherited PYTHONPATH instead of clobbering it —
            // both branches used to be identical, silently discarding the
            // user's pre-existing value.
            env.put("PYTHONPATH", env.get("PYTHONPATH") + ":" + PythonUtils.sparkPythonPath());
        }
    }
    // get additional class paths when using SPARK_SUBMIT and not using YARN-CLIENT
    // also, add all packages to PYTHONPATH since there might be transitive dependencies
    if (SparkInterpreter.useSparkSubmit() &&
        !getSparkInterpreter().isYarnMode()) {
        String sparkSubmitJars = getSparkConf().get("spark.jars").replace(",", ":");
        if (!"".equals(sparkSubmitJars)) {
            // The ":" separator was missing here, fusing the last PYTHONPATH
            // entry with the first jar path.
            env.put("PYTHONPATH", env.get("PYTHONPATH") + ":" + sparkSubmitJars);
        }
    }
    LOGGER.info("PYTHONPATH: " + env.get("PYTHONPATH"));
    // set PYSPARK_PYTHON
    if (getSparkConf().contains("spark.pyspark.python")) {
        env.put("PYSPARK_PYTHON", getSparkConf().get("spark.pyspark.python"));
    }
    return env;
}
/**
 * Resolves the python executable used to run the bootstrap script.
 * Precedence (highest first): the PYSPARK_DRIVER_PYTHON environment
 * variable, then PYSPARK_PYTHON, then the zeppelin.pyspark.python property
 * (default {@code "python"}).
 */
public static String getPythonExec(Properties properties) {
    String driverPython = System.getenv("PYSPARK_DRIVER_PYTHON");
    if (driverPython != null) {
        return driverPython;
    }
    String workerPython = System.getenv("PYSPARK_PYTHON");
    if (workerPython != null) {
        return workerPython;
    }
    return properties.getProperty("zeppelin.pyspark.python", "python");
}
/**
 * Starts the py4j gateway on a free port, then launches
 * {@code <python> <scriptPath> <port> <sparkVersionNumber>} asynchronously,
 * wiring the process's stdout/stderr into this interpreter's output stream.
 * Process-exit callbacks arrive via the ExecuteResultHandler interface.
 */
private void createGatewayServerAndStartScript() throws InterpreterException {
    // create python script
    createPythonScript();
    port = findRandomOpenPortOnAllLocalInterfaces();
    gatewayServer = new GatewayServer(this, port);
    gatewayServer.start();
    String pythonExec = getPythonExec(getProperties());
    LOGGER.info("pythonExec: " + pythonExec);
    CommandLine cmd = CommandLine.parse(pythonExec);
    cmd.addArgument(scriptPath, false);
    cmd.addArgument(Integer.toString(port), false);
    cmd.addArgument(Integer.toString(getSparkInterpreter().getSparkVersion().toNumber()), false);
    executor = new DefaultExecutor();
    outputStream = new InterpreterOutputStream(LOGGER);
    // stdin pipe: `ins` (writer) -> `in` (read end given to the process).
    PipedOutputStream ps = new PipedOutputStream();
    in = null;
    try {
        in = new PipedInputStream(ps);
    } catch (IOException e1) {
        throw new InterpreterException(e1);
    }
    ins = new BufferedWriter(new OutputStreamWriter(ps));
    input = new ByteArrayOutputStream();
    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
    executor.setStreamHandler(streamHandler);
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));
    try {
        Map env = setupPySparkEnv();
        // Asynchronous execution; this object receives the process-complete /
        // process-failed callbacks.
        executor.execute(cmd, env, this);
        pythonscriptRunning = true;
    } catch (IOException e) {
        throw new InterpreterException(e);
    }
    try {
        // NOTE(review): this writes to the in-memory `input` buffer but then
        // flushes `ins` (the process stdin writer) — the bytes never reach the
        // python process.  Confirm whether `ins.write` was intended before
        // changing it.
        input.write("import sys, getopt\n".getBytes());
        ins.flush();
    } catch (IOException e) {
        throw new InterpreterException(e);
    }
}
/**
 * Asks the OS for a currently-free TCP port by binding an ephemeral
 * ServerSocket (port 0) and reading the port it was given.  The port is
 * released again when the socket closes, so a race with other processes is
 * possible but unlikely.
 *
 * @throws InterpreterException when no socket can be bound
 */
private int findRandomOpenPortOnAllLocalInterfaces() throws InterpreterException {
    // try-with-resources releases the port; the previous explicit close()
    // inside the try block was redundant.
    try (ServerSocket socket = new ServerSocket(0)) {
        return socket.getLocalPort();
    } catch (IOException e) {
        throw new InterpreterException(e);
    }
}
/**
 * Shuts the interpreter down: delegates to the IPython implementation when
 * active, otherwise kills the python process, removes the bootstrap script
 * and stops the py4j gateway.
 */
@Override
public void close() throws InterpreterException {
    if (iPySparkInterpreter != null) {
        iPySparkInterpreter.close();
        return;
    }
    // Guard against close() being called before open() finished (or after it
    // failed part-way), in which case these fields may still be null.
    if (executor != null) {
        executor.getWatchdog().destroyProcess();
    }
    new File(scriptPath).delete();
    if (gatewayServer != null) {
        gatewayServer.shutdown();
    }
}
// Request currently pending for the python process; null when idle.
PythonInterpretRequest pythonInterpretRequest = null;

/**
 * One paragraph of python code handed to the python side, together with the
 * spark job group id / description used for cancellation and the job UI.
 */
public class PythonInterpretRequest {
    public String statements;
    public String jobGroup;
    public String jobDescription;

    public PythonInterpretRequest(String statements, String jobGroup,
        String jobDescription) {
        this.statements = statements;
        this.jobGroup = jobGroup;
        this.jobDescription = jobDescription;
    }

    // Accessor methods mirroring the public fields; presumably invoked from
    // the python side via py4j — confirm before removing.
    public String statements() {
        return statements;
    }

    public String jobGroup() {
        return jobGroup;
    }

    public String jobDescription() {
        return jobDescription;
    }
}
// Monitor object for the hand-off of pythonInterpretRequest between the
// zeppelin thread (interpret) and the python callback thread (getStatements).
Integer statementSetNotifier = new Integer(0);

/**
 * Called by the python process (via the py4j gateway) to fetch the next
 * paragraph to execute; blocks until interpret() publishes a request, then
 * consumes and returns it.
 */
public PythonInterpretRequest getStatements() {
    synchronized (statementSetNotifier) {
        while (pythonInterpretRequest == null) {
            try {
                // Bounded wait so a missed notify degrades to a 1s poll.
                statementSetNotifier.wait(1000);
            } catch (InterruptedException e) {
                // NOTE(review): the interrupt is swallowed and the loop keeps
                // waiting; confirm this is intentional before re-interrupting.
            }
        }
        PythonInterpretRequest req = pythonInterpretRequest;
        pythonInterpretRequest = null;
        return req;
    }
}
// Output of the most recently finished python statement; null while one is
// still running (interpret() waits for this to become non-null).
String statementOutput = null;
// True when statementOutput represents an error.
boolean statementError = false;
// Monitor object interpret() waits on until the python side reports completion.
Integer statementFinishedNotifier = new Integer(0);

/**
 * Callback from the python process reporting the result of the statement that
 * just finished; wakes the thread blocked in interpret().
 */
public void setStatementsFinished(String out, boolean error) {
    synchronized (statementFinishedNotifier) {
        LOGGER.debug("Setting python statement output: " + out + ", error: " + error);
        statementOutput = out;
        statementError = error;
        statementFinishedNotifier.notify();
    }
}
// Set once the python bootstrap script reports that it is ready.
boolean pythonScriptInitialized = false;
// Monitor object interpret() waits on for that initialization signal.
Integer pythonScriptInitializeNotifier = new Integer(0);

/**
 * Callback from the python bootstrap script once it is ready: records the
 * python process pid (used by interrupt()) and wakes any thread waiting in
 * interpret().
 */
public void onPythonScriptInitialized(long pid) {
    pythonPid = pid;
    synchronized (pythonScriptInitializeNotifier) {
        LOGGER.debug("onPythonScriptInitialized is called");
        pythonScriptInitialized = true;
        pythonScriptInitializeNotifier.notifyAll();
    }
}
/**
 * Callback from the python process appending a chunk of its output to the
 * current paragraph's interpreter output.
 */
public void appendOutput(String message) throws IOException {
    LOGGER.debug("Output from python process: " + message);
    outputStream.getInterpreterOutput().write(message);
}
/**
 * Executes one python paragraph: delegates to IPython when active, otherwise
 * publishes the code to the external python process (which is blocked in
 * getStatements()) and waits until it reports the result through
 * setStatementsFinished().
 */
@Override
public InterpreterResult interpret(String st, InterpreterContext context)
    throws InterpreterException {
    SparkInterpreter sparkInterpreter = getSparkInterpreter();
    sparkInterpreter.populateSparkWebUrl(context);
    if (sparkInterpreter.isUnsupportedSparkVersion()) {
        return new InterpreterResult(Code.ERROR, "Spark "
            + sparkInterpreter.getSparkVersion().toString() + " is not supported");
    }
    if (iPySparkInterpreter != null) {
        return iPySparkInterpreter.interpret(st, context);
    }
    if (!pythonscriptRunning) {
        return new InterpreterResult(Code.ERROR, "python process not running"
            + outputStream.toString());
    }
    outputStream.setInterpreterOutput(context.out);
    // Wait (bounded by MAX_TIMEOUT_SEC) for the python bootstrap script to
    // call onPythonScriptInitialized().
    synchronized (pythonScriptInitializeNotifier) {
        long startTime = System.currentTimeMillis();
        while (pythonScriptInitialized == false
            && pythonscriptRunning
            && System.currentTimeMillis() - startTime < MAX_TIMEOUT_SEC * 1000) {
            try {
                pythonScriptInitializeNotifier.wait(1000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
    List<InterpreterResultMessage> errorMessage;
    try {
        context.out.flush();
        errorMessage = context.out.toInterpreterResultMessage();
    } catch (IOException e) {
        throw new InterpreterException(e);
    }
    if (pythonscriptRunning == false) {
        // python script failed to initialize and terminated
        errorMessage.add(new InterpreterResultMessage(
            InterpreterResult.Type.TEXT, "failed to start pyspark"));
        return new InterpreterResult(Code.ERROR, errorMessage);
    }
    if (pythonScriptInitialized == false) {
        // timeout. didn't get initialized message
        errorMessage.add(new InterpreterResultMessage(
            InterpreterResult.Type.TEXT, "pyspark is not responding"));
        return new InterpreterResult(Code.ERROR, errorMessage);
    }
    if (!sparkInterpreter.getSparkVersion().isPysparkSupported()) {
        errorMessage.add(new InterpreterResultMessage(
            InterpreterResult.Type.TEXT,
            "pyspark " + sparkInterpreter.getSparkContext().version() + " is not supported"));
        return new InterpreterResult(Code.ERROR, errorMessage);
    }
    String jobGroup = Utils.buildJobGroupId(context);
    String jobDesc = "Started by: " + Utils.getUserName(context.getAuthenticationInfo());
    SparkZeppelinContext __zeppelin__ = sparkInterpreter.getZeppelinContext();
    __zeppelin__.setInterpreterContext(context);
    __zeppelin__.setGui(context.getGui());
    __zeppelin__.setNoteGui(context.getNoteGui());
    // Publish the request and wake the python thread blocked in getStatements().
    pythonInterpretRequest = new PythonInterpretRequest(st, jobGroup, jobDesc);
    statementOutput = null;
    synchronized (statementSetNotifier) {
        statementSetNotifier.notify();
    }
    // Block until setStatementsFinished() publishes the result.
    synchronized (statementFinishedNotifier) {
        while (statementOutput == null) {
            try {
                statementFinishedNotifier.wait(1000);
            } catch (InterruptedException e) {
                // Keep waiting; the python side eventually reports completion.
            }
        }
    }
    if (statementError) {
        return new InterpreterResult(Code.ERROR, statementOutput);
    } else {
        try {
            context.out.flush();
        } catch (IOException e) {
            throw new InterpreterException(e);
        }
        return new InterpreterResult(Code.SUCCESS);
    }
}
/**
 * Interrupts the currently executing python statement by delivering SIGINT
 * to the python process. When the pid is unknown (pythonPid == -1, e.g. on
 * non UNIX/Linux systems) the interpreter is closed instead.
 *
 * @throws IOException if the kill command cannot be started
 * @throws InterpreterException if closing the interpreter fails
 */
public void interrupt() throws IOException, InterpreterException {
  if (pythonPid > -1) {
    LOGGER.info("Sending SIGINT signal to PID : {}", pythonPid);
    // ProcessBuilder with an explicit argument list is preferred over
    // Runtime.exec(String), which re-tokenizes the command by whitespace.
    new ProcessBuilder("kill", "-SIGINT", Long.toString(pythonPid)).start();
  } else {
    LOGGER.warn("Non UNIX/Linux system, close the interpreter");
    close();
  }
}
/**
 * Cancels the currently executing paragraph. Delegates to the IPython-backed
 * interpreter when one is active; otherwise cancels the spark job group and
 * sends an interrupt to the python process.
 */
@Override
public void cancel(InterpreterContext context) throws InterpreterException {
  if (iPySparkInterpreter != null) {
    iPySparkInterpreter.cancel(context);
    return;
  }
  getSparkInterpreter().cancel(context);
  try {
    interrupt();
  } catch (IOException e) {
    LOGGER.error("Error", e);
  }
}
/**
 * @return FormType.NATIVE - dynamic forms are rendered by the interpreter
 *         itself rather than by Zeppelin's simple form templating
 */
@Override
public FormType getFormType() {
  return FormType.NATIVE;
}
/**
 * Reports the progress (0-100) of the given paragraph's job, delegating to
 * the IPython-backed interpreter when one is active.
 */
@Override
public int getProgress(InterpreterContext context) throws InterpreterException {
  if (iPySparkInterpreter != null) {
    return iPySparkInterpreter.getProgress(context);
  }
  return getSparkInterpreter().getProgress(context);
}
/**
 * Returns code-completion candidates for the text before the cursor by
 * asking the python side to evaluate {@code completion.getCompletion(...)}.
 * Delegates to the IPython-backed interpreter when one is active. Returns an
 * empty list on any failure, timeout or interruption.
 *
 * @param buf    current paragraph text
 * @param cursor caret offset into buf (clamped to buf length)
 */
@Override
public List<InterpreterCompletion> completion(String buf, int cursor,
                                              InterpreterContext interpreterContext)
    throws InterpreterException {
  if (iPySparkInterpreter != null) {
    return iPySparkInterpreter.completion(buf, cursor, interpreterContext);
  }
  if (buf.length() < cursor) {
    cursor = buf.length();
  }
  String completionString = getCompletionTargetString(buf, cursor);
  // getCompletionTargetString returns null on a bad cursor position;
  // previously the literal text "null" would have been sent to python.
  if (completionString == null) {
    return new LinkedList<>();
  }
  // Escape backslashes and single quotes so the target cannot break out of
  // the single-quoted python string literal built below.
  String escaped = completionString.replace("\\", "\\\\").replace("'", "\\'");
  String completionCommand = "completion.getCompletion('" + escaped + "')";
  //start code for completion
  SparkInterpreter sparkInterpreter = getSparkInterpreter();
  if (sparkInterpreter.isUnsupportedSparkVersion() || pythonscriptRunning == false) {
    return new LinkedList<>();
  }
  // publish the request and release the python-side request loop
  pythonInterpretRequest = new PythonInterpretRequest(completionCommand, "", "");
  statementOutput = null;
  synchronized (statementSetNotifier) {
    statementSetNotifier.notify();
  }
  String[] completionList = null;
  // bounded wait for the python side to publish the completion result
  synchronized (statementFinishedNotifier) {
    long startTime = System.currentTimeMillis();
    while (statementOutput == null
        && pythonscriptRunning) {
      try {
        if (System.currentTimeMillis() - startTime > MAX_TIMEOUT_SEC * 1000) {
          LOGGER.error("pyspark completion didn't have response for {}sec.", MAX_TIMEOUT_SEC);
          break;
        }
        statementFinishedNotifier.wait(1000);
      } catch (InterruptedException e) {
        // not working
        LOGGER.info("wait drop");
        return new LinkedList<>();
      }
    }
    if (statementError) {
      return new LinkedList<>();
    }
    Gson gson = new Gson();
    completionList = gson.fromJson(statementOutput, String[].class);
  }
  //end code for completion
  if (completionList == null) {
    return new LinkedList<>();
  }
  List<InterpreterCompletion> results = new LinkedList<>();
  for (String name : completionList) {
    results.add(new InterpreterCompletion(name, name, StringUtils.EMPTY));
  }
  return results;
}
/**
 * Extracts the completion target: the trailing run of characters before the
 * cursor, starting after the nearest separator (space, newline or tab).
 * When the prefix contains no separator (or only one at its very end), the
 * whole prefix is returned. Returns null when the cursor is out of range.
 *
 * @param text   full paragraph text
 * @param cursor caret offset into text
 */
private String getCompletionTargetString(String text, int cursor) {
  final String[] separators = {" ", "\n", "\t"};
  String head;
  try {
    head = text.substring(0, cursor);
  } catch (Exception e) {
    LOGGER.error(e.toString());
    return null;
  }
  int end = head.length();
  // Scan the reversed prefix for the separator closest to the cursor;
  // an index of 0 (separator immediately before the cursor) is ignored,
  // matching the original behavior.
  String reversed = new StringBuilder(head).reverse().toString();
  int nearest = end;
  for (String sep : separators) {
    int idx = reversed.indexOf(sep);
    if (idx > 0 && idx < nearest) {
      nearest = idx;
    }
  }
  int start = (nearest == end) ? 0 : end - nearest;
  return head.substring(start, end);
}
/**
 * Resolves the SparkInterpreter living in the same session, unwrapping any
 * interpreter wrappers along the way. When the interpreter is wrapped in a
 * LazyOpenInterpreter it is opened first, so callers always receive a ready
 * instance.
 */
private SparkInterpreter getSparkInterpreter() throws InterpreterException {
  Interpreter current = getInterpreterInTheSameSessionByClassName(SparkInterpreter.class.getName());
  LazyOpenInterpreter lazyWrapper = null;
  while (current instanceof WrappedInterpreter) {
    if (current instanceof LazyOpenInterpreter) {
      lazyWrapper = (LazyOpenInterpreter) current;
    }
    current = ((WrappedInterpreter) current).getInnerInterpreter();
  }
  SparkInterpreter inner = (SparkInterpreter) current;
  if (lazyWrapper != null) {
    // opening the lazy wrapper initializes the inner interpreter
    lazyWrapper.open();
  }
  return inner;
}
/**
 * Resolves the IPySparkInterpreter living in the same session, unwrapping
 * any interpreter wrappers. Unlike getSparkInterpreter(), a lazy wrapper is
 * noted but NOT opened here (matching the original behavior).
 */
private IPySparkInterpreter getIPySparkInterpreter() {
  Interpreter current = getInterpreterInTheSameSessionByClassName(IPySparkInterpreter.class.getName());
  LazyOpenInterpreter lazyWrapper = null;
  while (current instanceof WrappedInterpreter) {
    if (current instanceof LazyOpenInterpreter) {
      lazyWrapper = (LazyOpenInterpreter) current;
    }
    current = ((WrappedInterpreter) current).getInnerInterpreter();
  }
  return (IPySparkInterpreter) current;
}
/**
 * @return the SparkZeppelinContext of the session's SparkInterpreter, or
 *         null if none is available
 * @throws InterpreterException if resolving/opening the spark interpreter fails
 */
public SparkZeppelinContext getZeppelinContext() throws InterpreterException {
  SparkInterpreter sparkIntp = getSparkInterpreter();
  if (sparkIntp != null) {
    // reuse the already-resolved interpreter; the original resolved it a
    // second time via another getSparkInterpreter() call
    return sparkIntp.getZeppelinContext();
  } else {
    return null;
  }
}
/**
 * @return a JavaSparkContext wrapping the session's SparkContext, or null
 *         when no spark interpreter is available
 * @throws InterpreterException if resolving the spark interpreter fails
 */
public JavaSparkContext getJavaSparkContext() throws InterpreterException {
  SparkInterpreter intp = getSparkInterpreter();
  return (intp == null) ? null : new JavaSparkContext(intp.getSparkContext());
}
/**
 * @return the SparkSession object of the session's SparkInterpreter, or
 *         null when no spark interpreter is available
 * @throws InterpreterException if resolving the spark interpreter fails
 */
public Object getSparkSession() throws InterpreterException {
  SparkInterpreter intp = getSparkInterpreter();
  return (intp == null) ? null : intp.getSparkSession();
}
/**
 * @return the SparkConf of the session's spark context, or null when no
 *         spark interpreter is available
 * @throws InterpreterException if resolving the spark interpreter fails
 */
public SparkConf getSparkConf() throws InterpreterException {
  JavaSparkContext sc = getJavaSparkContext();
  if (sc == null) {
    return null;
  }
  // Use the context we already built; the original called
  // getJavaSparkContext() a second time, constructing a second wrapper.
  return sc.getConf();
}
/**
 * @return the SQLContext of the session's SparkInterpreter, or null when no
 *         spark interpreter is available
 * @throws InterpreterException if resolving the spark interpreter fails
 */
public SQLContext getSQLContext() throws InterpreterException {
  SparkInterpreter intp = getSparkInterpreter();
  return (intp == null) ? null : intp.getSQLContext();
}
/**
 * Resolves the DepInterpreter living in the same session (unwrapping any
 * interpreter wrappers), or null when the session has none.
 */
private DepInterpreter getDepInterpreter() {
  Interpreter interp = getInterpreterInTheSameSessionByClassName(DepInterpreter.class.getName());
  if (interp == null) {
    return null;
  }
  while (interp instanceof WrappedInterpreter) {
    interp = ((WrappedInterpreter) interp).getInnerInterpreter();
  }
  return (DepInterpreter) interp;
}
/**
 * Apache Commons Exec callback invoked when the python process exits;
 * marks the script as no longer running so waiters can bail out.
 *
 * @param exitValue process exit code
 */
@Override
public void onProcessComplete(int exitValue) {
  pythonscriptRunning = false;
  // parameterized logging instead of eager string concatenation
  LOGGER.info("python process terminated. exit code {}", exitValue);
}
/**
 * Apache Commons Exec callback invoked when the python process fails to
 * execute; marks the script as no longer running so waiters can bail out.
 *
 * @param e the execution failure reported by commons-exec
 */
@Override
public void onProcessFailed(ExecuteException e) {
  pythonscriptRunning = false;
  LOGGER.error("python process failed", e);
}
}
| |
package org.myrobotlab.framework;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.Serializable;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;
import java.util.zip.ZipFile;
// Do not pull in deps to this class !
import org.myrobotlab.io.FileIO;
import org.myrobotlab.lang.NameGenerator;
import org.myrobotlab.logging.Level;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.LoggingFactory;
import org.slf4j.Logger;
/**
* The purpose of this class is to retrieve all the detailed information
* regarding the details of the current platform which myrobotlab is running.
*
* It must NOT have references to mrl services, or Runtime, or 3rd party library
* dependencies except perhaps for logging
*
* FIXME - it's silly to have some values in variables and others in the
* manifest map - probably should have all in a Tree map but I didn't want to
* break any javascript which accessed the members directly
*
*/
public class Platform implements Serializable {

  transient static Logger log = LoggerFactory.getLogger(Platform.class);

  private static final long serialVersionUID = 1L;

  // Nixie
  public static final String VERSION_PREFIX = "1.1.";

  // VM Names
  public final static String VM_DALVIK = "dalvik";
  public final static String VM_HOTSPOT = "hotspot";

  // OS Names
  public final static String OS_LINUX = "linux";
  public final static String OS_MAC = "mac";
  public final static String OS_WINDOWS = "windows";

  public final static String UNKNOWN = "unknown";

  // arch names
  public final static String ARCH_X86 = "x86";
  public final static String ARCH_ARM = "arm";

  // non-changing values
  String os;
  String arch;
  int osBitness;
  int jvmBitness;
  String lang = "java";
  String vmName;
  String vmVersion;
  String mrlVersion;
  boolean isVirtual = false;

  /**
   * Static identifier to identify the "instance" of myrobotlab - similar to
   * network ip of a device and used in a similar way
   */
  String id;

  String branch;
  String pid;
  String hostname;
  String commit;
  String build;
  String motd;
  Date startTime;

  // all values of the manifest
  Map<String, String> manifest;

  // first 7 characters of the git commit id, when available
  String shortCommit;

  static Platform localInstance;

  /**
   * The one big convoluted function to get all the crazy platform specific
   * data. Potentially, it's done once and only once for a running instance.
   * Most of the data should be immutable, although the "id"
   *
   * All data should be accessed through public functions on the local instance.
   * If the local instance is desired. If its from a serialized instance, the
   * "getters" will be retrieving appropriate info for that serialized instance.
   *
   * Synchronized so concurrent first callers cannot race the lazy
   * initialization and observe two different Platform instances.
   *
   * @return - return the local instance of the current platform
   */
  public static synchronized Platform getLocalInstance() {
    if (localInstance == null) {
      log.debug("initializing Platform");
      Platform platform = new Platform();
      platform.startTime = new Date();

      // === OS ===
      platform.os = System.getProperty("os.name").toLowerCase();
      if (platform.os.indexOf("win") >= 0) {
        platform.os = OS_WINDOWS;
      } else if (platform.os.indexOf("mac") >= 0) {
        platform.os = OS_MAC;
      } else if (platform.os.indexOf("linux") >= 0) {
        platform.os = OS_LINUX;
      }

      platform.vmName = System.getProperty("java.vm.name");
      platform.vmVersion = System.getProperty("java.specification.version");

      // === ARCH ===
      String arch = System.getProperty("os.arch").toLowerCase();
      if ("i386".equals(arch) || "i486".equals(arch) || "i586".equals(arch) || "i686".equals(arch) || "amd64".equals(arch) || arch.startsWith("x86")) {
        platform.arch = "x86"; // don't care at the moment
      }
      if ("arm".equals(arch)) {
        // assume ras pi 1 .
        int armv = 6;
        // if the os version has "v7" in it, it's a pi 2
        // TODO: this is still pretty hacky..
        String osVersion = System.getProperty("os.version").toLowerCase();
        if (osVersion.contains("v7")) {
          armv = 7;
        }
        // TODO: revisit how we determine the architecture version
        platform.arch = "armv" + armv + ".hfp";
      }
      // for Ordroid 64 !
      if ("aarch64".equals(arch)) {
        platform.arch = "armv8";
      }
      if (platform.arch == null) {
        platform.arch = arch;
      }

      // === BITNESS ===
      if (platform.isWindows()) {
        // https://blogs.msdn.microsoft.com/david.wang/2006/03/27/howto-detect-process-bitness/
        // this will attempt to guess the bitness of the underlying OS, Java
        // tries very hard to hide this from running programs
        String procArch = System.getenv("PROCESSOR_ARCHITECTURE");
        String procArchWow64 = System.getenv("PROCESSOR_ARCHITEW6432");
        platform.osBitness = (procArch != null && procArch.endsWith("64") || procArchWow64 != null && procArchWow64.endsWith("64")) ? 64 : 32;
        switch (arch) {
          case "x86":
          case "i386":
          case "i486":
          case "i586":
          case "i686":
            platform.jvmBitness = 32;
            break;
          case "x86_64":
          case "amd64":
            platform.jvmBitness = 64;
            break;
          default:
            platform.jvmBitness = 0; // ooops, I guess
            break;
        }
      } else {
        // this is actually a really bad way of doing jvm bitness (may return
        // "64","32" or "unknown") - and is sometimes simply not there at all
        // keeping this as a fallback for all Linux and Mac machines,
        // I don't know enough to implement a more robust detection for them
        // (and this was here before me, so it has to be good)
        String model = System.getProperty("sun.arch.data.model");
        platform.jvmBitness = "64".equals(model) ? 64 : 32;
      }

      // === MRL ===
      if (platform.mrlVersion == null) {
        SimpleDateFormat format = new SimpleDateFormat("yyyy.MM.dd");
        platform.mrlVersion = format.format(new Date());
      }

      // manifest
      Map<String, String> manifest = getManifest();
      platform.manifest = manifest;
      platform.branch = get(manifest, "GitBranch", "unknownBranch");
      platform.commit = get(manifest, "GitCommitIdAbbrev", "unknownCommit");
      // build version or git commit timestamp
      platform.mrlVersion = get(manifest, "Implementation-Version", "unknownVersion");

      // git properties - local build has precedence
      Properties gitProps = gitProperties();
      if (gitProps != null) {
        String gitProp = gitProps.getProperty("git.branch");
        platform.branch = (gitProp != null) ? gitProp : platform.branch;
        gitProp = gitProps.getProperty("git.commit.id");
        platform.commit = (gitProp != null) ? gitProp : platform.commit;
        if (platform.commit != null) {
          // guard against commits shorter than the 7-char abbreviation
          // (the original unconditional substring(0, 7) could throw)
          platform.shortCommit = platform.commit.substring(0, Math.min(7, platform.commit.length()));
        }
      }

      // motd
      platform.motd = "resistance is futile, we have cookies and robots ...";

      // hostname
      try {
        platform.hostname = InetAddress.getLocalHost().getHostName();
        if (platform.hostname != null) {
          platform.hostname = platform.hostname.toLowerCase();
        }
      } catch (Exception e) {
        platform.hostname = "localhost";
      }

      // fallback pid: a timestamp, replaced below when the real pid is found
      SimpleDateFormat TSFormatter = new SimpleDateFormat("yyyyMMddHHmmssSSS");
      platform.pid = TSFormatter.format(platform.startTime);

      try {
        // something like '<pid>@<hostname>', at least in SUN / Oracle JVMs
        // but non standard across jvms & hosts
        // here we will attempt to standardize it - when asked for pid you
        // "only"
        // get pid ... if possible
        String jvmName = ManagementFactory.getRuntimeMXBean().getName();
        int index = jvmName.indexOf('@');
        if (index > 1) {
          platform.pid = Long.toString(Long.parseLong(jvmName.substring(0, index)));
        } else {
          platform.pid = jvmName;
        }
      } catch (Exception e) {
      }

      localInstance = platform;
    }
    return localInstance;
  }

  /**
   * Null-safe manifest lookup.
   *
   * @param manifest map to query, may be null
   * @param key      key to look up
   * @param def      default returned when the map is null or lacks the key
   * @return the mapped value or def
   */
  static public String get(Map<String, String> manifest, String key, String def) {
    // && (not the original non-short-circuit &) so containsKey is never
    // evaluated on a null map - the original threw NPE for manifest == null
    if (manifest != null && manifest.containsKey(key)) {
      return manifest.get(key);
    }
    return def;
  }

  /**
   * Loads git.properties from the jar (when running packaged) or from the
   * filesystem (when running from an IDE / exploded build).
   *
   * @return the loaded properties, or null when unavailable or on error
   */
  static public Properties gitProperties() {
    try {
      Properties properties = new Properties();
      String rootOfClass = FileIO.getRoot();
      if (FileIO.isJar()) {
        // extract from jar
        log.info("git loading properties from jar {}", rootOfClass);
        try (InputStream in = Platform.class.getResourceAsStream("/git.properties")) {
          if (in == null) {
            // getResourceAsStream returns null (not an exception) when absent
            log.info("git.properties resource not found in jar");
            return null;
          }
          properties.load(in);
        }
      } else {
        // get from file system
        String path = FileIO.gluePaths(rootOfClass, "git.properties");
        File check = new File(path);
        if (!check.exists()) {
          log.info("git.properties does not exist");
          return null;
        }
        log.info("git loading from file {}", path);
        // try-with-resources: the original FileInputStream was never closed
        try (InputStream in = new FileInputStream(path)) {
          properties.load(in);
        }
      }
      return properties;
    } catch (Exception e) {
      log.error("getProperties threw", e);
    }
    return null;
  }

  public Platform() {
  }

  /**
   * @return The process id of the currently running Java process
   *
   */
  public String getPid() {
    return pid;
  }

  /**
   * @return The message of the day. "resistance is futile, we have cookies and
   *         robots ..."
   */
  public String getMotd() {
    return motd;
  }

  /**
   * @return The branch this software was built from.
   */
  public String getBranch() {
    return branch;
  }

  public String getBuild() {
    return build;
  }

  /**
   * @return This is the full commit of the source.
   */
  public String getCommit() {
    return commit;
  }

  /**
   * @return CPU Architecture x86, armv6, armv7, armv8
   */
  public String getArch() {
    return arch;
  }

  /**
   * @return Os bitness - should be 64 or 32
   */
  public int getOsBitness() {
    return osBitness;
  }

  /**
   * @return Java virtual machine bitness either 64 or 32 bit
   *
   */
  public int getJvmBitness() {
    return jvmBitness;
  }

  /**
   * @return Operating system type linux, windows, mac
   *
   */
  public String getOS() {
    return os;
  }

  /**
   * @return arc bitness and os together x86.64.linux, armv7.32.linux,
   *         x86.32.windows etc..
   *
   */
  public String getPlatformId() {
    return String.format("%s.%s.%s", getArch(), getJvmBitness(), getOS());
  }

  /**
   * @return version or myrobotlab
   *
   */
  public String getVersion() {
    return mrlVersion;
  }

  /**
   * @return Name of the Jvm Hotspot or OpenJDK typically
   *
   */
  public String getVMName() {
    return vmName;
  }

  public boolean isArm() {
    return getArch().startsWith(ARCH_ARM);
  }

  public boolean isDalvik() {
    return VM_DALVIK.equals(vmName);
  }

  public boolean isLinux() {
    return OS_LINUX.equals(os);
  }

  public boolean isMac() {
    return OS_MAC.equals(os);
  }

  public boolean isWindows() {
    return OS_WINDOWS.equals(os);
  }

  public boolean isX86() {
    return getArch().equals(ARCH_X86);
  }

  /**
   * Reads all key/value pairs from META-INF/MANIFEST.MF when running from a
   * jar, or from target/classes/git.properties when running from an IDE.
   *
   * @return the pairs found, or an empty map on error (never null)
   */
  static public Map<String, String> getManifest() {
    Map<String, String> ret = new TreeMap<String, String>();
    ZipFile zf = null;
    InputStream in = null;
    try {
      log.debug("getManifest");
      String source = Platform.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();
      log.debug("source {}", source);
      if (source.endsWith("jar")) {
        // runtime - read MANIFEST.MF straight out of the jar.
        // DO NOT DO IT with
        // Platform.class.getResource("/META-INF/MANIFEST.MF").openStream() -
        // IT DOES NOT WORK WITH OpenJDK !!!
        zf = new ZipFile(source);
        in = zf.getInputStream(zf.getEntry("META-INF/MANIFEST.MF"));
      } else {
        // IDE - version ... exploded classes build
        in = new FileInputStream("target/classes/git.properties");
      }
      Properties p = new Properties();
      p.load(in);
      for (final String name : p.stringPropertyNames()) {
        ret.put(name, p.getProperty(name));
        log.debug("{}={}", name, p.getProperty(name));
      }
    } catch (Exception e) {
      e.printStackTrace();
      // log.warn("getManifest threw", e);
    } finally {
      // close the stream even when loading failed - the original only closed
      // it on the success path, leaking it on any exception
      if (in != null) {
        try {
          in.close();
        } catch (Exception e) {
        }
      }
      if (zf != null) {
        try {
          zf.close();
        } catch (Exception e) {
        }
      }
    }
    return ret;
  }

  @Override
  public String toString() {
    return String.format("%s.%d.%s", arch, jvmBitness, os);
  }

  /**
   * @return The instance identifier of the current running myrobotlab. Used for
   *         connecting multiple myrobotlabs together
   *
   */
  public String getId() {
    // null ids are not allowed
    if (id == null) {
      id = NameGenerator.getName();
    }
    return id;
  }

  /**
   * @return The Computer's hostname
   */
  public String getHostname() {
    return hostname;
  }

  /**
   * @param newId
   *          Set your own instance identifier
   *
   */
  public void setId(String newId) {
    id = newId;
  }

  /**
   * @return the time when this instance was started
   *
   */
  public Date getStartTime() {
    return startTime;
  }

  /**
   * @return true if running in virtual mode
   *
   */
  public static boolean isVirtual() {
    Platform p = getLocalInstance();
    return p.isVirtual;
  }

  public static void setVirtual(boolean b) {
    Platform p = getLocalInstance();
    p.isVirtual = b;
  }

  public static void main(String[] args) {
    try {
      LoggingFactory.init(Level.DEBUG);
      Platform platform = Platform.getLocalInstance();
      log.debug("platform : {}", platform.toString());
      log.debug("build {}", platform.getBuild());
      log.debug("branch {}", platform.getBranch());
      log.debug("commit {}", platform.getCommit());
      log.debug("toString {}", platform.toString());
    } catch (Exception e) {
      log.error("main threw", e);
    }
  }

  // NOTE(review): returns boolean and ignores the vmVersion field - looks
  // like an auto-generated stub; kept as-is for interface compatibility.
  public boolean getVmVersion() {
    // TODO Auto-generated method stub
    return false;
  }
}
| |
/*
Copyright (c) 2014-2015 F-Secure
See LICENSE for details
*/
package cc.softwarefactory.lokki.android.fragments;
import android.content.Context;
import android.content.DialogInterface;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.text.Editable;
import android.text.InputType;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.androidquery.AQuery;
import com.androidquery.callback.AjaxCallback;
import com.androidquery.callback.AjaxStatus;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import cc.softwarefactory.lokki.android.MainApplication;
import cc.softwarefactory.lokki.android.R;
import cc.softwarefactory.lokki.android.avatar.AvatarLoader;
import cc.softwarefactory.lokki.android.datasources.contacts.ContactDataSource;
import cc.softwarefactory.lokki.android.datasources.contacts.DefaultContactDataSource;
import cc.softwarefactory.lokki.android.models.Contact;
import cc.softwarefactory.lokki.android.services.ContactService;
import cc.softwarefactory.lokki.android.utilities.AnalyticsUtils;
import cc.softwarefactory.lokki.android.utilities.ServerApi;
/**
 * Fragment that lists the phone's contacts that are not yet Lokki contacts,
 * with a live search filter. Tapping a row (or entering an e-mail via
 * addContactFromEmail) asks for confirmation and then "allows" the contact
 * on the server through ContactService.
 */
public class AddContactsFragment extends Fragment {

  private static final String TAG = "AddContacts";
  // Backing list for the adapter: phone contacts not yet saved on the server.
  private List<Contact> contactList;
  // Raw contacts read from the phone's address book; injected via setPhoneContacts().
  private List<Contact> phoneContacts;
  private AQuery aq;
  // NOTE(review): assigned but never read in this class - presumably meant
  // to cancel getAllEmailAddressesAsync; verify before relying on it.
  private Boolean cancelAsynTasks = false;
  private Context context;
  private AvatarLoader avatarLoader;
  private EditText inputSearch;
  private Button clearFilter;
  private ArrayAdapter<Contact> adapter;
  private TextView noContactsMessage;
  private ContactService contactService;

  /**
   * @param context used to build the initial ContactService; both fields are
   *                re-assigned from the application context in onCreateView()
   */
  // NOTE(review): fragments with non-default constructors cannot be
  // re-instantiated by the framework after configuration change or process
  // death - confirm this fragment is never restored from saved state.
  public AddContactsFragment(Context context) {
    this.context = context;
    this.contactService = new ContactService(context);
    contactList = new ArrayList<>();
    phoneContacts = new ArrayList<>();
  }

  /** Inflates the layout and looks up the views; the search box starts
   *  disabled/invisible until the adapter shows real rows (see getView()). */
  @Override
  public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    View rootView = inflater.inflate(R.layout.activity_add_contacts, container, false);
    aq = new AQuery(getActivity(), rootView);
    cancelAsynTasks = false;
    context = getActivity().getApplicationContext();
    contactService = new ContactService(context);
    avatarLoader = new AvatarLoader();
    inputSearch = (EditText) rootView.findViewById(R.id.add_contact_search);
    inputSearch.setEnabled(false);
    inputSearch.setAlpha(0);
    noContactsMessage = (TextView) rootView.findViewById(R.id.no_contacts_message);
    noContactsMessage.setText(R.string.no_contacts_to_show);
    clearFilter = (Button) rootView.findViewById(R.id.clear_filter);
    return rootView;
  }

  /** Sets the action-bar title, (re)loads the contact list, wires the search
   *  filter and reports a screen hit to analytics. */
  @Override
  public void onResume() {
    super.onResume();
    ActionBar actionBar = ((AppCompatActivity) getActivity()).getSupportActionBar();
    if (actionBar != null) {
      actionBar.setTitle(R.string.add_contacts);
    }
    loadContacts();
    enableSearchFilter();
    AnalyticsUtils.screenHit(getString(R.string.analytics_screen_add_contacts));
  }

  /** Shows a one-row "loading" placeholder, then starts the async load. */
  private void loadContacts() {
    String[] loadingList = {getString(R.string.loading)};
    aq.id(R.id.add_contacts_list_view).adapter(new ArrayAdapter<>(getActivity(), android.R.layout.simple_list_item_1, loadingList));
    new getAllEmailAddressesAsync().execute();
  }

  /** Injects the contacts read from the phone's address book. */
  public void setPhoneContacts(List<Contact> phoneContacts) {
    this.phoneContacts = phoneContacts;
  }

  /** Hooks up live filtering of the adapter and the clear-filter button. */
  private void enableSearchFilter() {
    inputSearch.addTextChangedListener(new TextWatcher() {
      @Override
      public void onTextChanged(CharSequence cs, int arg1, int arg2, int arg3) {
        // When user changed the Text
        if (adapter != null) {
          adapter.getFilter().filter(cs);
        }
        // show the clear button only while a filter is active
        clearFilter.setVisibility(cs.length() == 0 ? View.INVISIBLE : View.VISIBLE);
      }
      @Override
      public void beforeTextChanged(CharSequence arg0, int arg1, int arg2, int arg3) {
        // TODO Auto-generated method stub
      }
      @Override
      public void afterTextChanged(Editable arg0) {
        // TODO Auto-generated method stub
      }
    });
    inputSearch.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
            getString(R.string.analytics_action_click),
            getString(R.string.analytics_label_search_contacts_textbox));
      }
    });
    clearFilter.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View v) {
        AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
            getString(R.string.analytics_action_click),
            getString(R.string.analytics_label_clear_search_contacts_textbox_button));
        inputSearch.setText("");
      }
    });
  }

  /**
   * Filters out contacts already saved on the server and then installs the
   * list adapter. The "background" step just returns the injected phone
   * contacts; the filtering happens on the UI thread in onPostExecute.
   */
  private class getAllEmailAddressesAsync extends AsyncTask<Void, Void, List<Contact>> {
    @Override
    protected List<Contact> doInBackground(Void... params) {
      return phoneContacts;
    }
    @Override
    protected void onPostExecute(List<Contact> phoneContacts) {
      Log.d(TAG, "Number of contacts: " + phoneContacts.size());
      Log.d(TAG, "Contacts: " + phoneContacts);
      // We create a dictionary for performance.
      Map<String, Contact> savedContacts = new HashMap<>();
      for (Contact contact : MainApplication.contacts) {
        savedContacts.put(contact.getEmail(), contact);
      }
      // keep only phone contacts that are not already saved
      for (Contact contact : phoneContacts) {
        String email = contact.getEmail();
        if (!savedContacts.containsKey(email)) {
          contactList.add(contact);
        }
      }
      setListAdapter();
      super.onPostExecute(contactList);
    }
  }

  /** Builds the row adapter (view-holder pattern) and attaches it to the
   *  list; each row click opens a confirm dialog that adds the contact. */
  private void setListAdapter() {
    adapter = new ArrayAdapter<Contact>(getActivity(), R.layout.add_people_row_layout, contactList) {
      @Override
      public View getView(final int position, View convertView, ViewGroup parent) {
        ViewHolder holder;
        if (convertView == null) {
          convertView = getActivity().getLayoutInflater().inflate(R.layout.add_people_row_layout, parent, false);
          holder = new ViewHolder();
          holder.name = (TextView) convertView.findViewById(R.id.contact_name);
          holder.email = (TextView) convertView.findViewById(R.id.contact_email);
          holder.photo = (ImageView) convertView.findViewById(R.id.contact_photo);
          convertView.setTag(holder);
        } else {
          holder = (ViewHolder) convertView.getTag();
        }
        AQuery aq = new AQuery(convertView);
        final Contact contact = getItem(position);
        // first real row: reveal the search box and hide the empty message
        inputSearch.setEnabled(true);
        inputSearch.setAlpha(1);
        noContactsMessage.setAlpha(0);
        avatarLoader.load(contact, holder.photo);
        aq.id(holder.name).text(contact.toString());
        aq.id(holder.email).text(contact.getEmail());
        holder.position = position;
        convertView.setOnClickListener(new View.OnClickListener() {
          @Override
          public void onClick(View v) {
            AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
                getString(R.string.analytics_action_click),
                getString(R.string.analytics_label_contact_in_list));
            final Context context = getContext();
            String title = getString(R.string.add_contact);
            String message = getString(R.string.add_contact_dialog_save, contact.getEmail());
            new AlertDialog.Builder(context)
                .setTitle(title)
                .setMessage(message)
                .setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() {
                  @Override
                  public void onClick(DialogInterface dialog, int which) {
                    AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
                        getString(R.string.analytics_action_click),
                        getString(R.string.analytics_label_confirm_contact_add_from_list_dialog));
                    // users may not add themselves
                    if (contact.emailIsSameAs(MainApplication.user.getEmail())) {
                      Toast.makeText(context, R.string.cant_add_self_as_contact, Toast.LENGTH_LONG).show();
                    } else {
                      contactService.allowContacts(Arrays.asList(contact), new AjaxCallback<String>() {
                        @Override
                        public void callback(String url, String result, AjaxStatus status) {
                          ServerApi.logStatus("allowPeople", status);
                          if(status.getError() != null)
                            Toast.makeText(context, R.string.unable_to_add_contact, Toast.LENGTH_LONG).show();
                          else {
                            // remove the added row and refresh from server
                            contactList.remove(position);
                            notifyDataSetChanged();
                            contactService.getContacts();
                            Toast.makeText(context, R.string.contact_added, Toast.LENGTH_SHORT).show();
                          }
                        }
                      });
                    }
                  }
                })
                .setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() {
                  @Override
                  public void onClick(DialogInterface dialog, int which) {
                    AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
                        getString(R.string.analytics_action_click),
                        getString(R.string.analytics_label_cancel_contact_add_from_list_dialog));
                  }
                })
                .show();
          }
        });
        return convertView;
      }
    };
    aq.id(R.id.add_contacts_list_view).adapter(adapter);
  }

  /**
   * Static entry point (usable from any screen) that prompts for an e-mail
   * address and adds it as a contact. The positive button's handler is
   * installed in onShow so a validation failure does NOT dismiss the dialog.
   *
   * @param context context used for the dialog, service calls and toasts
   */
  public static void addContactFromEmail(final Context context) {
    final EditText input = new EditText(context); // Set an EditText view to get user input
    input.setSingleLine(true);
    input.setHint(R.string.contact_email_address);
    input.setInputType(InputType.TYPE_TEXT_VARIATION_WEB_EMAIL_ADDRESS);
    final AlertDialog addContactDialog = new AlertDialog.Builder(context)
        .setTitle(context.getString(R.string.add_contact))
        .setView(input)
        // null listener: the real handler is attached in onShow below
        .setPositiveButton(R.string.ok, null)
        .setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() {
          @Override
          public void onClick(DialogInterface dialogInterface, int i) {
            AnalyticsUtils.eventHit(context.getString(R.string.analytics_category_ux),
                context.getString(R.string.analytics_action_click),
                context.getString(R.string.analytics_label_cancel_contact_add_from_email_dialog));
          }
        })
        .create();
    addContactDialog.setOnShowListener(new DialogInterface.OnShowListener() {
      @Override
      public void onShow(DialogInterface dialog) {
        addContactDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View.OnClickListener() {
          @Override
          public void onClick(View view) {
            Editable value = input.getText();
            // keep the dialog open and flag the field when empty
            if (value == null || value.toString().isEmpty()) {
              input.setError(context.getResources().getString(R.string.required));
              return;
            }
            AnalyticsUtils.eventHit(context.getString(R.string.analytics_category_ux),
                context.getString(R.string.analytics_action_click),
                context.getString(R.string.analytics_label_confirm_contact_add_from_email_dialog_successful));
            final String email = value.toString();
            Contact contact = new Contact();
            contact.setEmail(email);
            // users may not add themselves
            if (contact.emailIsSameAs(MainApplication.user.getEmail())) {
              Toast.makeText(context, R.string.cant_add_self_as_contact, Toast.LENGTH_LONG).show();
            } else {
              final ContactService contactService = new ContactService(context);
              contactService.allowContacts(Arrays.asList(contact), new AjaxCallback<String>() {
                @Override
                public void callback(String url, String result, AjaxStatus status) {
                  ServerApi.logStatus("allowPeople", status);
                  if(status.getError() != null)
                    Toast.makeText(context, R.string.unable_to_add_contact, Toast.LENGTH_LONG).show();
                  else {
                    contactService.getContacts();
                    Toast.makeText(context, R.string.contact_added, Toast.LENGTH_SHORT).show();
                  }
                }
              });
            }
            addContactDialog.dismiss();
          }
        });
      }
    });
    addContactDialog.show();
  }

  // Row view cache for the list adapter (view-holder pattern).
  static class ViewHolder {
    TextView name;
    TextView email;
    ImageView photo;
    int position;
  }
}
| |
package org.amc.game.chessserver;
import static org.amc.game.chessserver.StompController.MESSAGE_HEADER_TYPE;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
import org.amc.DAOException;
import org.amc.dao.ServerChessGameDAO;
import org.amc.game.chess.ChessBoard;
import org.amc.game.chess.ChessGame;
import org.amc.game.chess.ChessGameFactory;
import org.amc.game.chess.ChessGamePlayer;
import org.amc.game.chess.Colour;
import org.amc.game.chess.HumanPlayer;
import org.amc.game.chess.RealChessGamePlayer;
import org.amc.game.chessserver.AbstractServerChessGame.ServerGameStatus;
import org.junit.*;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.messaging.simp.SimpMessagingTemplate;
import java.security.Principal;
import java.util.HashMap;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import javax.persistence.OptimisticLockException;
/**
 * Unit tests for {@link SaveGameStompController}.
 *
 * <p>Each test sends a "Save" STOMP message through the controller and verifies
 * (a) the interaction with the mocked {@link ServerChessGameDAO} and (b) the
 * reply message sent back to the requesting user via the mocked
 * {@link SimpMessagingTemplate}.
 */
public class SaveGameStompControllerUnitTest {

    // Controller under test.
    private SaveGameStompController controller;

    // Players taking part in the game; unknownPlayer is NOT part of the game.
    private ChessGamePlayer whitePlayer = new RealChessGamePlayer(new HumanPlayer("Stephen"), Colour.WHITE);
    private ChessGamePlayer blackPlayer = new RealChessGamePlayer(new HumanPlayer("Chris"), Colour.BLACK);
    private ChessGamePlayer unknownPlayer = new RealChessGamePlayer(new HumanPlayer("Villian"), Colour.BLACK);

    // Identifier of the game the mocked DAO knows about.
    private static final long gameUUID = 1234L;

    // Real game instance wrapped by the mocked DAO.
    private ServerChessGame scg;

    // STOMP session attributes passed to the controller; carries the "PLAYER" entry.
    private Map<String, Object> sessionAttributes;

    // Mocked messaging template used to capture the reply sent to the user.
    private SimpMessagingTemplate template = mock(SimpMessagingTemplate.class);

    // Captors for the convertAndSendToUser(...) arguments.
    private ArgumentCaptor<String> userArgument;
    private ArgumentCaptor<String> destinationArgument;
    private ArgumentCaptor<String> payloadArgument;
    @SuppressWarnings("rawtypes")
    private ArgumentCaptor<Map> headersArgument;

    @Mock
    private ServerChessGameDAO serverChessGameDAO;

    @Mock
    private EntityManager entityManager;

    @Mock
    private EntityTransaction transaction;

    // Principal whose name matches the white player; identifies the STOMP user.
    private Principal principal = new Principal() {

        @Override
        public String getName() {
            return "Stephen";
        }
    };

    @Before
    public void setUp() throws Exception {
        MockitoAnnotations.initMocks(this);

        scg = new TwoViewServerChessGame(gameUUID, whitePlayer);

        // Simple factory producing plain ChessGame instances for the server game.
        scg.setChessGameFactory(new ChessGameFactory() {
            @Override
            public ChessGame getChessGame(ChessBoard board, ChessGamePlayer playerWhite,
                            ChessGamePlayer playerBlack) {
                return new ChessGame(board, playerWhite, playerBlack);
            }
        });

        this.controller = new SaveGameStompController();
        controller.setServerChessDAO(serverChessGameDAO);

        sessionAttributes = new HashMap<String, Object>();

        this.controller.setTemplate(template);

        userArgument = ArgumentCaptor.forClass(String.class);
        destinationArgument = ArgumentCaptor.forClass(String.class);
        payloadArgument = ArgumentCaptor.forClass(String.class);
        headersArgument = ArgumentCaptor.forClass(Map.class);

        scg.addOpponent(blackPlayer);

        // Default: the requester is the (known) white player.
        sessionAttributes.put("PLAYER", whitePlayer);

        when(entityManager.getTransaction()).thenReturn(transaction);
        when(serverChessGameDAO.getEntityManager()).thenReturn(entityManager);
    }

    @AfterClass
    public static void tearDownClass() {
        // No class-level resources to release.
    }

    // Happy path: the game exists and is saved; the user receives a success INFO message.
    @Test
    public void testSaveGame() throws DAOException {
        when(serverChessGameDAO.getServerChessGame(eq(gameUUID))).thenReturn(scg);
        when(serverChessGameDAO.saveServerChessGame(eq(scg))).thenReturn(scg);

        controller.save(principal, sessionAttributes, gameUUID, "Save");

        verify(serverChessGameDAO,times(1)).saveServerChessGame(eq(scg));
        verifySimpMessagingTemplateCallToUser();

        assertEquals(MessageType.INFO, headersArgument.getValue().get(MESSAGE_HEADER_TYPE));
        assertEquals(SaveGameStompController.GAME_SAVED_SUCCESS, payloadArgument.getValue());
    }

    // Captures the single convertAndSendToUser(...) call made by the controller.
    @SuppressWarnings("unchecked")
    private void verifySimpMessagingTemplateCallToUser() {
        verify(template).convertAndSendToUser(userArgument.capture(),
                        destinationArgument.capture(), payloadArgument.capture(),
                        headersArgument.capture());
    }

    // Saving an unknown game id must not hit the DAO save and must report "doesn't exist".
    @Test
    public void testSaveGameServerChessGameDoesntExist() throws DAOException {
        Long invalidGameUID = 1L;
        controller.save(principal, sessionAttributes, invalidGameUID, "Save");

        verify(serverChessGameDAO, never()).saveServerChessGame(eq(scg));
        verifySimpMessagingTemplateCallToUser();

        assertEquals(MessageType.INFO, headersArgument.getValue().get(MESSAGE_HEADER_TYPE));
        assertEquals(String.format(SaveGameStompController.SAVE_ERROR_GAME_DOESNT_EXIST_ERROR, invalidGameUID),
                        payloadArgument.getValue());
    }

    // A finished game must not be persisted; the user is told the game is over.
    @Test
    public void testSaveFinishedServerChessGame() throws DAOException {
        when(serverChessGameDAO.getServerChessGame(eq(gameUUID))).thenReturn(scg);
        this.scg.setCurrentStatus(ServerGameStatus.FINISHED);
        controller.save(principal, sessionAttributes, gameUUID, "Save");

        verify(serverChessGameDAO, never()).addEntity(eq(scg));
        verifySimpMessagingTemplateCallToUser();

        assertEquals(MessageType.INFO, headersArgument.getValue().get(MESSAGE_HEADER_TYPE));
        assertEquals(SaveGameStompController.SAVE_ERROR_GAME_IS_OVER, payloadArgument.getValue());
    }

    // A DAOException thrown by the save itself is reported as "can't be saved".
    @Test
    public void testSaveServerChessGameDAOException() throws DAOException {
        when(serverChessGameDAO.getServerChessGame(eq(gameUUID))).thenReturn(scg);
        doThrow(new DAOException("Database connection closed"))
            .when(serverChessGameDAO).saveServerChessGame(eq(scg));
        controller.save(principal, sessionAttributes, gameUUID, "Save");

        verify(serverChessGameDAO, times(1)).saveServerChessGame(eq(scg));
        verifySimpMessagingTemplateCallToUser();

        assertEquals(MessageType.INFO, headersArgument.getValue().get(MESSAGE_HEADER_TYPE));
        assertEquals(SaveGameStompController.SAVE_ERROR_CANT_BE_SAVED, payloadArgument.getValue());
    }

    // A DAOException while fetching the game is reported as "doesn't exist" and
    // nothing is saved.
    @Test
    public void testGetServerChessGameDAOException() throws DAOException {
        doThrow(new DAOException("Database connection closed"))
            .when(serverChessGameDAO).getServerChessGame(eq(gameUUID));
        controller.save(principal, sessionAttributes, gameUUID, "Save");

        verify(serverChessGameDAO, times(0)).saveServerChessGame(eq(scg));
        verifySimpMessagingTemplateCallToUser();

        assertEquals(MessageType.INFO, headersArgument.getValue().get(MESSAGE_HEADER_TYPE));
        assertEquals(String.format(SaveGameStompController.SAVE_ERROR_GAME_DOESNT_EXIST_ERROR, gameUUID),
                        payloadArgument.getValue());
    }

    // A player who is not part of the game must not be able to save it.
    @Test
    public void testSaveServerChessGameByUnknownPlayer() throws DAOException {
        when(serverChessGameDAO.getServerChessGame(eq(gameUUID))).thenReturn(scg);
        sessionAttributes.put("PLAYER", unknownPlayer);
        controller.save(principal, sessionAttributes, gameUUID, "Save");

        verify(serverChessGameDAO, never()).addEntity(eq(scg));
        verifySimpMessagingTemplateCallToUser();

        assertEquals(MessageType.INFO, headersArgument.getValue().get(MESSAGE_HEADER_TYPE));
        assertEquals(SaveGameStompController.ERROR_UNKNOWN_PLAYER, payloadArgument.getValue());
    }

    // An OptimisticLockException during save is reported as "can't be saved".
    @Test
    public void testSaveThrowsOptimisticLockingException() throws DAOException {
        when(serverChessGameDAO.getServerChessGame(eq(gameUUID))).thenReturn(scg);
        doThrow(new OptimisticLockException())
            .when(serverChessGameDAO).saveServerChessGame(eq(scg));
        controller.save(principal, sessionAttributes, gameUUID, "Save");

        verify(serverChessGameDAO, times(1)).saveServerChessGame(eq(scg));
        verifySimpMessagingTemplateCallToUser();

        assertEquals(MessageType.INFO, headersArgument.getValue().get(MESSAGE_HEADER_TYPE));
        assertEquals(SaveGameStompController.SAVE_ERROR_CANT_BE_SAVED, payloadArgument.getValue());
    }

    // Even when the follow-up deleteEntity call after an optimistic-lock failure
    // also fails, the user still only sees "can't be saved".
    @Test
    public void testSaveThrowsOptimisticLockingExceptionThenDAOException() throws DAOException {
        when(serverChessGameDAO.getServerChessGame(eq(gameUUID))).thenReturn(scg);
        doThrow(new OptimisticLockException())
            .when(serverChessGameDAO).saveServerChessGame(eq(scg));
        doThrow(new DAOException("Database connection closed"))
            .when(serverChessGameDAO).deleteEntity(eq(scg));
        controller.save(principal, sessionAttributes, gameUUID, "Save");

        verify(serverChessGameDAO, times(1)).saveServerChessGame(eq(scg));
        verifySimpMessagingTemplateCallToUser();

        assertEquals(MessageType.INFO, headersArgument.getValue().get(MESSAGE_HEADER_TYPE));
        assertEquals(SaveGameStompController.SAVE_ERROR_CANT_BE_SAVED, payloadArgument.getValue());
    }
}
| |
package org.monarch.sim;
import java.util.Random;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.neo4j.cypher.javacompat.ExecutionEngine;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.RelationshipType;
import org.neo4j.graphdb.Transaction;
import org.neo4j.test.TestGraphDatabaseFactory;
import org.neo4j.tooling.GlobalGraphOperations;
/**
 * Tests and micro-benchmarks for Cypher-based graph traversals over three
 * fixture graphs: a complete graph, a balanced binary tree, and a binary tree
 * augmented with random extra edges.
 */
public class CypherTest {

    // Define the relationships we want to use.
    static enum RelTypes implements RelationshipType {
        SUBCLASS,
    }

    // Single shared RNG. The previous code created a new Random inside the
    // loops, which is wasteful and risks poorly distributed seeds.
    private static final Random RANDOM = new Random();

    // Complete graph.
    static GraphDatabaseService completeDB;
    static ExecutionEngine completeEngine;
    // Balanced binary tree.
    static GraphDatabaseService treeDB;
    static ExecutionEngine treeEngine;
    // Augmented binary tree.
    static GraphDatabaseService augTreeDB;
    static ExecutionEngine augTreeEngine;

    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        buildCompleteDB(16);
        buildTreeDB(15);
    }

    /**
     * Creates a bare node, wrapping the creation in its own transaction.
     * The transaction is now closed in a finally block so it is not leaked
     * if node creation throws.
     */
    public static Node addNode(GraphDatabaseService db) {
        Transaction tx = db.beginTx();
        try {
            Node newNode = db.createNode();
            tx.success();
            return newNode;
        } finally {
            tx.finish();
        }
    }

    /**
     * Creates a named node, wrapping the creation in its own transaction.
     */
    public static Node addNode(GraphDatabaseService db, String name) {
        Transaction tx = db.beginTx();
        try {
            Node newNode = db.createNode();
            newNode.setProperty("name", name);
            tx.success();
            return newNode;
        } finally {
            tx.finish();
        }
    }

    /**
     * Creates a SUBCLASS edge from first to second, wrapped in its own transaction.
     */
    public static Relationship addEdge(GraphDatabaseService db, Node first, Node second) {
        Transaction tx = db.beginTx();
        try {
            Relationship newRel = first.createRelationshipTo(second, RelTypes.SUBCLASS);
            tx.success();
            return newRel;
        } finally {
            tx.finish();
        }
    }

    /**
     * Builds a complete graph of the given size entirely in Cypher, with edges
     * directed from lower node id to higher node id.
     */
    private static void buildCompleteDB(int numNodes) {
        completeDB = new TestGraphDatabaseFactory().newImpermanentDatabase();
        completeEngine = new ExecutionEngine(completeDB);

        // Query taken from neo4j.org.
        // Create a temporary central node.
        String query = "CREATE (center {count:0}) ";
        // Build the right number of nodes.
        query += "FOREACH (";
        query += "x in range(1, " + numNodes + ") ";
        query += "| ";
        query += "CREATE (leaf {count:x}), (center)-[:X]->(leaf)";
        query += ") ";
        query += "WITH center ";
        // Connect nodes in the correct direction.
        query += "MATCH (leaf1)<--(center)-->(leaf2) ";
        query += "WHERE id(leaf1) < id(leaf2) ";
        query += "CREATE (leaf1)-[:X]->(leaf2) ";
        query += "WITH center ";
        // Delete the central node and associated edges.
        query += "MATCH (center)-[r]->() ";
        query += "DELETE center, r ";
        completeEngine.execute(query);
    }

    /**
     * Builds a plain balanced binary tree (no extra edges).
     */
    private static void buildTreeDB(int numNodes) {
        treeDB = new TestGraphDatabaseFactory().newImpermanentDatabase();
        treeEngine = new ExecutionEngine(treeDB);
        buildAugmentedTree(numNodes, 0, treeDB, treeEngine);
    }

    /**
     * Builds a balanced binary tree of numNodes nodes and then adds extraEdges
     * random distinct edges (directed from higher node id to lower, matching
     * the child-to-parent direction of the tree edges).
     */
    private static void buildAugmentedTree(int numNodes, int extraEdges,
            GraphDatabaseService db, ExecutionEngine engine) {
        // Build a balanced binary tree: node i (1-based) hangs off node i / 2,
        // so the created nodes occupy ids 1..numNodes.
        for (int i = 1; i <= numNodes; i++) {
            Node newNode = addNode(db);
            if (i != 1) {
                addEdge(db, newNode, db.getNodeById(i / 2));
            }
        }

        // Throw in extra edges between distinct random nodes.
        for (int i = 0; i < extraEdges; i++) {
            // Choose two distinct ids in the valid range 1..numNodes.
            // (Previously the draw was from 0..numNodes - 1, which could hit
            // the nonexistent node 0 and could never pick node numNodes.)
            int first = 1 + RANDOM.nextInt(numNodes);
            int second = first;
            while (first == second) {
                second = 1 + RANDOM.nextInt(numNodes);
            }

            // Order the indexes correctly: edge goes from higher id to lower.
            if (first < second) {
                int temp = first;
                first = second;
                second = temp;
            }

            // If the edge already exists, try again.
            Node firstNode = db.getNodeById(first);
            Node secondNode = db.getNodeById(second);
            boolean exists = false;
            for (Relationship edge : GlobalGraphOperations.at(db).getAllRelationships()) {
                if (edge.getStartNode().equals(firstNode) && edge.getEndNode().equals(secondNode)) {
                    exists = true;
                    break;
                }
            }
            if (exists) {
                i--;
                continue;
            }

            // Connect the associated nodes.
            addEdge(db, firstNode, secondNode);
        }
    }

    @AfterClass
    public static void tearDownAfterClass() throws Exception {
        // Clean up all our graphs. augTreeDB is shut down inside the benchmark,
        // so only the two long-lived databases are closed here.
        completeDB.shutdown();
        treeDB.shutdown();
    }

    /**
     * Benchmarks common-ancestor queries on augmented trees with a varying
     * percentage of extra edges. Intentionally not annotated with @Test; it is
     * invoked manually from test() when needed.
     */
    public void benchmarkAugTree() {
        int numNodes = 15;
        int possibleEdges = (numNodes - 1) * (numNodes - 2) / 2;
        int trials = 10;
        int pairsToTry = 100;
        int warmupPairs = 10;
        System.out.println(numNodes + " nodes");

        // Vary the number of edges to add.
        for (int percent = 0; percent <= 50; percent += 10) {
            int extraEdges = percent * possibleEdges / 100;
            long totalTime = 0;

            // Repeat to get an average.
            for (int trial = 0; trial < trials; trial++) {
                // Build the augmented tree.
                augTreeDB = new TestGraphDatabaseFactory().newImpermanentDatabase();
                augTreeEngine = new ExecutionEngine(augTreeDB);
                buildAugmentedTree(numNodes, extraEdges, augTreeDB, augTreeEngine);

                // Try pairs.
                long start, end;
                for (int pairsTried = 0; pairsTried < pairsToTry + warmupPairs; pairsTried++) {
                    // Get two random nodes (valid ids are 1..numNodes; see buildAugmentedTree).
                    Node first = augTreeDB.getNodeById(1 + RANDOM.nextInt(numNodes));
                    Node second = augTreeDB.getNodeById(1 + RANDOM.nextInt(numNodes));

                    // Find common ancestors, timing only post-warmup pairs.
                    start = System.nanoTime();
                    CypherTraversals.getCommonAncestors(first, second, augTreeEngine);
                    end = System.nanoTime();
                    if (pairsTried >= warmupPairs) {
                        totalTime += end - start;
                    }
                }

                // Clean up.
                augTreeDB.shutdown();
            }

            // Show the results: average milliseconds per trial (of pairsToTry pairs).
            long avgMilliTime = totalTime / (1000000 * trials);
            System.out.println(percent + "%: " + avgMilliTime + " milliseconds");
        }
    }

    @Test
    public void test() {
        // Exploratory scaffolding kept for manual use; enable blocks as needed.
        // Iterable<Node> nodes = GlobalGraphOperations.at(completeDB).getAllNodes();
        // for (Node n : nodes)
        // {
        //     System.out.println(CypherTraversals.getAncestors(n, completeEngine));
        // }
        // for (Node first : nodes)
        // {
        //     long firstId = first.getId();
        //     for (Node second : nodes)
        //     {
        //         long secondId = second.getId();
        //         System.out.println(firstId + " " + secondId + ":");
        //         System.out.println(CypherTraversals.getCommonAncestors(first, second, completeEngine));
        //     }
        // }
        // benchmarkAugTree();
        // for (Relationship edge : GlobalGraphOperations.at(completeDB).getAllRelationships())
        // {
        //     System.out.println(edge.getStartNode() + "->" + edge.getEndNode());
        // }
    }
}
| |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.testing;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.IterableCoder;
import com.google.cloud.dataflow.sdk.transforms.SerializableFunction;
import com.google.cloud.dataflow.sdk.transforms.windowing.BoundedWindow;
import com.google.cloud.dataflow.sdk.transforms.windowing.FixedWindows;
import com.google.cloud.dataflow.sdk.transforms.windowing.IntervalWindow;
import com.google.cloud.dataflow.sdk.util.WindowedValue;
import com.google.cloud.dataflow.sdk.util.WindowingStrategy;
import com.google.cloud.dataflow.sdk.values.PCollectionView;
import com.google.cloud.dataflow.sdk.values.PValueBase;
import com.google.cloud.dataflow.sdk.values.TupleTag;
import com.google.common.base.Function;
import com.google.common.base.MoreObjects;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.joda.time.Duration;
import org.joda.time.Instant;
import java.util.List;
import java.util.Objects;
/**
* Methods for creating and using {@link PCollectionView} instances.
*/
public final class PCollectionViewTesting {
  // Do not instantiate; static methods only
  private PCollectionViewTesting() { }

  /**
   * The length of the default window, which is an {@link IntervalWindow}, but kept encapsulated
   * as it is not for the user to know what sort of window it is.
   */
  private static final long DEFAULT_WINDOW_MSECS = 1000 * 60 * 60;

  /**
   * A default windowing strategy. Tests that are not concerned with the windowing
   * strategy should not specify it, and all views will use this.
   */
  public static final WindowingStrategy<?, ?> DEFAULT_WINDOWING_STRATEGY =
      WindowingStrategy.of(FixedWindows.of(new Duration(DEFAULT_WINDOW_MSECS)));

  /**
   * A default window into which test elements will be placed, if the window is
   * not explicitly overridden.
   */
  public static final BoundedWindow DEFAULT_NONEMPTY_WINDOW =
      new IntervalWindow(new Instant(0), new Instant(DEFAULT_WINDOW_MSECS));

  /**
   * A timestamp in the {@link #DEFAULT_NONEMPTY_WINDOW}.
   */
  public static final Instant DEFAULT_TIMESTAMP = DEFAULT_NONEMPTY_WINDOW.maxTimestamp().minus(1);

  /**
   * A window into which no element will be placed by methods in this class, unless explicitly
   * requested. It immediately follows {@link #DEFAULT_NONEMPTY_WINDOW}.
   */
  public static final BoundedWindow DEFAULT_EMPTY_WINDOW = new IntervalWindow(
      DEFAULT_NONEMPTY_WINDOW.maxTimestamp(),
      DEFAULT_NONEMPTY_WINDOW.maxTimestamp().plus(DEFAULT_WINDOW_MSECS));

  /**
   * A specialization of {@link SerializableFunction} just for putting together
   * {@link PCollectionView} instances.
   */
  public static interface ViewFn<ElemT, ViewT>
      extends SerializableFunction<Iterable<WindowedValue<ElemT>>, ViewT> { }

  /**
   * A {@link ViewFn} that returns the provided contents as a fully lazy iterable.
   */
  public static class IdentityViewFn<T> implements ViewFn<T, Iterable<T>> {
    private static final long serialVersionUID = 0L;

    @Override
    public Iterable<T> apply(Iterable<WindowedValue<T>> contents) {
      // Lazily strip the windowing metadata from each element; nothing is
      // materialized until the returned iterable is traversed.
      return Iterables.transform(contents, new Function<WindowedValue<T>, T>() {
        @Override
        public T apply(WindowedValue<T> windowedValue) {
          return windowedValue.getValue();
        }
      });
    }
  }

  /**
   * A {@link ViewFn} that traverses the whole iterable eagerly and returns the number of elements.
   *
   * <p>Only for use in testing scenarios with small collections. If there are more elements
   * provided than {@code Integer.MAX_VALUE} then behavior is unpredictable.
   */
  public static class LengthViewFn<T> implements ViewFn<T, Long> {
    private static final long serialVersionUID = 0L;

    @Override
    public Long apply(Iterable<WindowedValue<T>> contents) {
      // Iterables.size returns int; widened to long per this class's contract.
      return (long) Iterables.size(contents);
    }
  }

  /**
   * A {@link ViewFn} that always returns the value with which it is instantiated,
   * ignoring the contents entirely.
   */
  public static class ConstantViewFn<ElemT, ViewT> implements ViewFn<ElemT, ViewT> {
    private static final long serialVersionUID = 0L;

    // The fixed value returned by apply(). NOTE(review): could be final.
    private ViewT value;

    public ConstantViewFn(ViewT value) {
      this.value = value;
    }

    @Override
    public ViewT apply(Iterable<WindowedValue<ElemT>> contents) {
      return value;
    }
  }

  /**
   * A {@link PCollectionView} explicitly built from a {@link TupleTag}
   * and conversion {@link ViewFn}, and an element coder, using the
   * {@link #DEFAULT_WINDOWING_STRATEGY}.
   *
   * <p>This method is only recommended for use by runner implementors to test their
   * implementations. It is very easy to construct a {@link PCollectionView} that does
   * not respect the invariants required for proper functioning.
   *
   * <p>Note that if the provided {@code WindowingStrategy} does not match that of the windowed
   * values provided to the view during execution, results are unpredictable. It is recommended
   * that the values be prepared via {@link #contentsInDefaultWindow}.
   */
  public static <ElemT, ViewT> PCollectionView<ViewT> testingView(
      TupleTag<Iterable<WindowedValue<ElemT>>> tag,
      ViewFn<ElemT, ViewT> viewFn,
      Coder<ElemT> elemCoder) {
    // Delegate to the fully-specified overload with the default strategy.
    return testingView(
        tag,
        viewFn,
        elemCoder,
        DEFAULT_WINDOWING_STRATEGY);
  }

  /**
   * A {@link PCollectionView} explicitly built from its {@link TupleTag},
   * {@link WindowingStrategy}, {@link Coder}, and conversion function.
   *
   * <p>This method is only recommended for use by runner implementors to test their
   * implementations. It is very easy to construct a {@link PCollectionView} that does
   * not respect the invariants required for proper functioning.
   *
   * <p>Note that if the provided {@code WindowingStrategy} does not match that of the windowed
   * values provided to the view during execution, results are unpredictable.
   */
  public static <ElemT, ViewT> PCollectionView<ViewT> testingView(
      TupleTag<Iterable<WindowedValue<ElemT>>> tag,
      ViewFn<ElemT, ViewT> viewFn,
      Coder<ElemT> elemCoder,
      WindowingStrategy<?, ?> windowingStrategy) {
    return new PCollectionViewFromParts<>(
        tag,
        viewFn,
        windowingStrategy,
        // The view's coder wraps the element coder in the window coder taken
        // from the supplied strategy.
        IterableCoder.of(
            WindowedValue.getFullCoder(elemCoder, windowingStrategy.getWindowFn().windowCoder())));
  }

  /**
   * Prepares {@code values} for reading as the contents of a {@link PCollectionView} side input,
   * placing each value at {@link #DEFAULT_TIMESTAMP} in the {@link #DEFAULT_NONEMPTY_WINDOW}.
   */
  public static <T> Iterable<WindowedValue<T>> contentsInDefaultWindow(T... values)
      throws Exception {
    List<WindowedValue<T>> windowedValues = Lists.newArrayList();
    for (T value : values) {
      windowedValues.add(WindowedValue.of(value, DEFAULT_TIMESTAMP, DEFAULT_NONEMPTY_WINDOW));
    }
    return windowedValues;
  }

  ///////////////////////////////////////////////////////////////////////////////
  // Internal details below here

  /**
   * A {@link PCollectionView} explicitly built from its {@link TupleTag},
   * {@link WindowingStrategy}, and conversion function.
   *
   * <p>Instantiate via {@link #testingView}.
   */
  private static class PCollectionViewFromParts<ElemT, ViewT>
      extends PValueBase
      implements PCollectionView<ViewT> {
    private static final long serialVersionUID = 0L;

    private TupleTag<Iterable<WindowedValue<ElemT>>> tag;
    private ViewFn<ElemT, ViewT> viewFn;
    private WindowingStrategy<?, ?> windowingStrategy;
    private Coder<Iterable<WindowedValue<ElemT>>> coder;

    public PCollectionViewFromParts(
        TupleTag<Iterable<WindowedValue<ElemT>>> tag,
        ViewFn<ElemT, ViewT> viewFn,
        WindowingStrategy<?, ?> windowingStrategy,
        Coder<Iterable<WindowedValue<ElemT>>> coder) {
      this.tag = tag;
      this.viewFn = viewFn;
      this.windowingStrategy = windowingStrategy;
      this.coder = coder;
    }

    // Erases the element type of the tag; safe because the tag is only used as a key.
    @SuppressWarnings({"unchecked", "rawtypes"})
    @Override
    public TupleTag<Iterable<WindowedValue<?>>> getTagInternal() {
      return (TupleTag) tag;
    }

    // Applies the conversion function to the raw windowed contents.
    @SuppressWarnings({"unchecked", "rawtypes"})
    @Override
    public ViewT fromIterableInternal(Iterable<WindowedValue<?>> contents) {
      return (ViewT) viewFn.apply((Iterable) contents);
    }

    @Override
    public WindowingStrategy<?, ?> getWindowingStrategyInternal() {
      return windowingStrategy;
    }

    @SuppressWarnings({"unchecked", "rawtypes"})
    @Override
    public Coder<Iterable<WindowedValue<?>>> getCoderInternal() {
      return (Coder) coder;
    }

    // Identity of a view is its tag only; hashCode and equals agree on that.
    @Override
    public int hashCode() {
      return Objects.hash(tag);
    }

    @Override
    public boolean equals(Object other) {
      if (!(other instanceof PCollectionView)) {
        return false;
      }
      @SuppressWarnings("unchecked")
      PCollectionView<?> otherView = (PCollectionView<?>) other;
      return tag.equals(otherView.getTagInternal());
    }

    @Override
    public String toString() {
      return MoreObjects.toStringHelper(this)
          .add("tag", tag)
          .add("viewFn", viewFn)
          .toString();
    }
  }
}
| |
package org.jolokia.backend;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.*;
import javax.management.*;
import org.jolokia.backend.executor.NotChangedException;
import org.jolokia.config.ConfigKey;
import org.jolokia.config.Configuration;
import org.jolokia.converter.Converters;
import org.jolokia.converter.json.JsonConvertOptions;
import org.jolokia.detector.ServerHandle;
import org.jolokia.discovery.AgentDetails;
import org.jolokia.discovery.AgentDetailsHolder;
import org.jolokia.history.HistoryStore;
import org.jolokia.request.JmxRequest;
import org.jolokia.restrictor.AllowAllRestrictor;
import org.jolokia.restrictor.Restrictor;
import org.jolokia.util.*;
import org.json.simple.JSONObject;
import static org.jolokia.config.ConfigKey.*;
/*
* Copyright 2009-2013 Roland Huss
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Backendmanager for dispatching to various backends based on a given
* {@link JmxRequest}
*
* @author roland
* @since Nov 11, 2009
*/
public class BackendManager implements AgentDetailsHolder {
// Dispatches request to local MBeanServer
private LocalRequestDispatcher localDispatcher;
// Converter for converting various attribute object types
// a JSON representation
private Converters converters;
// Hard limits for conversion
private JsonConvertOptions.Builder convertOptionsBuilder;
// Handling access restrictions
private Restrictor restrictor;
// History handler
private HistoryStore historyStore;
// Storage for storing debug information
private DebugStore debugStore;
// Loghandler for dispatching logs
private LogHandler logHandler;
// List of RequestDispatchers to consult
private List<RequestDispatcher> requestDispatchers;
// Initialize used for late initialization
// ("volatile: because we use double-checked locking later on
// --> http://www.cs.umd.edu/~pugh/java/memoryModel/DoubleCheckedLocking.html)
private volatile Initializer initializer;
// Details about the agent inclding the server handle
private AgentDetails agentDetails;
/**
 * Construct a new backend manager with the given configuration and which allows
 * every operation (no restrictor).
 *
 * @param pConfig configuration used for tuning this handler's behaviour
 * @param pLogHandler logger
 */
public BackendManager(Configuration pConfig, LogHandler pLogHandler) {
    // A null restrictor is mapped to AllowAllRestrictor by the main constructor.
    this(pConfig, pLogHandler, null);
}
/**
 * Construct a new backend manager with the given configuration and eager initialisation.
 *
 * @param pConfig configuration used for tuning this handler's behaviour
 * @param pLogHandler logger
 * @param pRestrictor a restrictor for limiting access. Can be null in which case every operation is allowed
 */
public BackendManager(Configuration pConfig, LogHandler pLogHandler, Restrictor pRestrictor) {
    // Delegate with pLazy == false: initialisation happens immediately.
    this(pConfig,pLogHandler,pRestrictor,false);
}
/**
 * Construct a new backend manager with the given configuration.
 *
 * @param pConfig configuration map used for tuning this handler's behaviour
 * @param pLogHandler logger
 * @param pRestrictor a restrictor for limiting access; may be null, in which case every operation is allowed
 * @param pLazy whether the initialisation should be deferred until the first request
 */
public BackendManager(Configuration pConfig, LogHandler pLogHandler, Restrictor pRestrictor, boolean pLazy) {
    // Log handler for putting out debug output.
    logHandler = pLogHandler;

    // Access restrictor: a null argument means "allow everything".
    if (pRestrictor == null) {
        restrictor = new AllowAllRestrictor();
    } else {
        restrictor = pRestrictor;
    }

    // Details about the agent, used for discovery.
    agentDetails = new AgentDetails(pConfig);

    if (pLazy) {
        // Defer the heavy setup; lazyInitIfNeeded() will run it on first use.
        initializer = new Initializer(pConfig);
    } else {
        init(pConfig);
        initializer = null;
    }
}
/**
 * Handle a single JMXRequest. The response status is set to 200 if the request
 * was successful, or 304 if a handler signalled that the requested value has
 * not changed.
 *
 * @param pJmxReq request to perform
 * @return the already converted answer
 * @throws InstanceNotFoundException
 * @throws AttributeNotFoundException
 * @throws ReflectionException
 * @throws MBeanException
 */
public JSONObject handleRequest(JmxRequest pJmxReq) throws InstanceNotFoundException, AttributeNotFoundException,
        ReflectionException, MBeanException, IOException {
    lazyInitIfNeeded();

    boolean debug = isDebug();
    long time = 0;
    if (debug) {
        // Remember the start time only when debugging, for the timing report below.
        time = System.currentTimeMillis();
    }
    JSONObject json;
    try {
        json = callRequestDispatcher(pJmxReq);

        // Update global history store, add timestamp and possibly history information to the request
        historyStore.updateAndAdd(pJmxReq,json);
        json.put("status",200 /* success */);
    } catch (NotChangedException exp) {
        // A NotChangedException from a handler indicates that its value hasn't changed. We return a
        // status of "304 Not Modified" similar to the HTTP status code (http://en.wikipedia.org/wiki/HTTP_status)
        json = new JSONObject();
        json.put("request",pJmxReq.toJSON());
        json.put("status",304);
        json.put("timestamp",System.currentTimeMillis() / 1000);
    }
    if (debug) {
        debug("Execution time: " + (System.currentTimeMillis() - time) + " ms");
        debug("Response: " + json);
    }

    return json;
}
/**
 * Convert a Throwable to a JSON object so that it can be included in an error response.
 *
 * @param pExp throwable to convert
 * @param pJmxReq the request from which to take the serialization options
 * @return the converted exception
 */
public Object convertExceptionToJson(Throwable pExp, JmxRequest pJmxReq) {
    JsonConvertOptions opts = getJsonConvertOptions(pJmxReq);
    try {
        return (JSONObject) converters.getToJsonConverter().convertToJson(pExp, null, opts);
    } catch (AttributeNotFoundException ignored) {
        // Cannot happen, since no extraction path is used for the conversion.
        return null;
    }
}
/**
 * Remove the MBeans registered by the local dispatcher again. Failures during
 * unregistration are logged but not propagated.
 */
public void destroy() {
    try {
        localDispatcher.destroy();
    } catch (JMException e) {
        error("Cannot unregister MBean: " + e,e);
    }
}
/**
 * Check whether remote access from the given client is allowed.
 * Simply delegates to the configured restrictor.
 *
 * @param pRemoteHost remote host to check against
 * @param pRemoteAddr alternative IP address
 * @return true if remote access is allowed
 */
public boolean isRemoteAccessAllowed(String pRemoteHost, String pRemoteAddr) {
    return restrictor.isRemoteAccessAllowed(pRemoteHost, pRemoteAddr);
}
/**
 * Check whether CORS access is allowed for the given origin.
 * Simply delegates to the configured restrictor.
 *
 * @param pOrigin origin URL which needs to be checked
 * @param pStrictChecking whether to do a strict check (i.e. a server side check)
 * @return true if CORS access is allowed
 */
public boolean isOriginAllowed(String pOrigin,boolean pStrictChecking) {
    return restrictor.isOriginAllowed(pOrigin, pStrictChecking);
}
/**
 * Log at info level. The message also goes into the debug store (when present)
 * so it shows up in the debug history.
 *
 * @param msg to log
 */
public void info(String msg) {
    logHandler.info(msg);
    if (debugStore != null) {
        debugStore.log(msg);
    }
}
/**
 * Log at debug level. The message also goes into the debug store (when present)
 * so it shows up in the debug history.
 *
 * @param msg message to log
 */
public void debug(String msg) {
    logHandler.debug(msg);
    if (debugStore != null) {
        debugStore.log(msg);
    }
}
/**
 * Log at error level. The message and exception also go into the debug store
 * (when present) so they show up in the debug history.
 *
 * @param message message to log
 * @param t exception which occurred
 */
public void error(String message, Throwable t) {
    // Must not be final so that we can mock it in EasyMock for our tests
    logHandler.error(message, t);
    if (debugStore != null) {
        debugStore.log(message, t);
    }
}
/**
 * Whether debug is switched on.
 *
 * @return true if a debug store is present and has debugging enabled
 */
public boolean isDebug() {
    return debugStore != null && debugStore.isDebug();
}
/**
 * Get the details for this agent which can be updated or used (e.g. for discovery).
 *
 * @return agent details
 */
public AgentDetails getAgentDetails() {
    return agentDetails;
}
// ==========================================================================================================
// Initializer used for late initialisation as it is required for the agent when used
// as startup options
private final class Initializer {

    // Configuration captured at construction time and replayed on init().
    private Configuration config;

    private Initializer(Configuration pConfig) {
        config = pConfig;
    }

    // Perform the deferred initialisation of the enclosing BackendManager.
    void init() {
        BackendManager.this.init(config);
    }
}
// Run the deferred initialisation if not already done. This is the
// double-checked locking pattern: "initializer" is volatile, the first
// null check avoids taking the lock on the hot path, and the second check
// inside the lock guarantees init() runs exactly once.
private void lazyInitIfNeeded() {
    if (initializer != null) {
        synchronized (this) {
            if (initializer != null) {
                initializer.init();
                initializer = null;
            }
        }
    }
}
// Initialize this object: converters, conversion limits, request dispatchers,
// MBeans and agent details. Order matters: the local dispatcher must exist
// before the other dispatchers are created, since they share its server handle.
private void init(Configuration pConfig) {

    // Central objects
    converters = new Converters();

    initLimits(pConfig);

    // Create and remember request dispatchers
    localDispatcher = new LocalRequestDispatcher(converters,
                                                 restrictor,
                                                 pConfig,
                                                 logHandler);
    ServerHandle serverHandle = localDispatcher.getServerHandle();
    requestDispatchers = createRequestDispatchers(pConfig.get(DISPATCHER_CLASSES),
                                                  converters,serverHandle,restrictor);
    // The local dispatcher is consulted last.
    requestDispatchers.add(localDispatcher);

    // Backendstore for remembering agent state
    initMBeans(pConfig);

    // Propagate server identification to the discovery details.
    agentDetails.setServerInfo(serverHandle.getVendor(),serverHandle.getProduct(),serverHandle.getVersion());
}
// Set up the hard limits (max depth, collection size, object count) used
// when converting values to JSON.
private void initLimits(Configuration pConfig) {
    if (pConfig == null) {
        // No configuration given: fall back to the builder's defaults.
        convertOptionsBuilder = new JsonConvertOptions.Builder();
        return;
    }
    convertOptionsBuilder = new JsonConvertOptions.Builder(
            getNullSaveIntLimit(pConfig.get(MAX_DEPTH)),
            getNullSaveIntLimit(pConfig.get(MAX_COLLECTION_SIZE)),
            getNullSaveIntLimit(pConfig.get(MAX_OBJECTS)));
}
// Parse a limit value, treating a missing (null) value as 0 ("no limit").
private int getNullSaveIntLimit(String pValue) {
    if (pValue == null) {
        return 0;
    }
    return Integer.parseInt(pValue);
}
// Construct the configured dispatchers by reflection. Always returns a list,
// an empty one when no extra request dispatcher is configured.
private List<RequestDispatcher> createRequestDispatchers(String pClasses,
                                                         Converters pConverters,
                                                         ServerHandle pServerHandle,
                                                         Restrictor pRestrictor) {
    List<RequestDispatcher> dispatchers = new ArrayList<RequestDispatcher>();
    if (pClasses == null || pClasses.length() == 0) {
        return dispatchers;
    }
    // Class names are given as a comma separated list (whitespace around commas allowed)
    for (String className : pClasses.split("\\s*,\\s*")) {
        dispatchers.add(createDispatcher(className, pConverters, pServerHandle, pRestrictor));
    }
    return dispatchers;
}
// Create a single dispatcher by reflection.
//
// The dispatcher class must provide a public constructor taking
// (Converters, ServerHandle, Restrictor). Any reflective failure is translated
// to an IllegalArgumentException with the underlying cause preserved.
private RequestDispatcher createDispatcher(String pDispatcherClass,
                                           Converters pConverters,
                                           ServerHandle pServerHandle, Restrictor pRestrictor) {
    try {
        // Use parameterized reflection types instead of raw Class/Constructor
        Class<?> clazz = ClassUtil.classForName(pDispatcherClass, getClass().getClassLoader());
        if (clazz == null) {
            throw new IllegalArgumentException("Couldn't lookup dispatcher " + pDispatcherClass);
        }
        Constructor<?> constructor = clazz.getConstructor(Converters.class,
                                                          ServerHandle.class,
                                                          Restrictor.class);
        return (RequestDispatcher)
                constructor.newInstance(pConverters,
                                        pServerHandle,
                                        pRestrictor);
    } catch (NoSuchMethodException e) {
        throw new IllegalArgumentException("Class " + pDispatcherClass + " has invalid constructor: " + e,e);
    } catch (IllegalAccessException e) {
        throw new IllegalArgumentException("Constructor of " + pDispatcherClass + " couldn't be accessed: " + e,e);
    } catch (InvocationTargetException e) {
        // Unwrap the reflection wrapper so the real failure shows up in the message,
        // while keeping the full chain available as the cause.
        throw new IllegalArgumentException("Constructor of " + pDispatcherClass
                + " threw an exception: " + e.getTargetException(), e);
    } catch (InstantiationException e) {
        throw new IllegalArgumentException(pDispatcherClass + " couldn't be instantiated: " + e,e);
    }
}
// Call an appropriate request dispatcher: the first registered dispatcher which
// declares it can handle the request wins, so registration order matters. The
// dispatcher's return value is converted to JSON (optionally extracting along the
// request's path) and wrapped together with the original request into the result.
private JSONObject callRequestDispatcher(JmxRequest pJmxReq)
        throws InstanceNotFoundException, AttributeNotFoundException, ReflectionException, MBeanException, IOException, NotChangedException {
    Object retValue = null;
    boolean useValueWithPath = false;
    boolean found = false;
    for (RequestDispatcher dispatcher : requestDispatchers) {
        if (dispatcher.canHandle(pJmxReq)) {
            retValue = dispatcher.dispatchRequest(pJmxReq);
            useValueWithPath = dispatcher.useReturnValueWithPath(pJmxReq);
            found = true;
            break;
        }
    }
    if (!found) {
        // The local dispatcher is always registered during init(), so reaching this
        // point indicates an internal inconsistency, not a user error.
        throw new IllegalStateException("Internal error: No dispatcher found for handling " + pJmxReq);
    }
    // Convert the raw return value according to the request's (or global) limits
    JsonConvertOptions opts = getJsonConvertOptions(pJmxReq);
    Object jsonResult =
            converters.getToJsonConverter()
                    .convertToJson(retValue, useValueWithPath ? pJmxReq.getPathParts() : null, opts);
    JSONObject jsonObject = new JSONObject();
    jsonObject.put("value",jsonResult);
    jsonObject.put("request",pJmxReq.toJSON());
    return jsonObject;
}
// Assemble the JSON conversion options for a single request, overlaying the
// request's own limit parameters and fault handler on the globally configured
// builder. An attribute filter is used only when the request carries a path.
private JsonConvertOptions getJsonConvertOptions(JmxRequest pJmxReq) {
    return convertOptionsBuilder
            .maxDepth(pJmxReq.getParameterAsInt(ConfigKey.MAX_DEPTH))
            .maxCollectionSize(pJmxReq.getParameterAsInt(ConfigKey.MAX_COLLECTION_SIZE))
            .maxObjects(pJmxReq.getParameterAsInt(ConfigKey.MAX_OBJECTS))
            .faultHandler(pJmxReq.getValueFaultHandler())
            .useAttributeFilter(pJmxReq.getPathParts() != null)
            .build();
}
// init various application wide stores for handling history and debug output.
// Creates the history and debug stores with their configured maximum sizes and
// registers the corresponding config MBean via the local dispatcher. Registration
// failures are logged via intError() but deliberately do not abort initialisation.
private void initMBeans(Configuration pConfig) {
    int maxEntries = pConfig.getAsInt(HISTORY_MAX_ENTRIES);
    int maxDebugEntries = pConfig.getAsInt(DEBUG_MAX_ENTRIES);
    historyStore = new HistoryStore(maxEntries);
    debugStore = new DebugStore(maxDebugEntries, pConfig.getAsBoolean(DEBUG));
    try {
        localDispatcher.initMBeans(historyStore, debugStore);
    } catch (NotCompliantMBeanException e) {
        intError("Error registering config MBean: " + e, e);
    } catch (MBeanRegistrationException e) {
        intError("Cannot register MBean: " + e, e);
    } catch (MalformedObjectNameException e) {
        intError("Invalid name for config MBean: " + e, e);
    }
}
// Final private error log for use in the constructor above.
// Logs through the agent's log handler and additionally records the error in the
// debug store so it shows up in the agent's debug history. Only safe to call
// after initMBeans() has created the debug store.
private void intError(String message,Throwable t) {
    logHandler.error(message, t);
    debugStore.log(message, t);
}
}
| |
package io.fabric8.maven.docker.assembly;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import io.fabric8.maven.docker.config.Arguments;
import io.fabric8.maven.docker.config.AssemblyConfiguration;
import io.fabric8.maven.docker.config.AssemblyMode;
import io.fabric8.maven.docker.config.BuildImageConfiguration;
import io.fabric8.maven.docker.util.AnsiLogger;
import io.fabric8.maven.docker.util.DockerFileUtil;
import io.fabric8.maven.docker.util.Logger;
import io.fabric8.maven.docker.util.MojoParameters;
import mockit.Expectations;
import mockit.Injectable;
import mockit.Mock;
import mockit.Mocked;
import mockit.Tested;
import mockit.Verifications;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.MavenArtifactRepository;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.assembly.AssemblerConfigurationSource;
import org.apache.maven.plugins.assembly.InvalidAssemblerConfigurationException;
import org.apache.maven.plugins.assembly.archive.ArchiveCreationException;
import org.apache.maven.plugins.assembly.archive.AssemblyArchiver;
import org.apache.maven.plugins.assembly.format.AssemblyFormattingException;
import org.apache.maven.plugins.assembly.io.AssemblyReadException;
import org.apache.maven.plugins.assembly.io.AssemblyReader;
import org.apache.maven.plugins.assembly.model.Assembly;
import org.apache.maven.plugin.logging.SystemStreamLog;
import org.apache.maven.plugins.assembly.model.FileItem;
import org.apache.maven.project.MavenProject;
import org.apache.maven.settings.Settings;
import org.codehaus.plexus.archiver.ArchiveEntry;
import org.codehaus.plexus.archiver.ArchivedFileSet;
import org.codehaus.plexus.archiver.FileSet;
import org.codehaus.plexus.archiver.ResourceIterator;
import org.codehaus.plexus.archiver.manager.ArchiverManager;
import org.codehaus.plexus.archiver.manager.NoSuchArchiverException;
import org.codehaus.plexus.archiver.tar.TarArchiver;
import org.codehaus.plexus.components.io.resources.PlexusIoResource;
import org.codehaus.plexus.interpolation.fixed.FixedStringSearchInterpolator;
import org.codehaus.plexus.util.ReflectionUtils;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.*;
/**
 * Unit tests for {@code DockerAssemblyManager}: Dockerfile generation, assembly
 * file resolution, verification of user-supplied Dockerfiles (COPY warnings) and
 * creation of the Docker build tar archive with various assembly configurations.
 * Uses JMockit ({@code @Tested}/{@code @Injectable}/{@code Expectations}/
 * {@code Verifications}) for mocking collaborators.
 */
public class DockerAssemblyManagerTest {

    // Object under test; JMockit injects the @Injectable collaborators below.
    @Tested
    private DockerAssemblyManager assemblyManager;

    @Injectable
    private AssemblyArchiver assemblyArchiver;

    @Injectable
    private AssemblyReader assemblyReader;

    @Injectable
    private ArchiverManager archiverManager;

    @Injectable
    private TrackArchiverCollection trackArchivers;

    // Without any assembly the generated Dockerfile must contain neither COPY nor VOLUME.
    @Test
    public void testNoAssembly() {
        BuildImageConfiguration buildConfig = new BuildImageConfiguration();
        List<AssemblyConfiguration> assemblyConfig = buildConfig.getAssemblyConfigurations();
        String content =
                assemblyManager.createDockerFileBuilder(
                        buildConfig, assemblyConfig).content();
        assertFalse(content.contains("COPY"));
        assertFalse(content.contains("VOLUME"));
    }

    // A configured shell is rendered as a SHELL instruction in exec (JSON array) form.
    @Test
    public void testShellIsSet() {
        BuildImageConfiguration buildConfig =
                new BuildImageConfiguration.Builder().shell(
                        new Arguments.Builder().withShell("/bin/sh echo hello").build())
                        .build();
        DockerFileBuilder builder =
                assemblyManager.createDockerFileBuilder(buildConfig, buildConfig.getAssemblyConfigurations());
        String content = builder.content();
        assertTrue(content.contains("SHELL [\"/bin/sh\",\"echo\",\"hello\"]"));
    }

    // getAssemblyFiles() works for a single assembly; the expectations pin how often
    // the output directory is queried (times = 3).
    @Test
    public void assemblyFiles(@Injectable final MojoParameters mojoParams,
                              @Injectable final MavenProject project,
                              @Injectable final Assembly assembly) throws AssemblyFormattingException, ArchiveCreationException, InvalidAssemblerConfigurationException, MojoExecutionException, AssemblyReadException, IllegalAccessException {
        ReflectionUtils.setVariableValueInObject(assemblyManager, "trackArchivers", trackArchivers);
        new Expectations() {{
            mojoParams.getOutputDirectory();
            result = "target/"; times = 3;
            mojoParams.getProject();
            project.getBasedir();
            result = ".";
            assemblyReader.readAssemblies((AssemblerConfigurationSource) any);
            result = Arrays.asList(assembly);
        }};
        BuildImageConfiguration buildConfig = createBuildConfig();
        assemblyManager.getAssemblyFiles("testImage", buildConfig.getAssemblyConfigurations().get(0), mojoParams, new AnsiLogger(new SystemStreamLog(),true,"build"));
    }

    // getAssemblyFiles() resolves each assembly of a multi-assembly config independently;
    // readAssemblies is recorded twice so each call returns its own assembly.
    @Test
    public void multipleAssemblyFiles(@Injectable final MojoParameters mojoParams,
                                      @Injectable final MavenProject project,
                                      @Injectable final Assembly assembly1,
                                      @Injectable final Assembly assembly2) throws AssemblyFormattingException, ArchiveCreationException, InvalidAssemblerConfigurationException, MojoExecutionException, AssemblyReadException, IllegalAccessException {
        ReflectionUtils.setVariableValueInObject(assemblyManager, "trackArchivers", trackArchivers);
        new Expectations() {{
            mojoParams.getOutputDirectory();
            result = "target/"; times = 6;
            mojoParams.getProject();
            project.getBasedir();
            result = ".";
            assemblyReader.readAssemblies((AssemblerConfigurationSource) any);
            result = Collections.singletonList(assembly1);
            assemblyReader.readAssemblies((AssemblerConfigurationSource) any);
            result = Collections.singletonList(assembly2);
        }};
        BuildImageConfiguration buildConfig = createBuildConfigMultiAssembly();
        AssemblyFiles files = assemblyManager.getAssemblyFiles("testImage", buildConfig.getAssemblyConfigurations().get(0), mojoParams, new AnsiLogger(new SystemStreamLog(),true,"build"));
        assertNotNull(files);
        files = assemblyManager.getAssemblyFiles("testImage", buildConfig.getAssemblyConfigurations().get(1), mojoParams, new AnsiLogger(new SystemStreamLog(),true,"build"));
        assertNotNull(files);
    }

    // A Dockerfile whose COPY targets match the assembly must produce no warning.
    @Test
    public void testCopyValidVerifyGivenDockerfile(@Injectable final Logger logger) throws IOException {
        BuildImageConfiguration buildConfig = createBuildConfig();
        assemblyManager.verifyGivenDockerfile(
                new File(getClass().getResource("/docker/Dockerfile_assembly_verify_copy_valid.test").getPath()),
                buildConfig,
                createInterpolator(buildConfig),
                logger);
        new Verifications() {{
            logger.warn(anyString, (Object []) any); times = 0;
        }};
    }

    // A Dockerfile with a COPY that does not match the assembly triggers exactly one warning.
    @Test
    public void testCopyInvalidVerifyGivenDockerfile(@Injectable final Logger logger) throws IOException {
        BuildImageConfiguration buildConfig = createBuildConfig();
        assemblyManager.verifyGivenDockerfile(
                new File(getClass().getResource("/docker/Dockerfile_assembly_verify_copy_invalid.test").getPath()),
                buildConfig, createInterpolator(buildConfig),
                logger);
        new Verifications() {{
            logger.warn(anyString, (Object []) any); times = 1;
        }};
    }

    // COPY with a --chown flag is recognised as valid (no warning).
    @Test
    public void testCopyChownValidVerifyGivenDockerfile(@Injectable final Logger logger) throws IOException {
        BuildImageConfiguration buildConfig = createBuildConfig();
        assemblyManager.verifyGivenDockerfile(
                new File(getClass().getResource("/docker/Dockerfile_assembly_verify_copy_chown_valid.test").getPath()),
                buildConfig,
                createInterpolator(buildConfig),
                logger);
        new Verifications() {{
            logger.warn(anyString, (Object []) any); times = 0;
        }};
    }

    // Multi-assembly config: all COPY targets valid -> no warning.
    @Test
    public void testMultipleCopyValidVerifyGivenDockerfile(@Injectable final Logger logger) throws IOException {
        BuildImageConfiguration buildConfig = createBuildConfigMultiAssembly();
        assemblyManager.verifyGivenDockerfile(
                new File(getClass().getResource("/docker/Dockerfile_assembly_verify_multi_copy_valid.test").getPath()),
                buildConfig,
                createInterpolator(buildConfig),
                logger);
        new Verifications() {{
            logger.warn(anyString, (Object []) any); times = 0;
        }};
    }

    // Multi-assembly config: two invalid COPY instructions -> two warnings.
    @Test
    public void testMultipleCopyInvalidVerifyGivenDockerfile(@Injectable final Logger logger) throws IOException {
        BuildImageConfiguration buildConfig = createBuildConfigMultiAssembly();
        assemblyManager.verifyGivenDockerfile(
                new File(getClass().getResource("/docker/Dockerfile_assembly_verify_multi_copy_invalid.test").getPath()),
                buildConfig, createInterpolator(buildConfig),
                logger);
        new Verifications() {{
            logger.warn(anyString, (Object []) any); times = 2;
        }};
    }

    // Multi-assembly config with --chown COPYs -> no warning.
    @Test
    public void testMultipleCopyChownValidVerifyGivenDockerfile(@Injectable final Logger logger) throws IOException {
        BuildImageConfiguration buildConfig = createBuildConfigMultiAssembly();
        assemblyManager.verifyGivenDockerfile(
                new File(getClass().getResource("/docker/Dockerfile_assembly_verify_multi_copy_chown_valid.test").getPath()),
                buildConfig,
                createInterpolator(buildConfig),
                logger);
        new Verifications() {{
            logger.warn(anyString, (Object []) any); times = 0;
        }};
    }

    // External Dockerfile, no assembly: archive contains the build dir fileset plus the
    // context fileset with target/** and the Dockerfile itself excluded.
    @Test
    public void testArchiveCreationDockerfileNoAssembly(@Injectable final TarArchiver tarArchiver,
                                                        @Injectable final Logger logger) throws MojoExecutionException, NoSuchArchiverException {
        MojoParameters mojoParams = mockMojoParams(mockMavenProject());
        BuildImageConfiguration buildImageConfiguration = new BuildImageConfiguration.Builder()
                .dockerFile(DockerAssemblyManagerTest.class.getResource("/docker/Dockerfile.test").getPath())
                .build();
        buildImageConfiguration.initAndValidate(logger);
        File tarArchive = assemblyManager.createDockerTarArchive("test_image", mojoParams, buildImageConfiguration, logger, null);
        assertNotNull(tarArchive);
        new Verifications() {{
            archiverManager.getArchiver("tar");
            times = 1;
            List<FileSet> fileSets = new ArrayList<>();
            tarArchiver.addFileSet(withCapture(fileSets));
            assertEquals(2, fileSets.size());
            assertEquals("build", fileSets.get(0).getDirectory().getName());
            assertNull(fileSets.get(0).getIncludes());
            assertNull(fileSets.get(0).getExcludes());
            assertArrayEquals(new String[]{"target/**", "Dockerfile.test"}, fileSets.get(1).getExcludes());
            assertNull(fileSets.get(1).getIncludes());
        }};
    }

    // As above, but with an (empty) dir-mode assembly: same two filesets are expected.
    @Test
    public void testArchiveCreationDockerfileWithDirAssembly(@Injectable final TarArchiver tarArchiver,
                                                             @Injectable final Logger logger) throws MojoExecutionException, NoSuchArchiverException {
        MojoParameters mojoParams = mockMojoParams(mockMavenProject());
        BuildImageConfiguration buildImageConfiguration = new BuildImageConfiguration.Builder()
                .dockerFile(DockerAssemblyManagerTest.class.getResource("/docker/Dockerfile.test").getPath())
                .assembly(new AssemblyConfiguration.Builder()
                        .mode(AssemblyMode.dir.name())
                        .build()
                )
                .build();
        buildImageConfiguration.initAndValidate(logger);
        File tarArchive = assemblyManager.createDockerTarArchive("test_image", mojoParams, buildImageConfiguration, logger, null);
        assertNotNull(tarArchive);
        new Verifications() {{
            archiverManager.getArchiver("tar");
            times = 1;
            List<FileSet> fileSets = new ArrayList<>();
            tarArchiver.addFileSet(withCapture(fileSets));
            assertEquals(2, fileSets.size());
            assertEquals("build", fileSets.get(0).getDirectory().getName());
            assertNull(fileSets.get(0).getIncludes());
            assertNull(fileSets.get(0).getExcludes());
            assertArrayEquals(new String[]{"target/**", "Dockerfile.test"}, fileSets.get(1).getExcludes());
            assertNull(fileSets.get(1).getIncludes());
        }};
    }

    // tar-mode assembly: the assembly is added as an archived fileset under the
    // assembly-name prefix ("maven/") instead of a plain fileset.
    @Test
    public void testArchiveCreationDockerfileWithArchiveAssembly(@Injectable final TarArchiver tarArchiver,
                                                                 @Injectable final Logger logger) throws MojoExecutionException, NoSuchArchiverException {
        MojoParameters mojoParams = mockMojoParams(mockMavenProject());
        BuildImageConfiguration buildImageConfiguration = new BuildImageConfiguration.Builder()
                .dockerFile(DockerAssemblyManagerTest.class.getResource("/docker/Dockerfile.test").getPath())
                .assembly(new AssemblyConfiguration.Builder()
                        .mode(AssemblyMode.tar.name())
                        .assemblyDef(new Assembly())
                        .build()
                )
                .build();
        buildImageConfiguration.initAndValidate(logger);
        File tarArchive = assemblyManager.createDockerTarArchive("test_image", mojoParams, buildImageConfiguration, logger, null);
        assertNotNull(tarArchive);
        new Verifications() {{
            archiverManager.getArchiver("tar");
            times = 1;
            List<FileSet> fileSets = new ArrayList<>();
            tarArchiver.addFileSet(withCapture(fileSets));
            assertEquals(1, fileSets.size());
            assertArrayEquals(new String[]{"target/**", "Dockerfile.test"}, fileSets.get(0).getExcludes());
            assertNull(fileSets.get(0).getIncludes());
            tarArchiver.addFile(new File("target/test_image/build/Dockerfile.test"), "Dockerfile.test");
            List<ArchivedFileSet> archivedFileSets = new ArrayList<>();
            tarArchiver.addArchivedFileSet(withCapture(archivedFileSets));
            assertEquals(1, archivedFileSets.size());
            assertEquals(new File("target/test_image/build/maven.tar"), archivedFileSets.get(0).getArchive());
            assertEquals("maven/", archivedFileSets.get(0).getPrefix());
        }};
    }

    // Two tar-mode assemblies: each one becomes its own archived fileset with its
    // name as prefix ("first/", "second/").
    @Test
    public void testArchiveCreationDockerfileWithMultipleArchiveAssemblies(@Injectable final TarArchiver tarArchiver,
                                                                           @Injectable final Logger logger) throws MojoExecutionException, NoSuchArchiverException {
        MojoParameters mojoParams = mockMojoParams(mockMavenProject());
        BuildImageConfiguration buildImageConfiguration = new BuildImageConfiguration.Builder()
                .dockerFile(DockerAssemblyManagerTest.class.getResource("/docker/Dockerfile.test").getPath())
                .assemblies(Arrays.asList(
                        new AssemblyConfiguration.Builder()
                                .name("first")
                                .mode(AssemblyMode.tar.name())
                                .assemblyDef(new Assembly())
                                .build(),
                        new AssemblyConfiguration.Builder()
                                .name("second")
                                .mode(AssemblyMode.tar.name())
                                .assemblyDef(new Assembly())
                                .build()
                ))
                .build();
        buildImageConfiguration.initAndValidate(logger);
        File tarArchive = assemblyManager.createDockerTarArchive("test_image", mojoParams, buildImageConfiguration, logger, null);
        assertNotNull(tarArchive);
        new Verifications() {{
            archiverManager.getArchiver("tar");
            times = 1;
            List<FileSet> fileSets = new ArrayList<>();
            tarArchiver.addFileSet(withCapture(fileSets));
            assertEquals(1, fileSets.size());
            assertArrayEquals(new String[]{"target/**", "Dockerfile.test"}, fileSets.get(0).getExcludes());
            assertNull(fileSets.get(0).getIncludes());
            tarArchiver.addFile(new File("target/test_image/build/Dockerfile.test"), "Dockerfile.test");
            List<ArchivedFileSet> archivedFileSets = new ArrayList<>();
            tarArchiver.addArchivedFileSet(withCapture(archivedFileSets));
            assertEquals(2, archivedFileSets.size());
            assertEquals(new File("target/test_image/build/first.tar"), archivedFileSets.get(0).getArchive());
            assertEquals("first/", archivedFileSets.get(0).getPrefix());
            assertEquals(new File("target/test_image/build/second.tar"), archivedFileSets.get(1).getArchive());
            assertEquals("second/", archivedFileSets.get(1).getPrefix());
        }};
    }

    // No external Dockerfile: a generated Dockerfile is added, plus one archived
    // fileset per assembly.
    @Test
    public void testArchiveCreationNoDockerfileWithMultipleArchiveAssemblies(@Injectable final TarArchiver tarArchiver,
                                                                             @Injectable final Logger logger) throws MojoExecutionException, NoSuchArchiverException {
        MojoParameters mojoParams = mockMojoParams(mockMavenProject());
        BuildImageConfiguration buildImageConfiguration = new BuildImageConfiguration.Builder()
                .assemblies(Arrays.asList(
                        new AssemblyConfiguration.Builder()
                                .name("first")
                                .mode(AssemblyMode.tar.name())
                                .assemblyDef(new Assembly())
                                .build(),
                        new AssemblyConfiguration.Builder()
                                .name("second")
                                .mode(AssemblyMode.tar.name())
                                .assemblyDef(new Assembly())
                                .build()
                ))
                .build();
        buildImageConfiguration.initAndValidate(logger);
        File tarArchive = assemblyManager.createDockerTarArchive("test_image", mojoParams, buildImageConfiguration, logger, null);
        assertNotNull(tarArchive);
        new Verifications() {{
            archiverManager.getArchiver("tar");
            times = 1;
            tarArchiver.addFile(new File("target/test_image/build/Dockerfile"), "Dockerfile");
            List<ArchivedFileSet> archivedFileSets = new ArrayList<>();
            tarArchiver.addArchivedFileSet(withCapture(archivedFileSets));
            assertEquals(2, archivedFileSets.size());
            assertEquals(new File("target/test_image/build/first.tar"), archivedFileSets.get(0).getArchive());
            assertEquals("first/", archivedFileSets.get(0).getPrefix());
            assertEquals(new File("target/test_image/build/second.tar"), archivedFileSets.get(1).getArchive());
            assertEquals("second/", archivedFileSets.get(1).getPrefix());
        }};
    }

    // permissions=exec: archive entries are re-added with mode 0755. The mocked
    // ResourceIterator yields a single entry exactly once.
    @Test
    public void testArchiveCreationNoDockerfileWithExecutableAssemblies(@Mocked final PlexusIoResource resource,
                                                                        @Mocked final ArchiveEntry archiveEntry,
                                                                        @Mocked final TarArchiver tarArchiver,
                                                                        @Injectable final Logger logger) throws MojoExecutionException, NoSuchArchiverException, IOException {
        MojoParameters mojoParams = mockMojoParams(mockMavenProject());
        FileItem testFile = new FileItem();
        testFile.setDestName("test.txt");
        testFile.setSource("test.in");
        Assembly testAssembly = new Assembly();
        testAssembly.addFile(testFile);
        new Expectations() {{
            archiveEntry.getName();
            result = "test";
            archiveEntry.getMode();
            result = 0644;
            archiveEntry.getResource();
            result = resource;
            tarArchiver.getResources();
            result = new ResourceIterator() {
                boolean consumed = false;
                @Override
                public boolean hasNext() {
                    return !consumed;
                }
                @Override
                public ArchiveEntry next() {
                    if (consumed) {
                        return null;
                    }
                    consumed = true;
                    return archiveEntry;
                }
            };
        }};
        BuildImageConfiguration buildImageConfiguration = new BuildImageConfiguration.Builder()
                .assemblies(Collections.singletonList(
                        new AssemblyConfiguration.Builder()
                                .name("first")
                                .mode(AssemblyMode.tar.name())
                                .assemblyDef(testAssembly)
                                .permissions(AssemblyConfiguration.PermissionMode.exec.name())
                                .build()
                ))
                .build();
        buildImageConfiguration.initAndValidate(logger);
        File tarArchive = assemblyManager.createDockerTarArchive("test_image", mojoParams, buildImageConfiguration, logger, null);
        assertNotNull(tarArchive);
        new Verifications() {{
            archiverManager.getArchiver("tar");
            times = 1;
            tarArchiver.addFile(new File("target/test_image/build/Dockerfile"), "Dockerfile");
            List<ArchivedFileSet> archivedFileSets = new ArrayList<>();
            tarArchiver.addArchivedFileSet(withCapture(archivedFileSets));
            assertEquals(1, archivedFileSets.size());
            assertEquals(new File("target/test_image/build/first.tar"), archivedFileSets.get(0).getArchive());
            assertEquals("first/", archivedFileSets.get(0).getPrefix());
            tarArchiver.addResource((PlexusIoResource) any, "test", 0755);
        }};
    }

    // Single assembly based on the standard "artifact" descriptor.
    private BuildImageConfiguration createBuildConfig() {
        return new BuildImageConfiguration.Builder()
                .assembly(new AssemblyConfiguration.Builder()
                        .descriptorRef("artifact")
                        .build())
                .build();
    }

    // Two assemblies ("deps" for dependencies, default-named for the artifact).
    private BuildImageConfiguration createBuildConfigMultiAssembly() {
        return new BuildImageConfiguration.Builder()
                .from("busybox:latest")
                .assemblies(
                        Arrays.asList(
                                new AssemblyConfiguration.Builder()
                                        .descriptorRef("dependencies")
                                        .name("deps")
                                        .build(),
                                new AssemblyConfiguration.Builder()
                                        .descriptorRef("artifact")
                                        .build()
                        ))
                .build();
    }

    // Interpolator used by verifyGivenDockerfile() to resolve Maven properties.
    private FixedStringSearchInterpolator createInterpolator(BuildImageConfiguration buildConfig) {
        MavenProject project = mockMavenProject();
        return DockerFileUtil.createInterpolator(mockMojoParams(project), buildConfig.getFilter());
    }

    // Minimal real MavenProject (no mocking needed).
    private MavenProject mockMavenProject() {
        MavenProject project = new MavenProject();
        project.setArtifactId("docker-maven-plugin");
        return project;
    }

    // MojoParameters backed by a minimal MavenSession with a stubbed local repository.
    private MojoParameters mockMojoParams(MavenProject project) {
        Settings settings = new Settings();
        ArtifactRepository localRepository = new MavenArtifactRepository() {
            @Mock
            public String getBasedir() {
                return "repository";
            }
        };
        @SuppressWarnings("deprecation")
        MavenSession session = new MavenSession(null, settings, localRepository, null, null, Collections.<String>emptyList(), ".", null, null, new Date());
        return new MojoParameters(session, project, null, null, null, settings, "src", "target", Collections.singletonList(project));
    }
}
| |
/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.runtime.value;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.text.MessageFormat;
import com.asakusafw.runtime.io.util.WritableRawComparable;
/**
 * Represents a {@code long} value which can be {@code null}.
 * <p>
 * The serialized form is 1 header byte (0 = null, non-zero = present) followed,
 * for non-null values, by 8 bytes of payload.
 * </p>
 */
public final class LongOption extends ValueOption<LongOption> {

    private long value;

    /**
     * Creates a new instance which represents {@code null} value.
     */
    public LongOption() {
        this.nullValue = true;
    }

    /**
     * Creates a new instance which represents the specified value.
     * @param value the initial value
     */
    public LongOption(long value) {
        this.value = value;
        this.nullValue = false;
    }

    /**
     * Returns the value which this object represents.
     * @return the value which this object represents, never {@code null}
     * @throws NullPointerException if this object represents {@code null}
     */
    public long get() {
        if (nullValue) {
            throw new NullPointerException();
        }
        return value;
    }

    /**
     * Returns the value which this object represents.
     * @param alternate the alternative value for {@code null}
     * @return the value which this object represents, or the alternative one if this object represents {@code null}
     */
    public long or(long alternate) {
        if (nullValue) {
            return alternate;
        }
        return value;
    }

    /**
     * Adds a value into this object.
     * @param delta the value to be added
     * @throws NullPointerException if this object represents {@code null}
     */
    public void add(long delta) {
        if (nullValue) {
            throw new NullPointerException();
        }
        this.value += delta;
    }

    /**
     * Adds a value into this object.
     * @param other the value to be added, or {@code null} to do nothing
     * @throws NullPointerException if this object represents {@code null}
     */
    public void add(LongOption other) {
        if (nullValue) {
            throw new NullPointerException();
        }
        if (other.nullValue) {
            return;
        }
        this.value += other.value;
    }

    /**
     * Sets the value.
     * @param newValue the value (nullable)
     * @return this
     * @see ValueOption#setNull()
     * @deprecated Application developer should not use this method directly
     */
    @Deprecated
    public LongOption modify(long newValue) {
        this.nullValue = false;
        this.value = newValue;
        return this;
    }

    @Override
    @Deprecated
    public void copyFrom(LongOption optionOrNull) {
        if (optionOrNull == null || optionOrNull.nullValue) {
            this.nullValue = true;
        } else {
            this.nullValue = false;
            this.value = optionOrNull.value;
        }
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        if (isNull()) {
            return 1;
        }
        int result = 1;
        result = prime * result + (int) (value ^ (value >>> 32));
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        LongOption other = (LongOption) obj;
        if (nullValue != other.nullValue) {
            return false;
        }
        if (!nullValue && value != other.value) {
            return false;
        }
        return true;
    }

    /**
     * Returns whether both this object and the specified value represents an equivalent value or not.
     * @param other the target value
     * @return {@code true} if this object has the specified value, otherwise {@code false}
     */
    public boolean has(long other) {
        if (isNull()) {
            return false;
        }
        return value == other;
    }

    @Override
    public int compareTo(WritableRawComparable o) {
        LongOption other = (LongOption) o;
        // null sorts before any present value
        if (nullValue || other.nullValue) {
            if (nullValue && other.nullValue) {
                return 0;
            }
            return nullValue ? -1 : +1;
        }
        return Long.compare(value, other.value);
    }

    @Override
    public String toString() {
        if (isNull()) {
            return String.valueOf((Object) null);
        } else {
            return String.valueOf(value);
        }
    }

    @Override
    public void write(DataOutput out) throws IOException {
        if (isNull()) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            // The payload is biased by Long.MIN_VALUE; presumably so that the raw
            // serialized bytes compare in numeric order (see compareBytes).
            out.writeLong(value - Long.MIN_VALUE);
        }
    }

    @SuppressWarnings("deprecation")
    @Override
    public void readFields(DataInput in) throws IOException {
        if (in.readBoolean()) {
            // undo the Long.MIN_VALUE bias applied in write()
            modify(in.readLong() + Long.MIN_VALUE);
        } else {
            setNull();
        }
    }

    @SuppressWarnings("deprecation")
    @Override
    public int restore(byte[] bytes, int offset, int limit) throws IOException {
        if (limit - offset == 0) {
            throw new IOException(MessageFormat.format(
                    "Cannot restore a long field ({0})",
                    "invalid length"));
        }
        if (bytes[offset + 0] == 0) {
            setNull();
            return 1;
        } else if (limit - offset >= 8 + 1) {
            // BUGFIX: a non-null value occupies the 1-byte header plus 8 payload bytes;
            // the previous check (limit - offset >= 1 + 1) allowed readLong() to read
            // past the given limit.
            modify(ByteArrayUtil.readLong(bytes, offset + 1) + Long.MIN_VALUE);
            return 8 + 1;
        } else {
            throw new IOException(MessageFormat.format(
                    "Cannot restore a long field ({0})",
                    "invalid length"));
        }
    }

    @Override
    public int getSizeInBytes(byte[] buf, int offset) throws IOException {
        return getBytesLength(buf, offset, buf.length - offset);
    }

    @Override
    public int compareInBytes(byte[] b1, int o1, byte[] b2, int o2) throws IOException {
        return compareBytes(b1, o1, b1.length - o1, b2, o2, b2.length - o2);
    }

    /**
     * Returns the actual number of bytes from the serialized byte array:
     * 1 byte for {@code null}, 9 bytes (header + payload) otherwise.
     * @param bytes the target byte array
     * @param offset the beginning index in the byte array (inclusive)
     * @param length the limit length of the byte array (not inspected by this implementation)
     * @return the number of bytes in the serialized form
     */
    public static int getBytesLength(byte[] bytes, int offset, int length) {
        return bytes[offset] == 0 ? 1 : 9;
    }

    /**
     * Compares between the two objects in serialized form.
     * @param b1 the first byte array to be compared
     * @param s1 the beginning index in {@code b1}
     * @param l1 the limit byte size in {@code b1}
     * @param b2 the second byte array to be compared
     * @param s2 the beginning index in {@code b2}
     * @param l2 the limit byte size in {@code b2}
     * @return the comparison result
     */
    public static int compareBytes(
            byte[] b1, int s1, int l1,
            byte[] b2, int s2, int l2) {
        int len1 = getBytesLength(b1, s1, l1);
        int len2 = getBytesLength(b2, s2, l2);
        return ByteArrayUtil.compare(b1, s1, len1, b2, s2, len2);
    }
}
| |
/**
* Appcelerator Titanium Mobile
* Copyright (c) 2009-2013 by Appcelerator, Inc. All Rights Reserved.
* Licensed under the terms of the Apache Public License
* Please see the LICENSE included with this distribution for details.
*/
package ti.modules.titanium.ui.widget;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import org.appcelerator.kroll.KrollDict;
import org.appcelerator.kroll.KrollProxy;
import org.appcelerator.kroll.common.AsyncResult;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.kroll.common.TiMessenger;
import org.appcelerator.titanium.TiApplication;
import org.appcelerator.titanium.TiBlob;
import org.appcelerator.titanium.TiC;
import org.appcelerator.titanium.TiLifecycle.OnLifecycleEvent;
import org.appcelerator.titanium.proxy.TiViewProxy;
import org.appcelerator.titanium.util.TiConvert;
import org.appcelerator.titanium.util.TiDownloadListener;
import org.appcelerator.titanium.util.TiDownloadManager;
import org.appcelerator.titanium.util.TiImageLruCache;
import org.appcelerator.titanium.util.TiLoadImageListener;
import org.appcelerator.titanium.util.TiLoadImageManager;
import org.appcelerator.titanium.util.TiResponseCache;
import org.appcelerator.titanium.util.TiUrl;
import org.appcelerator.titanium.view.TiDrawableReference;
import org.appcelerator.titanium.view.TiUIView;
import ti.modules.titanium.filesystem.FileProxy;
import ti.modules.titanium.ui.ImageViewProxy;
import ti.modules.titanium.ui.ScrollViewProxy;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.os.Bundle;
import android.view.View;
import android.view.ViewParent;
/**
 * Android implementation of Titanium's ImageView UI element.
 *
 * Supports a single image (with memory/disk caching and async download) as well as
 * frame-based animations driven by the "images" property. Animations use a
 * producer/consumer pair: a background {@link Loader} thread decodes frames into a
 * bounded queue, and an {@link Animator} TimerTask takes frames off the queue and
 * pushes them to the UI on a fixed period.
 */
public class TiUIImageView extends TiUIView implements OnLifecycleEvent, Handler.Callback
{
    private static final String TAG = "TiUIImageView";
    // Capacity of the frame queue between the Loader (producer) and Animator (consumer).
    private static final int FRAME_QUEUE_SIZE = 5;
    // Minimum per-frame duration in ms, enforced by getDuration().
    public static final int MIN_DURATION = 30;
    // Per-frame duration in ms used when the proxy has no "duration" property.
    public static final int DEFAULT_DURATION = 200;

    // Schedules the Animator at a fixed period; created in handleStart(), cancelled in handleStop()/release().
    private Timer timer;
    // TimerTask that consumes decoded frames from the Loader's queue and displays them.
    private Animator animator;
    // Background producer that decodes animation frames into the bounded queue.
    private Loader loader;
    private Thread loaderThread;
    private AtomicBoolean animating = new AtomicBoolean(false);
    private AtomicBoolean isLoading = new AtomicBoolean(false);
    private AtomicBoolean isStopping = new AtomicBoolean(false);
    private boolean reverse = false;
    private boolean paused = false;
    // True once a "load" event has been fired for the current source; reset when the source changes.
    private boolean firedLoad;
    private ImageViewProxy imageViewProxy;
    // Frame period in ms captured when the animation starts.
    private int currentDuration;
    // Current image source(s): one entry for a single image, multiple entries for an animation.
    private ArrayList<TiDrawableReference> imageSources;
    private TiDrawableReference defaultImageSource;
    private TiDownloadListener downloadListener;
    private TiLoadImageListener loadImageListener;
    // Guards imageSources against concurrent teardown in release().
    private Object releasedLock = new Object();
    // Routes SET_IMAGE/START/STOP/SET_TINT requests onto the main (UI) thread.
    private Handler mainHandler = new Handler(Looper.getMainLooper(), this);
    private static final int SET_IMAGE = 10001;
    private static final int START = 10002;
    private static final int STOP = 10003;
    private static final int SET_TINT = 10004;

    // This handles the memory cache of images.
    private TiImageLruCache mMemoryCache = TiImageLruCache.getInstance();

    /**
     * Creates the native TiImageView and wires up the download / load-image
     * callbacks used for asynchronous image fetching.
     *
     * @param proxy the owning ImageViewProxy
     */
    public TiUIImageView(final TiViewProxy proxy)
    {
        super(proxy);
        imageViewProxy = (ImageViewProxy) proxy;

        Log.d(TAG, "Creating an ImageView", Log.DEBUG_MODE);

        TiImageView view = new TiImageView(proxy.getActivity(), proxy);

        downloadListener = new TiDownloadListener() {
            @Override
            public void downloadTaskFinished(URI uri)
            {
                if (!TiResponseCache.peek(uri)) {
                    // The requested image did not make it into our TiResponseCache,
                    // possibly because it had a header forbidding that. Now get it
                    // via the "old way" (not relying on cache).
                    TiLoadImageManager.getInstance().load(TiDrawableReference.fromUrl(imageViewProxy, uri.toString()),
                                                          loadImageListener);
                }
            }

            @Override
            public void downloadTaskFailed(URI uri)
            {
                // If the download failed, fire an error event
                fireError("Download Failed", uri.toString());
            }

            // Handle decoding and caching in the background thread so it won't block UI.
            @Override
            public void postDownload(URI uri)
            {
                if (TiResponseCache.peekFollowingRedirects(uri)) {
                    handleCacheAndSetImage(TiDrawableReference.fromUrl(imageViewProxy, uri.toString()));
                }
            }
        };

        loadImageListener = new TiLoadImageListener() {
            @Override
            public void loadImageFinished(int hash, Bitmap bitmap)
            {
                // Cache the image
                if (bitmap != null) {
                    if (mMemoryCache.get(hash) == null) {
                        mMemoryCache.put(hash, bitmap);
                    }

                    // Update UI if the current image source has not been changed.
                    if (imageSources != null && imageSources.size() == 1) {
                        TiDrawableReference imgsrc = imageSources.get(0);
                        if (imgsrc == null) {
                            return;
                        }
                        // Accept the result if the hash matches either the raw source URL or
                        // its cleaned-up (canonicalized) form.
                        if (imgsrc.hashCode() == hash
                            || (imgsrc.getUrl() != null
                                && TiDrawableReference
                                       .fromUrl(imageViewProxy, TiUrl.getCleanUri(imgsrc.getUrl()).toString())
                                       .hashCode()
                                   == hash)) {
                            setImage(bitmap);
                            if (!firedLoad) {
                                fireLoad(TiC.PROPERTY_IMAGE);
                                firedLoad = true;
                            }
                        }
                    }
                }
            }

            @Override
            public void loadImageFailed()
            {
                Log.w(TAG, "Unable to load image", Log.DEBUG_MODE);
            }
        };

        setNativeView(view);
        // TODO proxy.getActivity().addOnLifecycleEventListener(this);
    }

    @Override
    public void setProxy(TiViewProxy proxy)
    {
        super.setProxy(proxy);
        imageViewProxy = (ImageViewProxy) proxy;
    }

    /** Returns the underlying native TiImageView (may be null after release). */
    private TiImageView getView()
    {
        return (TiImageView) nativeView;
    }

    /**
     * Returns the parent native view, falling back to the parent proxy's native
     * view when the view hierarchy has not been attached yet.
     */
    protected View getParentView()
    {
        if (nativeView == null) {
            return null;
        }

        ViewParent parent = nativeView.getParent();
        if (parent instanceof View) {
            return (View) parent;
        }
        if (parent == null) {
            TiViewProxy parentProxy = proxy.getParent();
            if (parentProxy != null) {
                TiUIView parentTiUi = parentProxy.peekView();
                if (parentTiUi != null) {
                    return parentTiUi.getNativeView();
                }
            }
        }
        return null;
    }

    /**
     * Main-thread message dispatcher for requests posted from background threads.
     *
     * @return true when the message was handled here
     */
    public boolean handleMessage(Message msg)
    {
        switch (msg.what) {
            case SET_IMAGE:
                AsyncResult result = (AsyncResult) msg.obj;
                handleSetImage((Bitmap) result.getArg());
                // Unblocks the sender of the blocking main-thread message.
                result.setResult(null);
                return true;
            case START:
                handleStart();
                return true;
            case STOP:
                handleStop();
                return true;
            case SET_TINT:
                handleTint((String) msg.obj);
                return true;
            default:
                return false;
        }
    }

    /**
     * Decodes the given reference, caches the bitmap, and updates the UI —
     * but only if it still matches the current (single) image source.
     */
    private void handleCacheAndSetImage(TiDrawableReference imageref)
    {
        // Don't update UI if the current image source has been changed.
        if (imageSources != null && imageSources.size() == 1) {
            TiDrawableReference imgsrc = imageSources.get(0);
            if (imgsrc == null || imgsrc.getUrl() == null) {
                return;
            }
            if (imageref.equals(imgsrc)
                || imageref.equals(
                       TiDrawableReference.fromUrl(imageViewProxy, TiUrl.getCleanUri(imgsrc.getUrl()).toString()))) {
                int hash = imageref.hashCode();
                Bitmap bitmap = imageref.getBitmap(true);
                if (bitmap != null) {
                    if (mMemoryCache.get(hash) == null) {
                        mMemoryCache.put(hash, bitmap);
                    }
                    setImage(bitmap);
                    if (!firedLoad) {
                        fireLoad(TiC.PROPERTY_IMAGE);
                        firedLoad = true;
                    }
                }
            }
        }
    }

    /**
     * Displays the bitmap, marshalling to the UI thread if needed.
     * Blocks the caller until the UI thread has applied the bitmap.
     */
    private void setImage(final Bitmap bitmap)
    {
        if (!TiApplication.isUIThread()) {
            TiMessenger.sendBlockingMainMessage(mainHandler.obtainMessage(SET_IMAGE), bitmap);
        } else {
            handleSetImage(bitmap);
        }
    }

    private void handleSetImage(final Bitmap bitmap)
    {
        TiImageView view = getView();
        if (view != null) {
            view.setImageBitmap(bitmap);
        }
    }

    /** A decoded animation frame paired with its index in imageSources. */
    private class BitmapWithIndex
    {
        public BitmapWithIndex(Bitmap b, int i)
        {
            this.bitmap = b;
            this.index = i;
        }

        public Bitmap bitmap;
        public int index;
    }

    /**
     * Producer side of the animation: decodes frames (optionally caching them)
     * and offers them into a bounded queue consumed by the Animator.
     * Runs on its own thread; honors pause (wait/notify), stop, and release.
     */
    private class Loader implements Runnable
    {
        private ArrayBlockingQueue<BitmapWithIndex> bitmapQueue;
        // Hashes of frames this run put into the LRU cache, so they can be evicted afterwards.
        private LinkedList<Integer> hashTable;
        private int waitTime = 0;
        private int sleepTime = 50; //ms
        private int repeatIndex = 0;

        public Loader()
        {
            bitmapQueue = new ArrayBlockingQueue<BitmapWithIndex>(FRAME_QUEUE_SIZE);
            hashTable = new LinkedList<Integer>();
        }

        // repeatCount <= 0 means repeat forever.
        private boolean isRepeating()
        {
            int repeatCount = getRepeatCount();
            if (repeatCount <= 0) {
                return true;
            }
            return repeatIndex < repeatCount;
        }

        // First frame index, depending on playback direction.
        private int getStart()
        {
            if (imageSources == null) {
                return 0;
            }
            if (reverse) {
                return imageSources.size() - 1;
            }
            return 0;
        }

        // Loop condition for the frame index, direction-aware.
        private boolean isNotFinalFrame(int frame)
        {
            synchronized (releasedLock)
            {
                if (imageSources == null) {
                    return false;
                }
                if (reverse) {
                    return frame >= 0;
                }
                return frame < imageSources.size();
            }
        }

        // Frame index step: -1 when playing in reverse, +1 otherwise.
        private int getCounter()
        {
            if (reverse) {
                return -1;
            }
            return 1;
        }

        public void run()
        {
            if (getProxy() == null) {
                Log.d(TAG, "Multi-image loader exiting early because proxy has been gc'd");
                return;
            }
            repeatIndex = 0;
            isLoading.set(true);
            firedLoad = false;
            // Cache decoded frames in the LRU when the animation repeats often enough
            // (presumably to avoid re-decoding each cycle — threshold of 5 repeats).
            boolean shouldCache = getRepeatCount() >= 5 ? true : false;
        topLoop:
            while (isRepeating()) {
                if (imageSources == null) {
                    break;
                }
                long time = System.currentTimeMillis();
                for (int j = getStart(); imageSources != null && isNotFinalFrame(j); j += getCounter()) {
                    // Fire the "load" event once the queue is primed with frames.
                    if (bitmapQueue.size() == FRAME_QUEUE_SIZE && !firedLoad) {
                        fireLoad(TiC.PROPERTY_IMAGES);
                        firedLoad = true;
                    }
                    if (paused && !Thread.currentThread().isInterrupted()) {
                        try {
                            Log.i(TAG, "Pausing", Log.DEBUG_MODE);
                            // User backed-out while animation running
                            if (loader == null) {
                                break;
                            }
                            synchronized (this)
                            {
                                wait();
                            }
                            Log.i(TAG, "Waking from pause.", Log.DEBUG_MODE);
                            // In the meantime, while paused, user could have backed out, which leads
                            // to release(), which in turn leads to nullified imageSources.
                            if (imageSources == null) {
                                break topLoop;
                            }
                        } catch (InterruptedException e) {
                            Log.w(TAG, "Interrupted from paused state.");
                        }
                    }
                    if (!isLoading.get() || isStopping.get()) {
                        break topLoop;
                    }
                    waitTime = 0;
                    synchronized (releasedLock)
                    {
                        if (imageSources == null || j >= imageSources.size()) {
                            break topLoop;
                        }
                        TiDrawableReference imageRef = imageSources.get(j);
                        Bitmap b = null;
                        if (shouldCache) {
                            int hash = imageRef.hashCode();
                            b = mMemoryCache.get(hash);
                            if (b == null) {
                                Log.i(TAG, "Image isn't cached");
                                b = imageRef.getBitmap(true);
                                mMemoryCache.put(hash, b);
                                hashTable.add(hash);
                            }
                        } else {
                            b = imageRef.getBitmap(true);
                        }
                        BitmapWithIndex bIndex = new BitmapWithIndex(b, j);
                        // Offer the frame, retrying while the queue is full; give up after
                        // roughly one full animation cycle worth of waiting.
                        while (waitTime < getDuration() * imageSources.size()) {
                            try {
                                if (!bitmapQueue.offer(bIndex)) {
                                    if (isStopping.get()) {
                                        break;
                                    }
                                    Thread.sleep(sleepTime);
                                    waitTime += sleepTime;
                                } else {
                                    break;
                                }
                            } catch (InterruptedException e) {
                                Log.w(TAG, "Interrupted while adding Bitmap into bitmapQueue");
                                break;
                            }
                        }
                    }
                    // NOTE(review): repeatIndex is incremented once per *frame* here (inside
                    // the for loop), so isRepeating() compares repeatCount against frames
                    // produced rather than completed animation cycles — confirm intended.
                    repeatIndex++;
                }
                Log.d(TAG, "TIME TO LOAD FRAMES: " + (System.currentTimeMillis() - time) + "ms", Log.DEBUG_MODE);
            }
            isLoading.set(false);
            //clean out the cache after animation
            while (!hashTable.isEmpty()) {
                mMemoryCache.remove(hashTable.pop());
            }
        }

        public ArrayBlockingQueue<BitmapWithIndex> getBitmapQueue()
        {
            return bitmapQueue;
        }
    }

    /**
     * Kicks off the background Loader thread for a multi-frame animation.
     * Fires an error event when no image sources are set.
     */
    private void setImages()
    {
        if (imageSources == null || imageSources.size() == 0) {
            fireError("Missing Images", null);
            return;
        }

        if (loader == null) {
            paused = false;
            isStopping.set(false);
            firedLoad = false;
            loader = new Loader();
            loaderThread = new Thread(loader);
            Log.d(TAG, "STARTING LOADER THREAD " + loaderThread + " for " + this, Log.DEBUG_MODE);
            loaderThread.start();
        }
    }

    /**
     * Returns the per-frame duration in ms, clamped to MIN_DURATION.
     * Writes DEFAULT_DURATION back onto the proxy when no duration is set.
     */
    public double getDuration()
    {
        if (proxy.getProperty(TiC.PROPERTY_DURATION) != null) {
            double duration = TiConvert.toDouble(proxy.getProperty(TiC.PROPERTY_DURATION));
            if (duration < MIN_DURATION) {
                return MIN_DURATION;
            } else {
                return duration;
            }
        }
        proxy.setProperty(TiC.PROPERTY_DURATION, DEFAULT_DURATION);
        return DEFAULT_DURATION;
    }

    /** Returns the proxy's repeatCount property, or 0 (repeat forever) when unset. */
    public int getRepeatCount()
    {
        if (proxy.hasProperty(TiC.PROPERTY_REPEAT_COUNT)) {
            return TiConvert.toInt(proxy.getProperty(TiC.PROPERTY_REPEAT_COUNT));
        }
        return 0;
    }

    private void fireLoad(String state)
    {
        KrollDict data = new KrollDict();
        data.put(TiC.EVENT_PROPERTY_STATE, state);
        fireEvent(TiC.EVENT_LOAD, data);
    }

    private void fireStart()
    {
        KrollDict data = new KrollDict();
        fireEvent(TiC.EVENT_START, data);
    }

    private void fireChange(int index)
    {
        KrollDict data = new KrollDict();
        data.put(TiC.EVENT_PROPERTY_INDEX, index);
        fireEvent(TiC.EVENT_CHANGE, data);
    }

    private void fireStop()
    {
        KrollDict data = new KrollDict();
        fireEvent(TiC.EVENT_STOP, data);
    }

    /**
     * Fires an "error" event with the given message; includes the image URL
     * when one is available.
     */
    private void fireError(String message, String imageUrl)
    {
        KrollDict data = new KrollDict();
        data.putCodeAndMessage(TiC.ERROR_CODE_UNKNOWN, message);
        if (imageUrl != null) {
            data.put(TiC.PROPERTY_IMAGE, imageUrl);
        }
        fireEvent(TiC.EVENT_ERROR, data);
    }

    /**
     * Consumer side of the animation: a TimerTask that takes decoded frames
     * from the Loader's queue and displays them, firing change/pause/stop events.
     */
    private class Animator extends TimerTask
    {
        private Loader loader;

        public Animator(Loader loader)
        {
            this.loader = loader;
        }

        public void run()
        {
            boolean waitOnResume = false;
            try {
                if (paused) {
                    synchronized (this)
                    {
                        KrollDict data = new KrollDict();
                        fireEvent(TiC.EVENT_PAUSE, data);
                        waitOnResume = true;
                        // Blocks until resume() notifies this animator.
                        wait();
                    }
                }

                ArrayBlockingQueue<BitmapWithIndex> bitmapQueue = loader.getBitmapQueue();
                //Fire stop event when animation finishes
                if (!isLoading.get() && bitmapQueue.isEmpty()) {
                    fireStop();
                }
                // Blocks until the Loader produces the next frame.
                BitmapWithIndex b = bitmapQueue.take();
                Log.d(TAG, "set image: " + b.index, Log.DEBUG_MODE);
                setImage(b.bitmap);
                fireChange(b.index);

                // When the animation is paused, the timer will pause in the middle of a period.
                // When the animation resumes, the timer resumes from where it left off. As a result, it will look like
                // one frame is left out when resumed (TIMOB-10207).
                // To avoid this, we force the thread to wait for one period on resume.
                if (waitOnResume) {
                    Thread.sleep(currentDuration);
                    waitOnResume = false;
                }
            } catch (InterruptedException e) {
                Log.e(TAG, "Loader interrupted");
            }
        }
    }

    /** Starts the animation, marshalling to the UI thread if needed. */
    public void start()
    {
        if (!TiApplication.isUIThread()) {
            Message message = mainHandler.obtainMessage(START);
            message.sendToTarget();
        } else {
            handleStart();
        }
    }

    /**
     * Starts (or resumes) the frame animation: spins up the Loader thread if
     * necessary and schedules the Animator at the current frame duration.
     */
    public void handleStart()
    {
        if (animator == null) {
            timer = new Timer();

            if (loader == null) {
                loader = new Loader();
                loaderThread = new Thread(loader);
                Log.d(TAG, "STARTING LOADER THREAD " + loaderThread + " for " + this, Log.DEBUG_MODE);
            }

            animator = new Animator(loader);
            if (!animating.get() && !loaderThread.isAlive()) {
                isStopping.set(false);
                loaderThread.start();
            }

            currentDuration = (int) getDuration();

            animating.set(true);
            fireStart();
            timer.schedule(animator, currentDuration, currentDuration);
        } else {
            resume();
        }
    }

    public void pause()
    {
        paused = true;
    }

    /** Clears the pause flag and wakes both the Animator and the Loader. */
    public void resume()
    {
        paused = false;

        if (animator != null) {
            synchronized (animator)
            {
                animator.notify();
            }
        }

        if (loader != null) {
            synchronized (loader)
            {
                loader.notify();
            }
        }
    }

    /** Stops the animation, marshalling to the UI thread if needed. */
    public void stop()
    {
        if (!TiApplication.isUIThread()) {
            Message message = mainHandler.obtainMessage(STOP);
            message.sendToTarget();
        } else {
            handleStop();
        }
    }

    /**
     * Tears the animation down: cancels the timer, signals the Loader to stop,
     * and joins its thread before clearing all animation state.
     */
    public void handleStop()
    {
        if (timer != null) {
            timer.cancel();
        }
        animating.set(false);
        isStopping.set(true);
        if (loaderThread != null) {
            try {
                // NOTE(review): join() blocks the calling thread (typically the UI
                // thread via the STOP handler message) until the loader exits.
                loaderThread.join();
            } catch (InterruptedException e) {
                Log.e(TAG, "LoaderThread termination interrupted");
            }
            loaderThread = null;
        }
        if (loader != null) {
            synchronized (loader)
            {
                loader.notify();
            }
        }

        loader = null;
        timer = null;
        animator = null;
        paused = false;

        fireStop();
    }

    /**
     * Replaces imageSources from either an array (animation frames) or a single
     * object (one image).
     */
    private void setImageSource(Object object)
    {
        imageSources = new ArrayList<TiDrawableReference>();
        if (object instanceof Object[]) {
            for (Object o : (Object[]) object) {
                imageSources.add(TiDrawableReference.fromObject(getProxy(), o));
            }
        } else {
            imageSources.add(TiDrawableReference.fromObject(getProxy(), object));
        }
    }

    private void setImageSource(TiDrawableReference source)
    {
        imageSources = new ArrayList<TiDrawableReference>();
        imageSources.add(source);
    }

    /** Resolves the defaultImage property into a drawable reference. */
    private void setDefaultImageSource(Object object)
    {
        if (object instanceof FileProxy) {
            defaultImageSource = TiDrawableReference.fromFile(proxy.getActivity(), ((FileProxy) object).getBaseFile());
        } else if (object instanceof String) {
            defaultImageSource = TiDrawableReference.fromUrl(proxy, (String) object);
        } else {
            defaultImageSource = TiDrawableReference.fromObject(proxy, object);
        }
    }

    /**
     * Applies the current image source(s): shows the default image first, then
     * serves a single image from the memory cache, the disk cache, or an async
     * download — or starts the multi-frame loader for animations.
     */
    private void setImageInternal()
    {
        // Set default image or clear previous image first.
        if (defaultImageSource != null) {
            setDefaultImage();
        } else {
            setImage(null);
        }

        if (imageSources == null || imageSources.size() == 0 || imageSources.get(0) == null
            || imageSources.get(0).isTypeNull()) {
            return;
        }

        if (imageSources.size() == 1) {
            TiDrawableReference imageref = imageSources.get(0);

            // Check if the image is cached in memory
            int hash = imageref.hashCode();
            Bitmap bitmap = mMemoryCache.get(hash);
            if (bitmap != null) {
                if (!bitmap.isRecycled()) {
                    setImage(bitmap);
                    if (!firedLoad) {
                        fireLoad(TiC.PROPERTY_IMAGE);
                        firedLoad = true;
                    }
                    return;
                } else { // If the cached image has been recycled, remove it from the cache.
                    mMemoryCache.remove(hash);
                }
            }

            if (imageref.isNetworkUrl()) {
                boolean isCachedInDisk = false;
                URI uri = null;
                try {
                    String imageUrl = TiUrl.getCleanUri(imageref.getUrl()).toString();
                    uri = new URI(imageUrl);
                    isCachedInDisk = TiResponseCache.peekFollowingRedirects(uri);
                } catch (URISyntaxException e) {
                    Log.e(TAG, "URISyntaxException for url " + imageref.getUrl(), e);
                } catch (NullPointerException e) {
                    Log.e(TAG, "NullPointerException for url " + imageref.getUrl(), e);
                }

                // Check if the image is not cached in disc and the uri is valid.
                if (!isCachedInDisk && uri != null) {
                    TiDownloadManager.getInstance().download(uri, downloadListener);
                } else {
                    // If the image has been cached in disk or the uri is not valid,
                    // fetch and cache it and update the UI.
                    TiLoadImageManager.getInstance().load(imageref, loadImageListener);
                }
            } else {
                TiLoadImageManager.getInstance().load(imageref, loadImageListener);
            }
        } else {
            setImages();
        }
    }

    private void setDefaultImage()
    {
        if (defaultImageSource == null) {
            setImage(null);
            return;
        }
        // Have to set default image in the UI thread to make sure it shows before the image
        // is ready. Don't need to retry decode because we don't want to block UI.
        setImage(defaultImageSource.getBitmap(false));
    }

    @Override
    public void processProperties(KrollDict d)
    {
        boolean heightDefined = false;
        boolean widthDefined = false;
        TiImageView view = getView();

        if (view == null) {
            return;
        }

        if (d.containsKey(TiC.PROPERTY_WIDTH)) {
            String widthProperty = d.getString(TiC.PROPERTY_WIDTH);
            widthDefined = !TiC.LAYOUT_SIZE.equals(widthProperty) && !TiC.SIZE_AUTO.equals(widthProperty);
            view.setWidthDefined(widthDefined);
        }

        if (d.containsKey(TiC.PROPERTY_HEIGHT)) {
            String heightProperty = d.getString(TiC.PROPERTY_HEIGHT);
            heightDefined = !TiC.LAYOUT_SIZE.equals(heightProperty) && !TiC.SIZE_AUTO.equals(heightProperty);
            view.setHeightDefined(heightDefined);
        }

        // left+right (or top+bottom) pins both edges, which fixes the dimension.
        if (d.containsKey(TiC.PROPERTY_LEFT) && d.containsKey(TiC.PROPERTY_RIGHT)) {
            view.setWidthDefined(true);
        }

        if (d.containsKey(TiC.PROPERTY_TOP) && d.containsKey(TiC.PROPERTY_BOTTOM)) {
            view.setHeightDefined(true);
        }

        if (d.containsKey(TiC.PROPERTY_IMAGES)) {
            setImageSource(d.get(TiC.PROPERTY_IMAGES));
            setImages();
        }

        if (d.containsKey(TiC.PROPERTY_ENABLE_ZOOM_CONTROLS)) {
            view.setEnableZoomControls(TiConvert.toBoolean(d, TiC.PROPERTY_ENABLE_ZOOM_CONTROLS, true));
        }

        if (d.containsKey(TiC.PROPERTY_DEFAULT_IMAGE)) {
            setDefaultImageSource(d.get(TiC.PROPERTY_DEFAULT_IMAGE));
        }

        if (d.containsKey(TiC.PROPERTY_IMAGE)) {
            // processProperties is also called from TableView, we need check if we changed before re-creating the
            // bitmap
            boolean changeImage = true;
            TiDrawableReference source = TiDrawableReference.fromObject(getProxy(), d.get(TiC.PROPERTY_IMAGE));
            if (imageSources != null && imageSources.size() == 1) {
                if (imageSources.get(0).equals(source)) {
                    changeImage = false;
                }
            }

            if (changeImage) {
                // Check for orientation and decodeRetries only if an image is specified
                Object autoRotate = d.get(TiC.PROPERTY_AUTOROTATE);
                if (autoRotate != null && TiConvert.toBoolean(autoRotate)) {
                    view.setOrientation(source.getOrientation());
                }
                if (d.containsKey(TiC.PROPERTY_DECODE_RETRIES)) {
                    source.setDecodeRetries(TiConvert.toInt(d.get(TiC.PROPERTY_DECODE_RETRIES),
                                                            TiDrawableReference.DEFAULT_DECODE_RETRIES));
                }
                setImageSource(source);
                firedLoad = false;
                setImageInternal();
            }
        } else {
            if (!d.containsKey(TiC.PROPERTY_IMAGES)) {
                getProxy().setProperty(TiC.PROPERTY_IMAGE, null);
                if (defaultImageSource != null) {
                    setDefaultImage();
                }
            }
        }

        if (d.containsKey(TiC.PROPERTY_TINT_COLOR)) {
            // NOTE(review): reads the literal key "tintColor" rather than
            // TiC.PROPERTY_TINT_COLOR — confirm they are the same string.
            setTintColor(d.getString("tintColor"));
        }

        // If height and width is not defined, disable scaling for scrollview since an image
        // can extend beyond the screensize in scrollview.
        if (proxy.getParent() instanceof ScrollViewProxy && !heightDefined && !widthDefined) {
            view.setEnableScale(false);
        }

        super.processProperties(d);
    }

    @Override
    public void propertyChanged(String key, Object oldValue, Object newValue, KrollProxy proxy)
    {
        TiImageView view = getView();
        if (view == null) {
            return;
        }

        if (key.equals(TiC.PROPERTY_ENABLE_ZOOM_CONTROLS)) {
            view.setEnableZoomControls(TiConvert.toBoolean(newValue));
        } else if (key.equals(TiC.PROPERTY_IMAGE)) {
            // Only rebuild the image when the value actually changed.
            if ((oldValue == null && newValue != null) || (oldValue != null && !oldValue.equals(newValue))) {
                TiDrawableReference source = TiDrawableReference.fromObject(getProxy(), newValue);
                Object autoRotate = proxy.getProperty(TiC.PROPERTY_AUTOROTATE);
                if (autoRotate != null && TiConvert.toBoolean(autoRotate)) {
                    view.setOrientation(source.getOrientation());
                }
                if (proxy.hasProperty(TiC.PROPERTY_DECODE_RETRIES)) {
                    source.setDecodeRetries(TiConvert.toInt(proxy.getProperty(TiC.PROPERTY_DECODE_RETRIES),
                                                            TiDrawableReference.DEFAULT_DECODE_RETRIES));
                }
                setImageSource(source);
                firedLoad = false;
                setImageInternal();
            }
        } else if (key.equals(TiC.PROPERTY_IMAGES)) {
            if (newValue instanceof Object[]) {
                if (oldValue == null || !oldValue.equals(newValue)) {
                    setImageSource(newValue);
                    setImages();
                }
            }
        } else {
            if (key.equals(TiC.PROPERTY_WIDTH)) {
                String widthProperty = TiConvert.toString(newValue);
                view.setWidthDefined(!TiC.LAYOUT_SIZE.equals(widthProperty) && !TiC.SIZE_AUTO.equals(widthProperty));
            } else if (key.equals(TiC.PROPERTY_HEIGHT)) {
                String heightProperty = TiConvert.toString(newValue);
                view.setHeightDefined(!TiC.LAYOUT_SIZE.equals(heightProperty) && !TiC.SIZE_AUTO.equals(heightProperty));
            }
            super.propertyChanged(key, oldValue, newValue, proxy);
        }
    }

    // --- Activity lifecycle callbacks: pause/resume/stop the animation with the host activity. ---

    public void onCreate(Activity activity, Bundle savedInstanceState)
    {
    }

    public void onDestroy(Activity activity)
    {
    }

    public void onPause(Activity activity)
    {
        pause();
    }

    public void onResume(Activity activity)
    {
        resume();
    }

    public void onStart(Activity activity)
    {
    }

    public void onStop(Activity activity)
    {
        stop();
    }

    public boolean isAnimating()
    {
        return animating.get() && !paused;
    }

    public boolean isPaused()
    {
        return paused;
    }

    public boolean isReverse()
    {
        return reverse;
    }

    public void setReverse(boolean reverse)
    {
        this.reverse = reverse;
    }

    /**
     * Returns the currently displayed image as a TiBlob, preferring the memory
     * cache, then the view's drawable, then a fresh decode of the source.
     * Returns null when no bitmap is available.
     */
    public TiBlob toBlob()
    {
        TiDrawableReference imageReference =
            imageSources != null && imageSources.size() == 1 ? imageSources.get(0) : null;
        Bitmap cachedBitmap = imageReference != null ? mMemoryCache.get(imageReference.hashCode()) : null;

        if (cachedBitmap != null && !cachedBitmap.isRecycled()) {
            return TiBlob.blobFromImage(cachedBitmap);
        } else {
            TiImageView view = getView();
            if (view != null) {
                Drawable drawable = view.getImageDrawable();
                if (drawable != null && drawable instanceof BitmapDrawable) {
                    Bitmap bitmap = ((BitmapDrawable) drawable).getBitmap();
                    if (bitmap == null && imageSources != null && imageSources.size() == 1) {
                        bitmap = imageSources.get(0).getBitmap(true);
                    }
                    if (bitmap != null) {
                        if (imageReference != null) {
                            mMemoryCache.put(imageReference.hashCode(), bitmap);
                        }
                        return TiBlob.blobFromImage(bitmap);
                    }
                }
            }
        }
        return null;
    }

    /** Applies a tint color, marshalling to the UI thread if needed. */
    public void setTintColor(String color)
    {
        if (!TiApplication.isUIThread()) {
            Message message = mainHandler.obtainMessage(SET_TINT, color);
            message.sendToTarget();
        } else {
            handleTint(color);
        }
    }

    public void handleTint(String color)
    {
        // NOTE(review): no null check on the view here, unlike other handlers —
        // confirm this cannot run after release().
        TiImageView view = getView();
        view.setTintColor(color);
    }

    public int getTintColor()
    {
        TiImageView view = getView();
        return view.getTintColor();
    }

    /**
     * Stops any animation, evicts this view's bitmaps from the shared memory
     * cache, and clears all image sources before delegating to the superclass.
     */
    @Override
    public void release()
    {
        handleStop();
        synchronized (releasedLock)
        {
            if (imageSources != null) {
                for (TiDrawableReference imageref : imageSources) {
                    int hash = imageref.hashCode();
                    mMemoryCache.remove(hash); //Release the cached images
                }
                imageSources.clear();
                imageSources = null;
            }
        }

        if (timer != null) {
            timer.cancel();
            timer = null;
        }
        defaultImageSource = null;
        super.release();
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.testsuite.multitenantedairavata;
import org.apache.airavata.api.Airavata;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationModule;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationParallelismType;
import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
import org.apache.airavata.model.appcatalog.appinterface.DataType;
import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.testsuite.multitenantedairavata.utils.FrameworkUtils;
import org.apache.airavata.testsuite.multitenantedairavata.utils.TestFrameworkConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ApplicationRegister {
    // Thrift client used for every Airavata registry call.
    private Airavata.Client airavata;
    // All gateways known to the server, fetched once in the constructor.
    private List<Gateway> allGateways;
    // Maps registered application-interface id -> owning gateway id.
    private Map<String, String> applicationInterfaceListPerGateway;
    // Maps registered application-deployment id -> owning gateway id.
    private Map<String, String> applicationDeployementListPerGateway;
    private final static Logger logger = LoggerFactory.getLogger(ApplicationRegister.class);
    // Compute-resource ids resolved by display name in addApplications().
    private String stampedeResourceId;
    private String trestlesResourceId;
    private String br2ResourceId;
    private String gordenResourceId;
    private String alamoResourceId;
    // Gateway ids that registration should skip (from the test-framework properties).
    private List<String> gatewaysToAvoid;
public ApplicationRegister(Airavata.Client airavata, TestFrameworkProps props) throws Exception {
this.airavata = airavata;
allGateways = getAllGateways(airavata);
applicationInterfaceListPerGateway = new HashMap<String, String>();
applicationDeployementListPerGateway = new HashMap<String, String>();
FrameworkUtils frameworkUtils = FrameworkUtils.getInstance();
gatewaysToAvoid = frameworkUtils.getGatewayListToAvoid(props.getSkippedGateways());
}
public List<Gateway> getAllGateways(Airavata.Client client) throws Exception{
try {
return client.getAllGateways();
}catch (Exception e){
logger.error("Error while getting all the gateways", e);
throw new Exception("Error while getting all the gateways", e);
}
}
public void addApplications () throws Exception{
Map<String, String> allComputeResourceNames = airavata.getAllComputeResourceNames();
System.out.println("All compute resources :" + allComputeResourceNames.size());
for (String resourceId : allComputeResourceNames.keySet()){
String resourceName = allComputeResourceNames.get(resourceId);
if (resourceName.equals(TestFrameworkConstants.AppcatalogConstants.STAMPEDE_RESOURCE_NAME)){
stampedeResourceId = resourceId;
}else if (resourceName.equals(TestFrameworkConstants.AppcatalogConstants.TRESTLES_RESOURCE_NAME)){
trestlesResourceId = resourceId;
}else if (resourceName.equals(TestFrameworkConstants.AppcatalogConstants.BR2_RESOURCE_NAME)){
br2ResourceId = resourceId;
}else if (resourceName.equals(TestFrameworkConstants.AppcatalogConstants.GORDEN_RESOURCE_NAME)){
gordenResourceId = resourceId;
}else if (resourceName.equals(TestFrameworkConstants.AppcatalogConstants.ALAMO_RESOURCE_NAME)){
alamoResourceId = resourceId;
}
}
addUltrascanApplication();
// addAmberApplication();
// addEchoApplication();
// addLAMMPSApplication();
}
protected void addAmberApplication () throws Exception{
for (Gateway gateway : allGateways) {
boolean isgatewayValid = true;
for (String ovoidGateway : gatewaysToAvoid){
if (gateway.getGatewayId().equals(ovoidGateway)){
isgatewayValid = false;
break;
}
}
if (isgatewayValid) {
// add amber module
String amberModuleId = airavata.registerApplicationModule(gateway.getGatewayId(),
createApplicationModule(TestFrameworkConstants.AppcatalogConstants.AMBER_APP_NAME, "12.0", TestFrameworkConstants.AppcatalogConstants.AMBER_DESCRIPTION));
System.out.println("Amber Module Id " + amberModuleId);
// add amber interface
String amberInterfaceId = registerAmberInterface(gateway, amberModuleId);
applicationInterfaceListPerGateway.put(amberInterfaceId, gateway.getGatewayId());
// add amber deployment
List<String> moduleLoadCMDs = new ArrayList<String>();
moduleLoadCMDs.add("module load amber");
ApplicationDeploymentDescription amberStampedeDeployment = createApplicationDeployment(amberModuleId, stampedeResourceId,
"/opt/apps/intel13/mvapich2_1_9/amber/12.0/bin/sander.MPI -O", ApplicationParallelismType.MPI,
TestFrameworkConstants.AppcatalogConstants.AMBER_DESCRIPTION, moduleLoadCMDs, null, null);
String amberStampedeAppDeployId = airavata.registerApplicationDeployment(gateway.getGatewayId(), amberStampedeDeployment);
String amberTrestlesAppDeployId = airavata.registerApplicationDeployment(gateway.getGatewayId(),
createApplicationDeployment(amberModuleId, trestlesResourceId,
"/opt/amber/bin/sander.MPI -O", ApplicationParallelismType.MPI,
TestFrameworkConstants.AppcatalogConstants.AMBER_DESCRIPTION, moduleLoadCMDs, null, null));
List<String> amberModuleLoadCMDsBr2 = new ArrayList<String>();
amberModuleLoadCMDsBr2.add("module load amber/gnu/mpi/12");
amberModuleLoadCMDsBr2.add("module swap PrgEnv-cray PrgEnv-gnu");
String amberBr2AppDeployId = airavata.registerApplicationDeployment(gateway.getGatewayId(),
createApplicationDeployment(amberModuleId, br2ResourceId,
"/N/soft/cle4/amber/gnu/mpi/12/amber12/bin/sander.MPI -O", ApplicationParallelismType.MPI,
TestFrameworkConstants.AppcatalogConstants.AMBER_DESCRIPTION, amberModuleLoadCMDsBr2, null, null));
applicationDeployementListPerGateway.put(amberStampedeAppDeployId, gateway.getGatewayId());
applicationDeployementListPerGateway.put(amberTrestlesAppDeployId, gateway.getGatewayId());
applicationDeployementListPerGateway.put(amberBr2AppDeployId, gateway.getGatewayId());
}
}
}
protected void addUltrascanApplication () throws Exception{
for (Gateway gateway : allGateways) {
boolean isgatewayValid = true;
for (String ovoidGateway : gatewaysToAvoid){
if (gateway.getGatewayId().equals(ovoidGateway)){
isgatewayValid = false;
break;
}
}
if (isgatewayValid) {
// add amber module
String ultrascanModuleId = airavata.registerApplicationModule(gateway.getGatewayId(),
createApplicationModule(TestFrameworkConstants.AppcatalogConstants.ULTRASCAN, "1.0", TestFrameworkConstants.AppcatalogConstants.ULTRASCAN_DESCRIPTION));
System.out.println("Ultrascan module Id " + ultrascanModuleId);
// add amber interface
String ultrascanInterfaceId = registerUltrascanInterface(gateway, ultrascanModuleId);
applicationInterfaceListPerGateway.put(ultrascanInterfaceId, gateway.getGatewayId());
// add amber deployment
ApplicationDeploymentDescription ultrascanStampedeDeployment = createApplicationDeployment(ultrascanModuleId, stampedeResourceId,
"/home1/01623/us3/bin/us_mpi_analysis", ApplicationParallelismType.MPI,
TestFrameworkConstants.AppcatalogConstants.ULTRASCAN_DESCRIPTION, null, null, null);
String ultrascanStampedeAppDeployId = airavata.registerApplicationDeployment(gateway.getGatewayId(), ultrascanStampedeDeployment);
String ultrascanTrestlesAppDeployId = airavata.registerApplicationDeployment(gateway.getGatewayId(),
createApplicationDeployment(ultrascanModuleId, trestlesResourceId,
"/home/us3/trestles/bin/us_mpi_analysis", ApplicationParallelismType.MPI,
TestFrameworkConstants.AppcatalogConstants.ULTRASCAN_DESCRIPTION, null, null, null));
String ultrascanGordenAppDepId = airavata.registerApplicationDeployment(gateway.getGatewayId(),
createApplicationDeployment(ultrascanModuleId,gordenResourceId,
"/home/us3/gordon/bin/us_mpi_analysis", ApplicationParallelismType.MPI,
TestFrameworkConstants.AppcatalogConstants.ULTRASCAN_DESCRIPTION, null, null, null));
List<String> alamoModules = new ArrayList<>();
alamoModules.add("module load intel/2015/64");
alamoModules.add("module load openmpi/intel/1.8.4");
alamoModules.add("module load qt4/4.8.6");
alamoModules.add("module load ultrascan3/3.3");
String ultrascanAlamoAppId = airavata.registerApplicationDeployment(gateway.getGatewayId(),
createApplicationDeployment(ultrascanModuleId,alamoResourceId,
"/home/us3/bin/us_mpi_analysis", ApplicationParallelismType.OPENMP,
TestFrameworkConstants.AppcatalogConstants.ULTRASCAN_DESCRIPTION, alamoModules, null, null));
applicationDeployementListPerGateway.put(ultrascanStampedeAppDeployId, gateway.getGatewayId());
applicationDeployementListPerGateway.put(ultrascanTrestlesAppDeployId, gateway.getGatewayId());
applicationDeployementListPerGateway.put(ultrascanGordenAppDepId, gateway.getGatewayId());
applicationDeployementListPerGateway.put(ultrascanAlamoAppId, gateway.getGatewayId());
}
}
}
private String registerUltrascanInterface(Gateway gateway, String ultrascanModuleId) throws org.apache.thrift.TException {
List<String> appModules = new ArrayList<String>();
appModules.add(ultrascanModuleId);
InputDataObjectType input1 = createAppInput("input", null,
DataType.URI, null, 1, true, true,false, "Input tar file", null);
InputDataObjectType input2 = createAppInput("mgroupcount", "-mgroupcount=1",
DataType.STRING, null, 3, true, true,false, "mgroupcount", null);
InputDataObjectType input3 = createAppInput("walltime", "-walltime=60",
DataType.STRING, null, 2, true, true,false, "walltime", null);
List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
applicationInputs.add(input1);
applicationInputs.add(input2);
applicationInputs.add(input3);
OutputDataObjectType output1 = createAppOutput("ultrascanOutput", "analysis-results.tar", DataType.URI, true, false, null);
output1.setLocation("output");
OutputDataObjectType output2 = createAppOutput("STDOUT", null, DataType.STDOUT, true, false, null);
OutputDataObjectType output3 = createAppOutput("STDERR", null, DataType.STDERR, true, false, null);
List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
applicationOutputs.add(output1);
applicationOutputs.add(output2);
applicationOutputs.add(output3);
String ultrascanAppId = airavata.registerApplicationInterface(gateway.getGatewayId(),
createApplicationInterfaceDescription(TestFrameworkConstants.AppcatalogConstants.ULTRASCAN, TestFrameworkConstants.AppcatalogConstants.ULTRASCAN_DESCRIPTION,
appModules, applicationInputs, applicationOutputs));
System.out.println("Ultrascan Application Interface Id " + ultrascanAppId);
return ultrascanAppId;
}
/**
 * Registers the Amber MD application interface (module binding, inputs, outputs)
 * for the given gateway.
 *
 * @param gateway the gateway to register the interface under
 * @param amberModuleId id of the previously registered Amber application module
 * @return the id of the newly registered application interface
 * @throws org.apache.thrift.TException if the Airavata registry call fails
 */
private String registerAmberInterface(Gateway gateway, String amberModuleId) throws org.apache.thrift.TException {
    List<String> appModules = new ArrayList<String>();
    appModules.add(amberModuleId);
    // Inputs: restart, production-control and topology files, each bound to a sander flag.
    // Fixed: "-i " previously carried a trailing space, unlike every other flag in this method.
    InputDataObjectType input1 = createAppInput("heatRst", null,
            DataType.URI, "-c", 1, true, true, false, "Heating up the system equilibration stage - 02_Heat.rst", null);
    InputDataObjectType input2 = createAppInput("prodIn", null,
            DataType.URI, "-i", 2, true, true, false, "Constant pressure and temperature for production stage - 03_Prod.in", null);
    InputDataObjectType input3 = createAppInput("prmtop", null,
            DataType.URI, "-p", 3, true, true, false, "Parameter and Topology coordinates - prmtop", null);
    List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
    applicationInputs.add(input1);
    applicationInputs.add(input2);
    applicationInputs.add(input3);
    OutputDataObjectType output1 = createAppOutput("AMBER_Execution_Summary", "03_Prod.info", DataType.URI, true, true, "-inf");
    OutputDataObjectType output2 = createAppOutput("AMBER_Execution_log", "03_Prod.out", DataType.URI, true, true, "-o");
    OutputDataObjectType output3 = createAppOutput("AMBER_Trajectory_file", "03_Prod.mdcrd", DataType.URI, true, true, "-x");
    // Fixed: argument was " -r" (leading space), inconsistent with the other flags above and
    // liable to produce a doubled space in the generated command line.
    OutputDataObjectType output4 = createAppOutput("AMBER_Restart_file", "03_Prod.rst", DataType.URI, true, true, "-r");
    OutputDataObjectType output5 = createAppOutput("STDOUT", null, DataType.STDOUT, true, false, null);
    OutputDataObjectType output6 = createAppOutput("STDERR", null, DataType.STDERR, true, false, null);
    List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
    applicationOutputs.add(output1);
    applicationOutputs.add(output2);
    applicationOutputs.add(output3);
    applicationOutputs.add(output4);
    applicationOutputs.add(output5);
    applicationOutputs.add(output6);
    String amberInterfaceId = airavata.registerApplicationInterface(gateway.getGatewayId(),
            createApplicationInterfaceDescription(TestFrameworkConstants.AppcatalogConstants.AMBER_APP_NAME,
                    TestFrameworkConstants.AppcatalogConstants.AMBER_DESCRIPTION,
                    appModules, applicationInputs, applicationOutputs));
    System.out.println("Amber Application Interface Id " + amberInterfaceId);
    return amberInterfaceId;
}
/**
 * Registers the Echo application interface (module binding, one string input,
 * stdout/stderr outputs) for the given gateway.
 *
 * @param gateway the gateway to register the interface under
 * @param moduleId id of the previously registered Echo application module
 * @return the id of the newly registered application interface
 * @throws org.apache.thrift.TException if the Airavata registry call fails
 */
private String registerEchoInterface(Gateway gateway, String moduleId) throws org.apache.thrift.TException {
    List<String> modules = new ArrayList<String>();
    modules.add(moduleId);

    // Single string input echoed back by the wrapper script.
    List<InputDataObjectType> inputs = new ArrayList<InputDataObjectType>();
    inputs.add(createAppInput("input_to_Echo", null,
            DataType.STRING, null, 1, true, true, false, "Sample input to Echo", null));

    // Only the standard streams are captured.
    List<OutputDataObjectType> outputs = new ArrayList<OutputDataObjectType>();
    outputs.add(createAppOutput("STDOUT", null, DataType.STDOUT, true, false, null));
    outputs.add(createAppOutput("STDERR", null, DataType.STDERR, true, false, null));

    String echoInterfaceId = airavata.registerApplicationInterface(gateway.getGatewayId(),
            createApplicationInterfaceDescription(TestFrameworkConstants.AppcatalogConstants.ECHO_NAME,
                    TestFrameworkConstants.AppcatalogConstants.ECHO_DESCRIPTION,
                    modules, inputs, outputs));
    System.out.println("Echo Application Interface Id " + echoInterfaceId);
    return echoInterfaceId;
}
/**
 * Registers the Echo test application (module, interface and one deployment per
 * compute resource) for every gateway that is not in the avoid list.
 *
 * @throws Exception if any Airavata registration call fails
 */
protected void addEchoApplication() throws Exception {
    for (Gateway gateway : allGateways) {
        // Skip gateways explicitly excluded from test registration.
        boolean skip = false;
        for (String gatewayToSkip : gatewaysToAvoid) {
            if (gateway.getGatewayId().equals(gatewayToSkip)) {
                skip = true;
                break;
            }
        }
        if (skip) {
            continue;
        }
        // Register the echo module.
        String echoModuleId = airavata.registerApplicationModule(gateway.getGatewayId(),
                createApplicationModule(TestFrameworkConstants.AppcatalogConstants.ECHO_NAME, "1.0",
                        TestFrameworkConstants.AppcatalogConstants.ECHO_DESCRIPTION));
        System.out.println("Echo Module Id " + echoModuleId);
        // Register the echo interface (note: original comments here mistakenly said "amber").
        String echoInterfaceId = registerEchoInterface(gateway, echoModuleId);
        applicationInterfaceListPerGateway.put(echoInterfaceId, gateway.getGatewayId());
        // Register one echo deployment on each known compute resource.
        String echoStampedeAppDeployId = airavata.registerApplicationDeployment(gateway.getGatewayId(),
                createApplicationDeployment(echoModuleId, stampedeResourceId,
                        "/home1/01437/ogce/production/app_wrappers/echo_wrapper.sh", ApplicationParallelismType.SERIAL,
                        TestFrameworkConstants.AppcatalogConstants.ECHO_DESCRIPTION, null, null, null));
        String echoTrestlesAppDeployId = airavata.registerApplicationDeployment(gateway.getGatewayId(),
                createApplicationDeployment(echoModuleId, trestlesResourceId,
                        "/home/ogce/production/app_wrappers/echo_wrapper.sh", ApplicationParallelismType.SERIAL,
                        TestFrameworkConstants.AppcatalogConstants.ECHO_DESCRIPTION, null, null, null));
        String echoBr2AppDeployId = airavata.registerApplicationDeployment(gateway.getGatewayId(),
                createApplicationDeployment(echoModuleId, br2ResourceId,
                        "/N/u/cgateway/BigRed2/production/app_wrappers/echo_wrapper.sh", ApplicationParallelismType.SERIAL,
                        TestFrameworkConstants.AppcatalogConstants.ECHO_DESCRIPTION, null, null, null));
        applicationDeployementListPerGateway.put(echoStampedeAppDeployId, gateway.getGatewayId());
        applicationDeployementListPerGateway.put(echoTrestlesAppDeployId, gateway.getGatewayId());
        applicationDeployementListPerGateway.put(echoBr2AppDeployId, gateway.getGatewayId());
    }
}
/**
 * Placeholder for registering the LAMMPS application (module, interface and
 * deployments). Not implemented yet.
 *
 * @throws Exception declared for symmetry with the other add*Application methods
 */
protected void addLAMMPSApplication() throws Exception{
    // TODO: add LAMMPS module
    // TODO: add LAMMPS interface
    // TODO: add LAMMPS deployment
}
/**
 * Builds an {@link ApplicationDeploymentDescription} from the given parts.
 *
 * @param appModuleId id of the application module being deployed
 * @param computeResourceId id of the compute resource hosting the deployment
 * @param executablePath absolute path to the wrapper/executable on the resource
 * @param parallelism parallelism type of the deployment (e.g. SERIAL)
 * @param appDeploymentDescription human-readable description
 * @param moduleLoadCmds module-load commands, may be {@code null}
 * @param preJobCmds pre-job commands, may be {@code null}
 * @param postJobCmds post-job commands, may be {@code null}
 * @return the populated deployment description
 */
protected ApplicationDeploymentDescription createApplicationDeployment(String appModuleId,
        String computeResourceId,
        String executablePath,
        ApplicationParallelismType parallelism,
        String appDeploymentDescription,
        List<String> moduleLoadCmds,
        List<String> preJobCmds,
        List<String> postJobCmds) {
    ApplicationDeploymentDescription deployment = new ApplicationDeploymentDescription();
    deployment.setAppModuleId(appModuleId);
    deployment.setComputeHostId(computeResourceId);
    deployment.setExecutablePath(executablePath);
    deployment.setParallelism(parallelism);
    deployment.setAppDeploymentDescription(appDeploymentDescription);
    deployment.setModuleLoadCmds(moduleLoadCmds);
    deployment.setPreJobCommands(preJobCmds);
    deployment.setPostJobCommands(postJobCmds);
    return deployment;
}
/**
 * Builds an {@link ApplicationModule} with the given name, version and description.
 *
 * @param appModuleName module name
 * @param appModuleVersion module version string
 * @param appModuleDescription human-readable description
 * @return the populated module
 */
protected ApplicationModule createApplicationModule(String appModuleName,
        String appModuleVersion, String appModuleDescription) {
    ApplicationModule module = new ApplicationModule();
    module.setAppModuleName(appModuleName);
    module.setAppModuleVersion(appModuleVersion);
    module.setAppModuleDescription(appModuleDescription);
    return module;
}
/**
 * Builds an {@link InputDataObjectType}. Optional parts ({@code null} arguments)
 * are simply left unset on the returned object.
 *
 * @param inputName input name, may be {@code null}
 * @param value default value, may be {@code null}
 * @param type data type, may be {@code null}
 * @param applicationArgument command-line flag for this input, may be {@code null}
 * @param order position of this input on the command line
 * @param isRequired whether the input must be supplied
 * @param requiredToCMD whether the input is added to the command line
 * @param stdIn whether the input is fed via standard input
 * @param description user-friendly description, may be {@code null}
 * @param metadata extra metadata, may be {@code null}
 * @return the populated input descriptor
 */
protected InputDataObjectType createAppInput(String inputName,
        String value,
        DataType type,
        String applicationArgument,
        int order,
        boolean isRequired,
        boolean requiredToCMD,
        boolean stdIn,
        String description,
        String metadata) {
    InputDataObjectType appInput = new InputDataObjectType();
    // Mandatory scalar settings first.
    appInput.setInputOrder(order);
    appInput.setIsRequired(isRequired);
    appInput.setRequiredToAddedToCommandLine(requiredToCMD);
    appInput.setStandardInput(stdIn);
    // Optional settings, applied only when provided.
    if (inputName != null) {
        appInput.setName(inputName);
    }
    if (value != null) {
        appInput.setValue(value);
    }
    if (type != null) {
        appInput.setType(type);
    }
    if (applicationArgument != null) {
        appInput.setApplicationArgument(applicationArgument);
    }
    if (description != null) {
        appInput.setUserFriendlyDescription(description);
    }
    if (metadata != null) {
        appInput.setMetaData(metadata);
    }
    return appInput;
}
/**
 * Builds an {@link OutputDataObjectType}. Name, value and type are only set when
 * non-{@code null}.
 *
 * @param inputName output name, may be {@code null}
 * @param value output value/filename, may be {@code null}
 * @param type data type, may be {@code null}
 * @param isRequired whether the output must be produced
 * @param requiredToCMD whether the output is added to the command line
 * @param argument command-line flag for this output; passed through even when
 *                 {@code null} (NOTE(review): unlike the other setters here this one
 *                 is unconditional — confirm that setting {@code null} is intended)
 * @return the populated output descriptor
 */
protected OutputDataObjectType createAppOutput(String inputName,
        String value,
        DataType type,
        boolean isRequired,
        boolean requiredToCMD,
        String argument) {
    OutputDataObjectType appOutput = new OutputDataObjectType();
    if (inputName != null) {
        appOutput.setName(inputName);
    }
    if (value != null) {
        appOutput.setValue(value);
    }
    if (type != null) {
        appOutput.setType(type);
    }
    appOutput.setIsRequired(isRequired);
    appOutput.setRequiredToAddedToCommandLine(requiredToCMD);
    appOutput.setApplicationArgument(argument);
    return appOutput;
}
/**
 * Builds an {@link ApplicationInterfaceDescription}. All parts except the name are
 * optional and only set when non-{@code null}.
 *
 * @param applicationName application name (always set)
 * @param applicationDescription description, may be {@code null}
 * @param applicationModules module ids, may be {@code null}
 * @param applicationInputs input descriptors, may be {@code null}
 * @param applicationOutputs output descriptors, may be {@code null}
 * @return the populated interface description
 */
protected ApplicationInterfaceDescription createApplicationInterfaceDescription
        (String applicationName, String applicationDescription, List<String> applicationModules,
         List<InputDataObjectType> applicationInputs, List<OutputDataObjectType> applicationOutputs) {
    ApplicationInterfaceDescription description = new ApplicationInterfaceDescription();
    description.setApplicationName(applicationName);
    if (applicationDescription != null) {
        description.setApplicationDescription(applicationDescription);
    }
    if (applicationModules != null) {
        description.setApplicationModules(applicationModules);
    }
    if (applicationInputs != null) {
        description.setApplicationInputs(applicationInputs);
    }
    if (applicationOutputs != null) {
        description.setApplicationOutputs(applicationOutputs);
    }
    return description;
}
/** @return map of registered application-interface id to the owning gateway id */
public Map<String, String> getApplicationInterfaceListPerGateway() {
    return applicationInterfaceListPerGateway;
}
/** @param applicationInterfaceListPerGateway map of application-interface id to owning gateway id */
public void setApplicationInterfaceListPerGateway(Map<String, String> applicationInterfaceListPerGateway) {
    this.applicationInterfaceListPerGateway = applicationInterfaceListPerGateway;
}
/** @return map of registered application-deployment id to the owning gateway id */
public Map<String, String> getApplicationDeployementListPerGateway() {
    return applicationDeployementListPerGateway;
}
/** @param applicationDeployementListPerGateway map of application-deployment id to owning gateway id */
public void setApplicationDeployementListPerGateway(Map<String, String> applicationDeployementListPerGateway) {
    this.applicationDeployementListPerGateway = applicationDeployementListPerGateway;
}
}
| |
/*
* Copyright (c) 2016 Gridtec. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.gridtec.lambda4j.function.bi.conversion;
import at.gridtec.lambda4j.Lambda;
import at.gridtec.lambda4j.consumer.bi.BiDoubleConsumer;
import at.gridtec.lambda4j.function.bi.BiDoubleFunction;
import at.gridtec.lambda4j.function.bi.BiFunction2;
import at.gridtec.lambda4j.function.bi.to.ToLongBiFunction2;
import at.gridtec.lambda4j.function.conversion.BooleanToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.ByteToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.CharToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.DoubleToLongFunction2;
import at.gridtec.lambda4j.function.conversion.FloatToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.LongToByteFunction;
import at.gridtec.lambda4j.function.conversion.LongToCharFunction;
import at.gridtec.lambda4j.function.conversion.LongToFloatFunction;
import at.gridtec.lambda4j.function.conversion.LongToShortFunction;
import at.gridtec.lambda4j.function.conversion.ShortToDoubleFunction;
import at.gridtec.lambda4j.operator.binary.DoubleBinaryOperator2;
import at.gridtec.lambda4j.operator.binary.LongBinaryOperator2;
import at.gridtec.lambda4j.predicate.bi.BiDoublePredicate;
import org.apache.commons.lang3.tuple.Pair;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.DoubleToLongFunction;
import java.util.function.DoubleUnaryOperator;
import java.util.function.IntToDoubleFunction;
import java.util.function.LongConsumer;
import java.util.function.LongFunction;
import java.util.function.LongPredicate;
import java.util.function.LongToDoubleFunction;
import java.util.function.LongToIntFunction;
import java.util.function.LongUnaryOperator;
import java.util.function.ToDoubleFunction;
/**
* Represents an operation that accepts two {@code double}-valued input arguments and produces a
* {@code long}-valued result.
* This is a primitive specialization of {@link BiFunction2}.
* <p>
* This is a {@link FunctionalInterface} whose functional method is {@link #applyAsLong(double, double)}.
*
* @see BiFunction2
*/
@SuppressWarnings("unused")
@FunctionalInterface
public interface BiDoubleToLongFunction extends Lambda {
/**
 * Constructs a {@link BiDoubleToLongFunction} based on a lambda expression or a method reference. The given
 * lambda expression or method reference is returned as-is; the call exists only to fix the target type, so the
 * compiler resolves the expression to this interface rather than some other compatible functional interface.
 *
 * @param expression A lambda expression or (typically) a method reference, e.g. {@code this::method}
 * @return A {@code BiDoubleToLongFunction} from given lambda expression or method reference.
 * @implNote This implementation allows the given argument to be {@code null}, but only if {@code null} given,
 * {@code null} will be returned.
 * @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html#syntax">Lambda
 * Expression</a>
 * @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/methodreferences.html">Method Reference</a>
 */
static BiDoubleToLongFunction of(@Nullable final BiDoubleToLongFunction expression) {
    return expression;
}
/**
 * Calls the given {@link BiDoubleToLongFunction} with the given arguments and returns its result.
 *
 * @param function The function to be called
 * @param value1 The first argument to the function
 * @param value2 The second argument to the function
 * @return The result from the given {@code BiDoubleToLongFunction}.
 * @throws NullPointerException If given argument is {@code null}
 */
static long call(@Nonnull final BiDoubleToLongFunction function, double value1, double value2) {
    // requireNonNull returns its argument, so the null check and the call can be chained.
    return Objects.requireNonNull(function).applyAsLong(value1, value2);
}
/**
 * Creates a {@link BiDoubleToLongFunction} which uses only the {@code first} parameter of this one as argument
 * for the given {@link DoubleToLongFunction}; the second parameter is ignored.
 *
 * @param function The function which accepts the {@code first} parameter of this one
 * @return A {@code BiDoubleToLongFunction} delegating its first argument to the given function.
 * @throws NullPointerException If given argument is {@code null}
 */
@Nonnull
static BiDoubleToLongFunction onlyFirst(@Nonnull final DoubleToLongFunction function) {
    Objects.requireNonNull(function);
    return (first, ignored) -> function.applyAsLong(first);
}
/**
 * Creates a {@link BiDoubleToLongFunction} which uses only the {@code second} parameter of this one as argument
 * for the given {@link DoubleToLongFunction}; the first parameter is ignored.
 *
 * @param function The function which accepts the {@code second} parameter of this one
 * @return A {@code BiDoubleToLongFunction} delegating its second argument to the given function.
 * @throws NullPointerException If given argument is {@code null}
 */
@Nonnull
static BiDoubleToLongFunction onlySecond(@Nonnull final DoubleToLongFunction function) {
    Objects.requireNonNull(function);
    return (ignored, second) -> function.applyAsLong(second);
}
/**
 * Creates a {@link BiDoubleToLongFunction} which always returns the given value, regardless of its arguments.
 *
 * @param ret The return value for the constant
 * @return A {@code BiDoubleToLongFunction} which always returns {@code ret}.
 */
@Nonnull
static BiDoubleToLongFunction constant(long ret) {
    return (ignored1, ignored2) -> ret;
}
/**
 * Applies this function to the given arguments. This is the functional method of this interface.
 *
 * @param value1 The first argument to the function
 * @param value2 The second argument to the function
 * @return The return value from the function, which is its result.
 */
long applyAsLong(double value1, double value2);
/**
 * Partially applies this function by fixing its first argument, producing a
 * {@link DoubleToLongFunction2} over the remaining argument.
 *
 * @param value1 The first argument to this function used to partially apply this function
 * @return A {@code DoubleToLongFunction2} with the first argument bound to {@code value1}.
 */
@Nonnull
default DoubleToLongFunction2 papplyAsLong(double value1) {
    return value2 -> applyAsLong(value1, value2);
}
/**
 * Returns the number of arguments for this function.
 *
 * @return The number of arguments for this function.
 * @implSpec The default implementation always returns {@code 2}, as this is a binary function.
 */
@Nonnegative
default int arity() {
    return 2;
}
/**
 * Returns a composed {@link ToLongBiFunction2} that first applies the {@code before} functions to its input, and
 * then applies this function to the results. If evaluation of either operation throws an exception, it is
 * relayed to the caller of the composed operation.
 *
 * @param <A> The type of the argument to the first given function, and of composed function
 * @param <B> The type of the argument to the second given function, and of composed function
 * @param before1 The first function to apply before this function is applied
 * @param before2 The second function to apply before this function is applied
 * @return A composed {@code ToLongBiFunction2} that applies the {@code before} functions and then this function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle every type.
 */
@Nonnull
default <A, B> ToLongBiFunction2<A, B> compose(@Nonnull final ToDoubleFunction<? super A> before1,
        @Nonnull final ToDoubleFunction<? super B> before2) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    return (a, b) -> {
        double first = before1.applyAsDouble(a);
        double second = before2.applyAsDouble(b);
        return applyAsLong(first, second);
    };
}
/**
 * Returns a composed {@link BiBooleanToLongFunction} that first applies the {@code before} functions to its
 * input, and then applies this function to the results. Exceptions from either operation are relayed to the
 * caller. Convenience overload for {@code boolean} input.
 *
 * @param before1 The first function to apply before this function is applied
 * @param before2 The second function to apply before this function is applied
 * @return A composed {@code BiBooleanToLongFunction} that applies the {@code before} functions and then this
 * function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
 * boolean}.
 */
@Nonnull
default BiBooleanToLongFunction composeFromBoolean(@Nonnull final BooleanToDoubleFunction before1,
        @Nonnull final BooleanToDoubleFunction before2) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    return (b1, b2) -> applyAsLong(before1.applyAsDouble(b1), before2.applyAsDouble(b2));
}
/**
 * Returns a composed {@link BiByteToLongFunction} that first applies the {@code before} functions to its input,
 * and then applies this function to the results. Exceptions from either operation are relayed to the caller.
 * Convenience overload for {@code byte} input.
 *
 * @param before1 The first function to apply before this function is applied
 * @param before2 The second function to apply before this function is applied
 * @return A composed {@code BiByteToLongFunction} that applies the {@code before} functions and then this
 * function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
 * byte}.
 */
@Nonnull
default BiByteToLongFunction composeFromByte(@Nonnull final ByteToDoubleFunction before1,
        @Nonnull final ByteToDoubleFunction before2) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    return (b1, b2) -> applyAsLong(before1.applyAsDouble(b1), before2.applyAsDouble(b2));
}
/**
 * Returns a composed {@link BiCharToLongFunction} that first applies the {@code before} functions to its input,
 * and then applies this function to the results. Exceptions from either operation are relayed to the caller.
 * Convenience overload for {@code char} input.
 *
 * @param before1 The first function to apply before this function is applied
 * @param before2 The second function to apply before this function is applied
 * @return A composed {@code BiCharToLongFunction} that applies the {@code before} functions and then this
 * function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
 * char}.
 */
@Nonnull
default BiCharToLongFunction composeFromChar(@Nonnull final CharToDoubleFunction before1,
        @Nonnull final CharToDoubleFunction before2) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    return (c1, c2) -> applyAsLong(before1.applyAsDouble(c1), before2.applyAsDouble(c2));
}
/**
 * Returns a composed {@link BiDoubleToLongFunction} that first applies the {@code before} operators to its
 * input, and then applies this function to the results. Exceptions from either operation are relayed to the
 * caller. Convenience overload for {@code double} input.
 *
 * @param before1 The first operator to apply before this function is applied
 * @param before2 The second operator to apply before this function is applied
 * @return A composed {@code BiDoubleToLongFunction} that applies the {@code before} operators and then this
 * function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
 * double}.
 */
@Nonnull
default BiDoubleToLongFunction composeFromDouble(@Nonnull final DoubleUnaryOperator before1,
        @Nonnull final DoubleUnaryOperator before2) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    return (d1, d2) -> applyAsLong(before1.applyAsDouble(d1), before2.applyAsDouble(d2));
}
/**
 * Returns a composed {@link BiFloatToLongFunction} that first applies the {@code before} functions to its input,
 * and then applies this function to the results. Exceptions from either operation are relayed to the caller.
 * Convenience overload for {@code float} input.
 *
 * @param before1 The first function to apply before this function is applied
 * @param before2 The second function to apply before this function is applied
 * @return A composed {@code BiFloatToLongFunction} that applies the {@code before} functions and then this
 * function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
 * float}.
 */
@Nonnull
default BiFloatToLongFunction composeFromFloat(@Nonnull final FloatToDoubleFunction before1,
        @Nonnull final FloatToDoubleFunction before2) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    return (f1, f2) -> applyAsLong(before1.applyAsDouble(f1), before2.applyAsDouble(f2));
}
/**
 * Returns a composed {@link BiIntToLongFunction} that first applies the {@code before} functions to its input,
 * and then applies this function to the results. Exceptions from either operation are relayed to the caller.
 * Convenience overload for {@code int} input.
 *
 * @param before1 The first function to apply before this function is applied
 * @param before2 The second function to apply before this function is applied
 * @return A composed {@code BiIntToLongFunction} that applies the {@code before} functions and then this
 * function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
 * int}.
 */
@Nonnull
default BiIntToLongFunction composeFromInt(@Nonnull final IntToDoubleFunction before1,
        @Nonnull final IntToDoubleFunction before2) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    return (i1, i2) -> applyAsLong(before1.applyAsDouble(i1), before2.applyAsDouble(i2));
}
/**
 * Returns a composed {@link LongBinaryOperator2} that first applies the {@code before} functions to its input,
 * and then applies this function to the results. Exceptions from either operation are relayed to the caller.
 * Convenience overload for {@code long} input.
 *
 * @param before1 The first function to apply before this function is applied
 * @param before2 The second function to apply before this function is applied
 * @return A composed {@code LongBinaryOperator2} that applies the {@code before} functions and then this
 * function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
 * long}.
 */
@Nonnull
default LongBinaryOperator2 composeFromLong(@Nonnull final LongToDoubleFunction before1,
        @Nonnull final LongToDoubleFunction before2) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    return (l1, l2) -> applyAsLong(before1.applyAsDouble(l1), before2.applyAsDouble(l2));
}
/**
 * Returns a composed {@link BiShortToLongFunction} that first applies the {@code before} functions to its input,
 * and then applies this function to the results. Exceptions from either operation are relayed to the caller.
 * Convenience overload for {@code short} input.
 *
 * @param before1 The first function to apply before this function is applied
 * @param before2 The second function to apply before this function is applied
 * @return A composed {@code BiShortToLongFunction} that applies the {@code before} functions and then this
 * function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
 * short}.
 */
@Nonnull
default BiShortToLongFunction composeFromShort(@Nonnull final ShortToDoubleFunction before1,
        @Nonnull final ShortToDoubleFunction before2) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    return (s1, s2) -> applyAsLong(before1.applyAsDouble(s1), before2.applyAsDouble(s2));
}
/**
 * Returns a composed {@link BiDoubleFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result. Exceptions from either operation are relayed to the caller.
 *
 * @param <S> The type of return value from the {@code after} function, and of the composed function
 * @param after The function to apply after this function is applied
 * @return A composed {@code BiDoubleFunction} that applies this function and then the {@code after} function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return every type.
 */
@Nonnull
default <S> BiDoubleFunction<S> andThen(@Nonnull final LongFunction<? extends S> after) {
    Objects.requireNonNull(after);
    return (v1, v2) -> after.apply(applyAsLong(v1, v2));
}
/**
 * Returns a composed {@link BiDoublePredicate} that first applies this function to its input, and then applies
 * the {@code after} predicate to the result. Exceptions from either operation are relayed to the caller.
 * Convenience method transforming this function into one returning {@code boolean}.
 *
 * @param after The predicate to apply after this function is applied
 * @return A composed {@code BiDoublePredicate} that applies this function and then the {@code after} predicate.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * boolean}.
 */
@Nonnull
default BiDoublePredicate andThenToBoolean(@Nonnull final LongPredicate after) {
    Objects.requireNonNull(after);
    return (v1, v2) -> after.test(applyAsLong(v1, v2));
}
/**
 * Returns a composed {@link BiDoubleToByteFunction} that first applies this function to its input, and then
 * applies the {@code after} function to the result. Exceptions from either operation are relayed to the caller.
 * Convenience method transforming this function into one returning {@code byte}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code BiDoubleToByteFunction} that applies this function and then the {@code after}
 * function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * byte}.
 */
@Nonnull
default BiDoubleToByteFunction andThenToByte(@Nonnull final LongToByteFunction after) {
    Objects.requireNonNull(after);
    return (v1, v2) -> after.applyAsByte(applyAsLong(v1, v2));
}
/**
 * Returns a composed {@link BiDoubleToCharFunction} that first applies this function to its input, and then
 * applies the {@code after} function to the result. Exceptions from either operation are relayed to the caller.
 * Convenience method transforming this function into one returning {@code char}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code BiDoubleToCharFunction} that applies this function and then the {@code after}
 * function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * char}.
 */
@Nonnull
default BiDoubleToCharFunction andThenToChar(@Nonnull final LongToCharFunction after) {
    Objects.requireNonNull(after);
    return (v1, v2) -> after.applyAsChar(applyAsLong(v1, v2));
}
/**
 * Creates a composed {@link DoubleBinaryOperator2} that pipes the {@code long} result of this function into the
 * given {@code after} function. Any exception thrown by either step propagates to the caller of the composed
 * operation. This is a convenience method for transforming this primitive function into one returning
 * {@code double}.
 *
 * @param after The function applied to the result of this function
 * @return A composed {@code DoubleBinaryOperator2} evaluating this function first and {@code after} second.
 * @throws NullPointerException If {@code after} is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * double}.
 */
@Nonnull
default DoubleBinaryOperator2 andThenToDouble(@Nonnull final LongToDoubleFunction after) {
    Objects.requireNonNull(after);
    return (double value1, double value2) -> {
        final long intermediate = applyAsLong(value1, value2);
        return after.applyAsDouble(intermediate);
    };
}
/**
 * Creates a composed {@link BiDoubleToFloatFunction} that pipes the {@code long} result of this function into the
 * given {@code after} function. Any exception thrown by either step propagates to the caller of the composed
 * operation. This is a convenience method for transforming this primitive function into one returning
 * {@code float}.
 *
 * @param after The function applied to the result of this function
 * @return A composed {@code BiDoubleToFloatFunction} evaluating this function first and {@code after} second.
 * @throws NullPointerException If {@code after} is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * float}.
 */
@Nonnull
default BiDoubleToFloatFunction andThenToFloat(@Nonnull final LongToFloatFunction after) {
    Objects.requireNonNull(after);
    return (double value1, double value2) -> {
        final long intermediate = applyAsLong(value1, value2);
        return after.applyAsFloat(intermediate);
    };
}
/**
 * Creates a composed {@link BiDoubleToIntFunction} that pipes the {@code long} result of this function into the
 * given {@code after} function. Any exception thrown by either step propagates to the caller of the composed
 * operation. This is a convenience method for transforming this primitive function into one returning
 * {@code int}.
 *
 * @param after The function applied to the result of this function
 * @return A composed {@code BiDoubleToIntFunction} evaluating this function first and {@code after} second.
 * @throws NullPointerException If {@code after} is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * int}.
 */
@Nonnull
default BiDoubleToIntFunction andThenToInt(@Nonnull final LongToIntFunction after) {
    Objects.requireNonNull(after);
    return (double value1, double value2) -> {
        final long intermediate = applyAsLong(value1, value2);
        return after.applyAsInt(intermediate);
    };
}
/**
 * Creates a composed {@link BiDoubleToLongFunction} that pipes the {@code long} result of this function into the
 * given {@code after} operator. Any exception thrown by either step propagates to the caller of the composed
 * operation. This is a convenience method for transforming this primitive function into one returning
 * {@code long}.
 *
 * @param after The operator applied to the result of this function
 * @return A composed {@code BiDoubleToLongFunction} evaluating this function first and {@code after} second.
 * @throws NullPointerException If {@code after} is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * long}.
 */
@Nonnull
default BiDoubleToLongFunction andThenToLong(@Nonnull final LongUnaryOperator after) {
    Objects.requireNonNull(after);
    return (double value1, double value2) -> {
        final long intermediate = applyAsLong(value1, value2);
        return after.applyAsLong(intermediate);
    };
}
/**
 * Creates a composed {@link BiDoubleToShortFunction} that pipes the {@code long} result of this function into the
 * given {@code after} function. Any exception thrown by either step propagates to the caller of the composed
 * operation. This is a convenience method for transforming this primitive function into one returning
 * {@code short}.
 *
 * @param after The function applied to the result of this function
 * @return A composed {@code BiDoubleToShortFunction} evaluating this function first and {@code after} second.
 * @throws NullPointerException If {@code after} is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * short}.
 */
@Nonnull
default BiDoubleToShortFunction andThenToShort(@Nonnull final LongToShortFunction after) {
    Objects.requireNonNull(after);
    return (double value1, double value2) -> {
        final long intermediate = applyAsLong(value1, value2);
        return after.applyAsShort(intermediate);
    };
}
/**
 * Creates a composed {@link BiDoubleConsumer} that first applies this function to its input and then hands the
 * {@code long} result to the given {@link LongConsumer}. Any exception thrown by either step propagates to the
 * caller of the composed operation.
 *
 * @param consumer The operation which consumes the result from this operation
 * @return A composed {@code BiDoubleConsumer} that first applies this function to its input, and then consumes the
 * result using the given {@code LongConsumer}.
 * @throws NullPointerException If given argument is {@code null}
 */
@Nonnull
default BiDoubleConsumer consume(@Nonnull final LongConsumer consumer) {
    Objects.requireNonNull(consumer);
    return (double value1, double value2) -> {
        final long result = applyAsLong(value1, value2);
        consumer.accept(result);
    };
}
/**
 * Returns a memoized (caching) version of this {@link BiDoubleToLongFunction}. Whenever it is called, the mapping
 * between the input parameters and the return value is preserved in a cache, making subsequent calls returning the
 * memoized value instead of computing the return value again.
 * <p>
 * Unless the function and therefore the used cache will be garbage-collected, it will keep all memoized values
 * forever.
 *
 * @return A memoized (caching) version of this {@code BiDoubleToLongFunction}.
 * @implSpec This implementation does not allow the input parameters or return value to be {@code null} for the
 * resulting memoized function, as the cache used internally does not permit {@code null} keys or values.
 * @implNote The returned memoized function can be safely used concurrently from multiple threads which makes it
 * thread-safe.
 */
@Nonnull
default BiDoubleToLongFunction memoized() {
    if (isMemoized()) {
        // Already a memoized instance; wrapping it again would only add a useless second cache.
        return this;
    } else {
        // Cache keyed by the boxed argument pair; entries live as long as the returned function.
        final Map<Pair<Double, Double>, Long> cache = new ConcurrentHashMap<>();
        final Object lock = new Object();
        // The intersection cast tags the returned lambda with the Memoized marker so the
        // isMemoized() check above can recognize it.
        return (BiDoubleToLongFunction & Memoized) (value1, value2) -> {
            final long returnValue;
            // NOTE(review): the explicit lock serializes all lookups even though
            // ConcurrentHashMap.computeIfAbsent is itself atomic — presumably kept to guard
            // against re-entrant computeIfAbsent calls; confirm before removing.
            synchronized (lock) {
                returnValue = cache.computeIfAbsent(Pair.of(value1, value2),
                        key -> applyAsLong(key.getLeft(), key.getRight()));
            }
            return returnValue;
        };
    }
}
/**
 * Returns a {@link BiFunction2} view of this {@link BiDoubleToLongFunction}, autoboxing the primitive input
 * arguments. This makes it possible to use this {@code BiDoubleToLongFunction} with methods provided by the
 * {@code JDK}.
 *
 * @return A composed {@code BiFunction2} which represents this {@code BiDoubleToLongFunction}.
 */
@Nonnull
default BiFunction2<Double, Double, Long> boxed() {
    return (value1, value2) -> applyAsLong(value1, value2);
}
}
| |
package com.frozeninferno.nexusbios;
import android.app.ActionBar;
import android.app.Activity;
import android.app.Fragment;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.FrameLayout;
import android.widget.ListView;
/**
 * Fragment used for managing interactions for and presentation of a navigation drawer.
 * See the <a href="https://developer.android.com/design/patterns/navigation-drawer.html#Interaction">
 * design guidelines</a> for a complete explanation of the behaviors implemented here.
 */
public class NavigationDrawerFragment extends Fragment {
    /**
     * Instance-state key used to remember the position of the selected item.
     */
    private static final String STATE_SELECTED_POSITION = "selected_navigation_drawer_position";
    /**
     * Per the design guidelines, you should show the drawer on launch until the user manually
     * expands it. This shared preference tracks this.
     */
    private static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";
    /**
     * A pointer to the current callbacks instance (the Activity).
     */
    private NavigationDrawerCallbacks mCallbacks;
    /**
     * Helper component that ties the action bar to the navigation drawer.
     */
    private ActionBarDrawerToggle mDrawerToggle;
    private DrawerLayout mDrawerLayout;
    private ListView mDrawerListView;
    private View mFragmentContainerView;
    // Main content container shifted while the drawer slides; looked up lazily and cached.
    private FrameLayout frame;
    private int mCurrentSelectedPosition = 0;
    private boolean mUserLearnedDrawer;

    public NavigationDrawerFragment() {
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Read in the flag indicating whether or not the user has demonstrated awareness of the
        // drawer. See PREF_USER_LEARNED_DRAWER for details.
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
        mUserLearnedDrawer = sp.getBoolean(PREF_USER_LEARNED_DRAWER, false);
        if (savedInstanceState != null) {
            mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
        }
        // Select either the default item (0) or the last selected item.
        selectItem(mCurrentSelectedPosition);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Indicate that this fragment would like to influence the set of actions in the action bar.
        setHasOptionsMenu(true);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        mDrawerListView = (ListView) inflater.inflate(
                R.layout.fragment_navigation_drawer, container, false);
        mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                selectItem(position);
            }
        });
        mDrawerListView.setAdapter(new ArrayAdapter<>(
                getActionBar().getThemedContext(),
                android.R.layout.simple_list_item_activated_1,
                android.R.id.text1,
                new String[]{
                        getString(R.string.title_section1),
                        getString(R.string.title_install),
                        getString(R.string.title_uninstall)
                }
        ));
        mDrawerListView.setItemChecked(mCurrentSelectedPosition, true);
        return mDrawerListView;
    }

    /**
     * @return {@code true} when the drawer layout exists and the drawer is currently open.
     */
    public boolean isDrawerOpen() {
        return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView);
    }

    /**
     * Users of this fragment must call this method to set up the navigation drawer interactions.
     *
     * @param fragmentId   The android:id of this fragment in its activity's layout.
     * @param drawerLayout The DrawerLayout containing this fragment's UI.
     */
    public void setUp(int fragmentId, DrawerLayout drawerLayout) {
        mFragmentContainerView = getActivity().findViewById(fragmentId);
        mDrawerLayout = drawerLayout;
        // set a custom shadow that overlays the main content when the drawer opens
        mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
        // set up the drawer's list view with items and click listener
        ActionBar actionBar = getActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        actionBar.setHomeButtonEnabled(true);
        // ActionBarDrawerToggle ties together the proper interactions
        // between the navigation drawer and the action bar app icon.
        mDrawerToggle = new ActionBarDrawerToggle(
                getActivity(), /* host Activity */
                mDrawerLayout, /* DrawerLayout object */
                R.drawable.ic_drawer, /* nav drawer image to replace 'Up' caret */
                R.string.navigation_drawer_open, /* "open drawer" description for accessibility */
                R.string.navigation_drawer_close /* "close drawer" description for accessibility */
        ) {
            @Override
            public void onDrawerClosed(View drawerView) {
                super.onDrawerClosed(drawerView);
                if (!isAdded()) {
                    return;
                }
                getActivity().invalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }

            @Override
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                if (!isAdded()) {
                    return;
                }
                if (!mUserLearnedDrawer) {
                    // The user manually opened the drawer; store this flag to prevent auto-showing
                    // the navigation drawer automatically in the future.
                    mUserLearnedDrawer = true;
                    SharedPreferences sp = PreferenceManager
                            .getDefaultSharedPreferences(getActivity());
                    sp.edit().putBoolean(PREF_USER_LEARNED_DRAWER, true).apply();
                }
                getActivity().invalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }

            @Override
            public void onDrawerSlide(View drawerView, float slideOffset) {
                // Shift the main content in step with the drawer edge.
                float moveFactor = (mDrawerListView.getWidth() * slideOffset);
                // Cache the container lookup: onDrawerSlide fires on every animation frame,
                // so repeating findViewById here was wasted work.
                if (frame == null) {
                    frame = (FrameLayout) getActivity().findViewById(R.id.container);
                }
                frame.setTranslationX(moveFactor);
            }
        };
        // If the user hasn't 'learned' about the drawer, open it to introduce them to the drawer,
        // per the navigation drawer design guidelines.
        //if (!mUserLearnedDrawer && !mFromSavedInstanceState) {
        //    mDrawerLayout.openDrawer(mFragmentContainerView);
        //}
        // Defer code dependent on restoration of previous instance state.
        mDrawerLayout.post(new Runnable() {
            @Override
            public void run() {
                mDrawerToggle.syncState();
            }
        });
        mDrawerLayout.setDrawerListener(mDrawerToggle);
    }

    /**
     * Highlights the given row, closes the drawer, and notifies the host activity.
     */
    private void selectItem(int position) {
        mCurrentSelectedPosition = position;
        if (mDrawerListView != null) {
            mDrawerListView.setItemChecked(position, true);
        }
        if (mDrawerLayout != null) {
            mDrawerLayout.closeDrawer(mFragmentContainerView);
        }
        if (mCallbacks != null) {
            mCallbacks.onNavigationDrawerItemSelected(position);
        }
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        try {
            mCallbacks = (NavigationDrawerCallbacks) activity;
        } catch (ClassCastException e) {
            throw new ClassCastException("Activity must implement NavigationDrawerCallbacks.");
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        mCallbacks = null;
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // Forward the new configuration to the drawer toggle component.
        mDrawerToggle.onConfigurationChanged(newConfig);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // If the drawer is open, show the global app actions in the action bar. See also
        // showGlobalContextActionBar, which controls the top-left area of the action bar.
        if (mDrawerLayout != null && isDrawerOpen()) {
            inflater.inflate(R.menu.global, menu);
            showGlobalContextActionBar();
        }
        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        return mDrawerToggle.onOptionsItemSelected(item) || super.onOptionsItemSelected(item);
    }

    /**
     * Per the navigation drawer design guidelines, updates the action bar to show the global app
     * 'context', rather than just what's in the current screen.
     */
    private void showGlobalContextActionBar() {
        ActionBar actionBar = getActionBar();
        actionBar.setDisplayShowTitleEnabled(true);
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        actionBar.setTitle(R.string.app_name);
    }

    private ActionBar getActionBar() {
        return getActivity().getActionBar();
    }

    /**
     * Callbacks interface that all activities using this fragment must implement.
     */
    public interface NavigationDrawerCallbacks {
        /**
         * Called when an item in the navigation drawer is selected.
         */
        void onNavigationDrawerItemSelected(int position);
    }
}
| |
package org.motechproject.openmrs19.tasks.impl;
import org.joda.time.DateTime;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.motechproject.openmrs19.domain.Concept;
import org.motechproject.openmrs19.domain.ConceptName;
import org.motechproject.openmrs19.domain.Encounter;
import org.motechproject.openmrs19.domain.EncounterType;
import org.motechproject.openmrs19.domain.Identifier;
import org.motechproject.openmrs19.domain.IdentifierType;
import org.motechproject.openmrs19.domain.Location;
import org.motechproject.openmrs19.domain.Observation;
import org.motechproject.openmrs19.domain.Patient;
import org.motechproject.openmrs19.domain.Person;
import org.motechproject.openmrs19.domain.Provider;
import org.motechproject.openmrs19.service.OpenMRSConceptService;
import org.motechproject.openmrs19.service.OpenMRSEncounterService;
import org.motechproject.openmrs19.service.OpenMRSLocationService;
import org.motechproject.openmrs19.service.OpenMRSPatientService;
import org.motechproject.openmrs19.service.OpenMRSPersonService;
import org.motechproject.openmrs19.service.OpenMRSProviderService;
import org.motechproject.openmrs19.tasks.OpenMRSActionProxyService;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static junit.framework.Assert.assertEquals;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.verify;
@RunWith(MockitoJUnitRunner.class)
public class OpenMRSActionProxyServiceTest {

    private static final String CONFIG_NAME = "Configuration name";

    @Mock
    private OpenMRSConceptService conceptService;
    @Mock
    private OpenMRSEncounterService encounterService;
    @Mock
    private OpenMRSLocationService locationService;
    @Mock
    private OpenMRSPatientService patientService;
    @Mock
    private OpenMRSPersonService personService;
    @Mock
    private OpenMRSProviderService providerService;

    @Captor
    private ArgumentCaptor<Encounter> encounterCaptor;
    @Captor
    private ArgumentCaptor<Patient> patientCaptor;
    @Captor
    private ArgumentCaptor<Person> personCaptor;

    @InjectMocks
    private OpenMRSActionProxyService openMRSActionProxyService = new OpenMRSActionProxyServiceImpl();

    @Test
    public void shouldCreateEncounterWithGivenParameters() {
        Location location = new Location();
        location.setName("testLocation");
        Patient patient = new Patient();
        patient.setUuid("10");
        Provider provider = new Provider();
        provider.setUuid("20");
        Person person = new Person();
        person.setUuid("30");
        provider.setPerson(person);
        DateTime encounterDatetime = new DateTime("2000-08-16T07:22:05Z");
        Map<String, String> observations = new HashMap<>();
        observations.put("testConceptName", "testObservationValueName");
        List<Observation> obsList = createObservationList();
        Encounter encounter = new Encounter(location, new EncounterType("testEncounterType"),
                encounterDatetime.toDate(), patient, provider.getPerson(), obsList);
        doReturn(patient).when(patientService).getPatientByUuid(eq(CONFIG_NAME), eq(patient.getUuid()));
        doReturn(provider).when(providerService).getProviderByUuid(eq(CONFIG_NAME), eq(provider.getUuid()));
        doReturn(Collections.singletonList(location))
                .when(locationService).getLocations(eq(CONFIG_NAME), eq(location.getName()));
        openMRSActionProxyService.createEncounter(CONFIG_NAME, new DateTime(encounter.getEncounterDatetime()),
                encounter.getEncounterType().getName(), location.getName(), patient.getUuid(), provider.getUuid(),
                observations);
        verify(encounterService).createEncounter(eq(CONFIG_NAME), encounterCaptor.capture());
        assertEquals(encounter, encounterCaptor.getValue());
    }

    @Test
    public void shouldCreatePatientWithGivenParameters() {
        Person person = createTestPerson();
        Concept causeOfDeath = createTestConcept("testCauseOfDeath");
        person.setDead(true);
        person.setCauseOfDeath(causeOfDeath);
        Location location = new Location();
        location.setName("testLocation");
        Identifier identifier = new Identifier("1000", new IdentifierType("CommCare CaseID"));
        Patient patient = new Patient(Collections.singletonList(identifier), person, "500", location);
        Map<String, String> identifiersMap = new HashMap<>();
        identifiersMap.put("CommCare CaseID", "1000");
        doReturn(causeOfDeath).when(conceptService).getConceptByUuid(eq(CONFIG_NAME), eq(causeOfDeath.getUuid()));
        doReturn(Collections.singletonList(location))
                .when(locationService).getLocations(eq(CONFIG_NAME), eq(location.getName()));
        callCreatePatient(person, causeOfDeath.getUuid(), patient.getMotechId(), location.getName(), identifiersMap);
        verify(patientService).createPatient(eq(CONFIG_NAME), patientCaptor.capture());
        assertEquals(patient, patientCaptor.getValue());
    }

    @Test
    public void shouldCreatePatientWithDefaultLocationWhenLocationNameIsNotProvided() {
        Person person = createTestPerson();
        Location location = new Location();
        location.setName(OpenMRSActionProxyService.DEFAULT_LOCATION_NAME);
        Identifier identifier = new Identifier("1000", new IdentifierType("CommCare CaseID"));
        Patient patient = new Patient(Collections.singletonList(identifier), person, "500", location);
        Map<String, String> identifiersMap = new HashMap<>();
        identifiersMap.put("CommCare CaseID", "1000");
        doReturn(Collections.singletonList(location))
                .when(locationService).getLocations(eq(CONFIG_NAME), eq(OpenMRSActionProxyService.DEFAULT_LOCATION_NAME));
        callCreatePatient(person, "", patient.getMotechId(), location.getName(), identifiersMap);
        verify(patientService).createPatient(eq(CONFIG_NAME), patientCaptor.capture());
        assertEquals(patient, patientCaptor.getValue());
    }

    @Test
    public void shouldNotUsedDefaultLocationWhenLocationForGivenNameIsNotFound() {
        Person person = createTestPerson();
        Location location = new Location();
        location.setName("testLocationNameForNotExistingLocation");
        Identifier identifier = new Identifier("1000", new IdentifierType("CommCare CaseID"));
        Patient patient = new Patient(Collections.singletonList(identifier), person, "500", null);
        Map<String, String> identifiersMap = new HashMap<>();
        identifiersMap.put("CommCare CaseID", "1000");
        doReturn(Collections.emptyList()).when(locationService).getLocations(eq(CONFIG_NAME), eq(location.getName()));
        callCreatePatient(person, "", patient.getMotechId(), location.getName(), identifiersMap);
        verify(patientService).createPatient(eq(CONFIG_NAME), patientCaptor.capture());
        // the expected patient object has location value set to null, the actual object should be the same
        assertEquals(patient, patientCaptor.getValue());
    }

    @Test
    public void shouldUpdatePersonWithGivenParameters() {
        Person person = createTestPerson();
        Concept causeOfDeath = createTestConcept("testCauseOfDeathConcept");
        person.setDead(true);
        person.setCauseOfDeath(causeOfDeath);
        Person.Address personAddress = person.getPreferredAddress();
        doReturn(causeOfDeath).when(conceptService).getConceptByUuid(eq(CONFIG_NAME), eq(causeOfDeath.getUuid()));
        openMRSActionProxyService.updatePerson(CONFIG_NAME, person.getUuid(), person.getPreferredName().getGivenName(),
                person.getPreferredName().getMiddleName(), person.getPreferredName().getFamilyName(),
                personAddress.getAddress1(), personAddress.getAddress2(), personAddress.getAddress3(),
                personAddress.getAddress4(), personAddress.getAddress5(), personAddress.getAddress6(),
                personAddress.getCityVillage(), personAddress.getStateProvince(), personAddress.getCountry(),
                personAddress.getPostalCode(), personAddress.getCountyDistrict(), personAddress.getLatitude(),
                personAddress.getLongitude(), new DateTime(personAddress.getStartDate()),
                new DateTime(personAddress.getEndDate()), new DateTime(person.getBirthdate()),
                person.getBirthdateEstimated(), person.getGender(), person.getDead(), causeOfDeath.getUuid());
        verify(personService).updatePerson(eq(CONFIG_NAME), personCaptor.capture());
        assertEquals(person, personCaptor.getValue());
    }

    @Test
    public void shouldUpdatePatientIdentifiers() {
        Patient patient = new Patient();
        patient.setUuid("10");
        Map<String, String> identifiersMap = new HashMap<>();
        identifiersMap.put("CommCare CaseID", "1000");
        openMRSActionProxyService.updatePatientIdentifiers(CONFIG_NAME, patient.getUuid(), identifiersMap);
        verify(patientService).updatePatientIdentifiers(eq(CONFIG_NAME), patientCaptor.capture());
        Identifier patientCaptorIdentifier = patientCaptor.getValue().getIdentifiers().get(0);
        assertEquals(patient.getUuid(), patientCaptor.getValue().getUuid());
        assertEquals("1000", patientCaptorIdentifier.getIdentifier());
        assertEquals("CommCare CaseID", patientCaptorIdentifier.getIdentifierType().getName());
    }

    /**
     * Invokes {@code createPatient} on the proxy service with every positional argument derived
     * from the given person. Extracted because the identical long call was repeated verbatim in
     * three tests; only the cause-of-death uuid, motech id, location name, and identifiers vary.
     */
    private void callCreatePatient(Person person, String causeOfDeathUuid, String motechId,
                                   String locationName, Map<String, String> identifiersMap) {
        Person.Address personAddress = person.getPreferredAddress();
        openMRSActionProxyService.createPatient(CONFIG_NAME, person.getPreferredName().getGivenName(),
                person.getPreferredName().getMiddleName(), person.getPreferredName().getFamilyName(),
                personAddress.getAddress1(), personAddress.getAddress2(), personAddress.getAddress3(),
                personAddress.getAddress4(), personAddress.getAddress5(), personAddress.getAddress6(),
                personAddress.getCityVillage(), personAddress.getStateProvince(), personAddress.getCountry(),
                personAddress.getPostalCode(), personAddress.getCountyDistrict(), personAddress.getLatitude(),
                personAddress.getLongitude(), new DateTime(personAddress.getStartDate()),
                new DateTime(personAddress.getEndDate()), new DateTime(person.getBirthdate()),
                person.getBirthdateEstimated(), person.getGender(), person.getDead(), causeOfDeathUuid,
                motechId, locationName, identifiersMap);
    }

    /** Builds a fully-populated person fixture (name, address, gender, birthdate). */
    private Person createTestPerson() {
        Person person = new Person();
        Person.Name name = new Person.Name();
        name.setGivenName("John");
        name.setMiddleName("Robert");
        name.setFamilyName("Smith");
        person.setPreferredName(name);
        person.setNames(Collections.singletonList(name));
        Person.Address address = new Person.Address("address 1", "address 2", "address 3", "address 4", "address 5",
                "address 6", "City", "State", "Country", "000000", "County district", "30", "50",
                new DateTime("2000-08-16T07:22:05Z").toDate(), new DateTime("2100-08-16T07:22:05Z").toDate());
        person.setPreferredAddress(address);
        person.setAddresses(Collections.singletonList(address));
        person.setGender("M");
        person.setBirthdate(new DateTime("2000-08-16T07:22:05Z").toDate());
        person.setBirthdateEstimated(true);
        person.setDead(false);
        return person;
    }

    /** Builds a TEXT concept fixture with the given name and a fixed uuid of "100". */
    private Concept createTestConcept(String testConceptName) {
        Concept concept = new Concept();
        ConceptName conceptName = new ConceptName(testConceptName);
        concept.setNames(Collections.singletonList(conceptName));
        concept.setDatatype(new Concept.DataType("TEXT"));
        concept.setConceptClass(new Concept.ConceptClass("Test"));
        concept.setUuid("100");
        return concept;
    }

    /** Builds a single-element observation list matching the encounter-test expectations. */
    private List<Observation> createObservationList() {
        Observation observation = new Observation();
        ConceptName conceptName = new ConceptName("testConceptName");
        Concept concept = new Concept(conceptName);
        observation.setConcept(concept);
        observation.setValue(new Observation.ObservationValue("testObservationValueName"));
        observation.setObsDatetime(new DateTime("2000-08-16T07:22:05Z").toDate());
        return Collections.singletonList(observation);
    }
}
| |
/*
* Copyright 2020 Verizon Media
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yahoo.athenz.common.server.util;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yahoo.athenz.auth.Authority;
import com.yahoo.athenz.auth.Principal;
import com.yahoo.athenz.auth.util.StringUtils;
import com.yahoo.athenz.common.config.AuthzDetailsEntity;
import com.yahoo.athenz.common.server.rest.ResourceException;
import com.yahoo.athenz.zms.*;
import com.yahoo.rdl.Struct;
import com.yahoo.rdl.Timestamp;
import org.eclipse.jetty.util.StringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
public class AuthzHelper {
private static final Logger LOGGER = LoggerFactory.getLogger(AuthzHelper.class);
private static final String ASSUME_ROLE = "assume_role";
private static final ObjectMapper JSON_MAPPER = initJsonMapper();
/** Builds the shared JSON mapper, configured to reject unknown properties during deserialization. */
static ObjectMapper initJsonMapper() {
    // FAIL_ON_UNKNOWN_PROPERTIES is enabled so unexpected fields surface as errors
    // instead of being silently dropped.
    return new ObjectMapper()
            .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true);
}
/**
 * Removes from {@code originalRoleMembers}, in place, every member whose name matches
 * (case-insensitively) an entry in {@code removeRoleMembers}. Either list being {@code null}
 * makes this a no-op.
 *
 * @param originalRoleMembers list to prune, modified in place
 * @param removeRoleMembers members to drop from the original list
 */
public static void removeRoleMembers(List<RoleMember> originalRoleMembers, List<RoleMember> removeRoleMembers) {
    if (originalRoleMembers == null || removeRoleMembers == null) {
        return;
    }
    removeRoleMembers.forEach(removeMember -> originalRoleMembers.removeIf(
            member -> member.getMemberName().equalsIgnoreCase(removeMember.getMemberName())));
}
/**
 * Removes from {@code originalGroupMembers}, in place, every member whose name matches
 * (case-insensitively) an entry in {@code removeGroupMembers}. Either list being {@code null}
 * makes this a no-op.
 *
 * @param originalGroupMembers list to prune, modified in place
 * @param removeGroupMembers members to drop from the original list
 */
public static void removeGroupMembers(List<GroupMember> originalGroupMembers, List<GroupMember> removeGroupMembers) {
    if (originalGroupMembers == null || removeGroupMembers == null) {
        return;
    }
    removeGroupMembers.forEach(removeMember -> originalGroupMembers.removeIf(
            member -> member.getMemberName().equalsIgnoreCase(removeMember.getMemberName())));
}
/** A member counts as disabled when the system-disabled flag is present and non-zero. */
public static boolean isMemberDisabled(Integer systemDisabled) {
    if (systemDisabled == null) {
        return false;
    }
    return systemDisabled != 0;
}
/** Returns true when an expiration timestamp is set and lies strictly before the given time. */
public static boolean isMemberExpired(Timestamp expiration, long currentTime) {
    if (expiration == null) {
        return false;
    }
    return expiration.millis() < currentTime;
}
public static boolean shouldSkipGroupMember(GroupMember member, long currentTime) {
return isMemberDisabled(member.getSystemDisabled()) || isMemberExpired(member.getExpiration(), currentTime);
}
public static boolean isMemberOfGroup(List<GroupMember> groupMembers, final String member) {
if (groupMembers == null) {
return false;
}
return checkGroupMemberValidity(groupMembers, member);
}
public static boolean checkGroupMemberValidity(List<GroupMember> groupMembers, final String member) {
// we need to make sure that both the user is not expired
// and not disabled by the system
boolean isMember = false;
long currentTime = System.currentTimeMillis();
for (GroupMember memberInfo: groupMembers) {
final String memberName = memberInfo.getMemberName();
if (memberNameMatch(memberName, member)) {
isMember = !isMemberDisabled(memberInfo.getSystemDisabled()) && !isMemberExpired(memberInfo.getExpiration(), currentTime);
break;
}
}
return isMember;
}
public static boolean memberNameMatch(final String memberName, final String matchName) {
// we are supporting 4 formats for role members
// *, <domain>.[user]*, <domain>.<user>, and <domain>:group.<group-name>
// special handling for wildcards only
if (memberName.equals("*")) {
return true;
} else if (memberName.endsWith("*")) {
return matchName.startsWith(memberName.substring(0, memberName.length() - 1));
} else {
return memberName.equals(matchName);
}
}
public static boolean shouldRunDelegatedTrustCheck(final String trust, final String trustDomain) {
// if no trust field field then no delegated trust check
if (trust == null) {
return false;
}
// if no specific trust domain specifies then we need
// run the delegated trust check for this domain
if (trustDomain == null) {
return true;
}
// otherwise we'll run the delegated trust check only if
// domain name matches
return trust.equalsIgnoreCase(trustDomain);
}
public static String retrieveResourceDomain(String resource, String op, String trustDomain) {
// special handling for ASSUME_ROLE assertions. Since any assertion with
// that action refers to a resource in another domain, there is no point
// to retrieve the domain name from the resource. In these cases the caller
// must specify the trust domain attribute so we'll use that instead and
// if one is not specified then we'll fall back to using the domain name
// from the resource
String domainName;
if (ASSUME_ROLE.equalsIgnoreCase(op) && trustDomain != null) {
domainName = trustDomain;
} else {
domainName = extractResourceDomainName(resource);
}
return domainName;
}
public static String extractResourceDomainName(final String resource) {
int idx = resource.indexOf(':');
if (idx == -1) {
return null;
}
return resource.substring(0, idx);
}
public static boolean authorityAuthorizationAllowed(Principal principal) {
Authority authority = principal.getAuthority();
if (authority == null) {
return true;
}
return authority.allowAuthorization();
}
public static boolean checkRoleMemberValidity(List<RoleMember> roleMembers, final String member,
GroupMembersFetcher groupMembersFetcher) {
// we need to make sure that both the user is not expired
// and not disabled by the system. the members can also
// include groups so that means even if we get a response
// from one group that is expired, we can't just stop
// and need to check the other groups as well.
// For efficiency reasons we'll process groups at the
// end so in case we get a match in a role there is no
// need to look at the groups at all
List<RoleMember> groupMembers = new ArrayList<>();
for (RoleMember memberInfo: roleMembers) {
if (memberInfo.getPrincipalType() != null && memberInfo.getPrincipalType() == Principal.Type.GROUP.getValue()) {
groupMembers.add(memberInfo);
}
}
// first only process regular members
boolean isMember = false;
long currentTime = System.currentTimeMillis();
for (RoleMember memberInfo: roleMembers) {
if (memberInfo.getPrincipalType() != null && memberInfo.getPrincipalType() == Principal.Type.GROUP.getValue()) {
continue;
}
final String memberName = memberInfo.getMemberName();
if (memberNameMatch(memberName, member)) {
isMember = !isMemberDisabled(memberInfo.getSystemDisabled()) && !isMemberExpired(memberInfo.getExpiration(), currentTime);
break;
}
}
// if we have a match or no group members then we're done
if (isMember || groupMembers.isEmpty()) {
return isMember;
}
// now let's process our groups
for (RoleMember memberInfo : groupMembers) {
// if the group is expired there is no need to check
if (isMemberExpired(memberInfo.getExpiration(), currentTime)) {
continue;
}
isMember = isMemberOfGroup(groupMembersFetcher.getGroupMembers(memberInfo.getMemberName()), member);
if (isMember) {
break;
}
}
return isMember;
}
public static boolean isMemberOfRole(Role role, final String member, GroupMembersFetcher groupMembersFetcher) {
final List<RoleMember> members = role.getRoleMembers();
if (members == null) {
return false;
}
return checkRoleMemberValidity(members, member, groupMembersFetcher);
}
public static boolean assumeRoleResourceMatch(String roleName, Assertion assertion) {
if (!ASSUME_ROLE.equalsIgnoreCase(assertion.getAction())) {
return false;
}
String rezPattern = StringUtils.patternFromGlob(assertion.getResource());
return roleName.matches(rezPattern);
}
public static boolean matchDelegatedTrustPolicy(Policy policy, final String roleName, final String roleMember,
List<Role> roles, GroupMembersFetcher groupMembersFetcher) {
List<Assertion> assertions = policy.getAssertions();
if (assertions == null) {
return false;
}
for (Assertion assertion : assertions) {
if (matchDelegatedTrustAssertion(assertion, roleName, roleMember, roles, groupMembersFetcher)) {
return true;
}
}
return false;
}
public static boolean matchDelegatedTrustAssertion(Assertion assertion, final String roleName, final String roleMember,
List<Role> roles, GroupMembersFetcher groupMembersFetcher) {
if (!assumeRoleResourceMatch(roleName, assertion)) {
return false;
}
String rolePattern = StringUtils.patternFromGlob(assertion.getRole());
for (Role role : roles) {
String name = role.getName();
if (!name.matches(rolePattern)) {
continue;
}
if (isMemberOfRole(role, roleMember, groupMembersFetcher)) {
return true;
}
}
return false;
}
public static AuthzDetailsEntity convertEntityToAuthzDetailsEntity(Entity entity) throws JsonProcessingException {
Struct value = entity.getValue();
if (value == null) {
throw new ResourceException(ResourceException.BAD_REQUEST, "Entity has no value");
}
// the authorization details is the value of the data field
final String authzDetails = value.getString("data");
if (StringUtil.isEmpty(authzDetails)) {
throw new ResourceException(ResourceException.BAD_REQUEST, "Entity has no data field");
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Authorization Details json input: {}", authzDetails);
}
return JSON_MAPPER.readValue(authzDetails, AuthzDetailsEntity.class);
}
/**
* Extract group members for a given group name
*/
public interface GroupMembersFetcher {
/**
*
* @param groupName name of a group
* @return group members extracted from the given group
*/
List<GroupMember> getGroupMembers(String groupName);
}
}
| |
package uk.ac.manchester.cs.jfact.helpers;
/* This file is part of the JFact DL reasoner
Copyright 2011 by Ignazio Palmisano, Dmitry Tsarkov, University of Manchester
This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA*/
import java.util.Arrays;
/**
 * A set of primitive ints backed by a sorted array. Elements are stored in
 * ascending order in {@code values}; only the first {@code size} slots are
 * meaningful. The empty set is represented by {@code values == null}.
 */
public class FastSetSimple extends AbstractFastSet {

    /** Sorted backing storage; null denotes the empty set. */
    protected int[] values;
    /** Number of valid elements at the front of {@code values}. */
    protected int size = 0;
    /** Initial capacity and allocation granularity for the backing array. */
    protected static int defaultSize = 16;

    /**
     * Locates {@code key} in the sorted prefix of {@code values}, using the
     * same convention as {@link Arrays#binarySearch(int[], int)}: the index of
     * the key if present, otherwise {@code -(insertionPoint) - 1}. Small sets
     * are scanned linearly (cheaper than binary search below
     * {@code AbstractFastSet.limit}); larger ones use binary search.
     * Must only be called on a non-empty set.
     */
    protected int insertionIndex(int key) {
        if (key < values[0]) {
            return -1;
        }
        if (key > values[size - 1]) {
            return -size - 1;
        }
        int lowerbound = 0;
        if (size < AbstractFastSet.limit) {
            for (; lowerbound < size; lowerbound++) {
                if (values[lowerbound] > key) {
                    return -lowerbound - 1;
                }
                if (values[lowerbound] == key) {
                    return lowerbound;
                }
            }
            return -lowerbound - 1;
        }
        int upperbound = size - 1;
        while (lowerbound <= upperbound) {
            int delta = upperbound - lowerbound;
            int intermediate = lowerbound + delta / 2;
            if (values[intermediate] == key) {
                return intermediate;
            }
            if (values[intermediate] < key) {
                lowerbound = intermediate + 1;
            } else {
                upperbound = intermediate - 1;
            }
        }
        return -lowerbound - 1;
    }

    /** Creates an empty set. */
    public FastSetSimple() {}

    /**
     * Creates the union of two sets by merging their sorted arrays; elements
     * present in both inputs appear once in the result.
     */
    public FastSetSimple(FastSetSimple c1, FastSetSimple c2) {
        values = new int[(c1.size + c2.size) / defaultSize * defaultSize + defaultSize];
        int i = 0;
        int j = 0;
        int index = 0;
        for (; i < c1.size && j < c2.size; index++) {
            if (c1.values[i] < c2.values[j]) {
                values[index] = c1.values[i];
                i++;
            } else if (c2.values[j] < c1.values[i]) {
                values[index] = c2.values[j];
                j++;
            }
            // the result must be a set: equal elements advance both indexes
            else if (c1.values[i] == c2.values[j]) {
                values[index] = c1.values[i];
                i++;
                j++;
            }
        }
        // copy the tail of whichever input still has elements
        if (i < c1.size) {
            for (; i < c1.size; i++, index++) {
                values[index] = c1.values[i];
            }
            size = index;
        } else {
            for (; j < c2.size; j++, index++) {
                values[index] = c2.values[j];
            }
            size = index;
        }
    }

    /**
     * Returns the i-th smallest element.
     *
     * @throws IllegalArgumentException when the set is empty
     */
    @Override
    public int get(int i) {
        if (values != null) {
            return values[i];
        }
        throw new IllegalArgumentException("Illegal argument " + i + ": no such element");
    }

    /** Allocates the backing array for a previously empty set. */
    protected void init() {
        values = new int[defaultSize];
        size = 0;
    }

    /** Adds {@code e} keeping the array sorted; duplicates are ignored. */
    @Override
    public void add(int e) {
        int pos = -1;
        if (values == null) {
            init();
            // pos stays at -1, in an empty set that's the place to start - it
            // will become 0
        } else {
            // else find the right place
            pos = insertionIndex(e);
        }
        if (pos > -1) {
            // already present
            return;
        }
        int i = -pos - 1;
        // i is now the insertion point
        if (i >= values.length || size >= values.length) {
            // no space left, increase
            values = Arrays.copyOf(values, 2 * values.length);
        }
        // size ensured, shift the tail right and insert
        for (int j = size - 1; j >= i; j--) {
            values[j + 1] = values[j];
        }
        values[i] = e;
        size++;
    }

    /** Merges all elements of {@code c} into this set (sorted-array merge). */
    @Override
    public void addAll(FastSet c) {
        if (c.isEmpty()) {
            return;
        }
        if (values == null) {
            // extreme case: just copy the other set. toIntArray() already
            // returns a defensive copy and, unlike the previous cast to
            // FastSetSimple, works for any FastSet implementation.
            values = c.toIntArray();
            size = c.size();
            return;
        }
        int newsize = size + c.size();
        int[] merge = new int[newsize / defaultSize * defaultSize + defaultSize];
        int i = 0;
        int j = 0;
        int index = 0;
        for (; i < size() && j < c.size(); index++) {
            if (values[i] < c.get(j)) {
                merge[index] = values[i];
                i++;
            } else if (c.get(j) < values[i]) {
                merge[index] = c.get(j);
                j++;
            }
            // the result must be a set: equal elements advance both indexes
            else if (values[i] == c.get(j)) {
                merge[index] = values[i];
                i++;
                j++;
            }
        }
        // copy the tail of whichever input still has elements
        if (i < size()) {
            for (; i < size(); i++, index++) {
                merge[index] = values[i];
            }
            newsize = index;
        } else {
            for (; j < c.size(); j++, index++) {
                merge[index] = c.get(j);
            }
            newsize = index;
        }
        values = merge;
        size = newsize;
    }

    @Override
    public void clear() {
        values = null;
        size = 0;
    }

    @Override
    public boolean contains(int o) {
        if (values != null) {
            return insertionIndex(o) > -1;
        }
        return false;
    }

    /**
     * Returns true if every element of {@code c} is in this set. Both sets
     * being sorted allows a single linear sweep.
     */
    @Override
    public boolean containsAll(FastSet c) {
        if (c.isEmpty()) {
            return true;
        }
        if (isEmpty()) {
            return false;
        }
        if (c.size() > size) {
            return false;
        }
        if (get(0) > c.get(0) || get(size - 1) < c.get(c.size() - 1)) {
            // c boundaries are outside this set
            return false;
        }
        int i = 0;
        int j = 0;
        int currentValue;
        while (j < c.size()) {
            currentValue = c.get(j);
            boolean found = false;
            while (i < size) {
                if (get(i) == currentValue) {
                    // found the current value, next element in c - increase j
                    found = true;
                    break;
                }
                if (get(i) > currentValue) {
                    // found a value larger than the value it's looking for - c
                    // is not contained
                    return false;
                }
                // get(i) is < than current value: check next i
                i++;
            }
            if (!found) {
                // finished exploring this and currentValue was not found - it
                // happens if currentValue < any element in this set
                return false;
            }
            j++;
        }
        return true;
    }

    @Override
    public boolean isEmpty() {
        return values == null;
    }

    /** Returns true if this set and {@code c} share at least one element. */
    @Override
    public boolean containsAny(FastSet c) {
        if (c.isEmpty() || size == 0) {
            return false;
        }
        int i = 0;
        int j = 0;
        int currentValue;
        while (j < c.size()) {
            currentValue = c.get(j);
            while (i < size) {
                if (get(i) == currentValue) {
                    // found the current value - sets intersect
                    return true;
                }
                if (get(i) > currentValue) {
                    // passed where currentValue would be; try next element of c
                    break;
                }
                // get(i) is < than current value: check next i
                i++;
            }
            j++;
        }
        return false;
    }

    @Override
    public void remove(int o) {
        if (values == null) {
            return;
        }
        int i = insertionIndex(o);
        removeAt(i);
    }

    @Override
    public int size() {
        return size;
    }

    @Override
    public int[] toIntArray() {
        if (values == null) {
            return new int[0];
        }
        return Arrays.copyOf(values, size);
    }

    @Override
    public boolean intersect(FastSet f) {
        return containsAny(f);
    }

    /** Equal to any FastSet with the same elements in the same order. */
    @Override
    public boolean equals(Object arg0) {
        if (arg0 == null) {
            return false;
        }
        if (this == arg0) {
            return true;
        }
        if (arg0 instanceof FastSet) {
            FastSet arg = (FastSet) arg0;
            if (size != arg.size()) {
                return false;
            }
            for (int i = 0; i < size(); i++) {
                if (arg.get(i) != get(i)) {
                    return false;
                }
            }
            return true;
        }
        return false;
    }

    @Override
    public int hashCode() {
        return super.hashCode();
    }

    /** Removes the element at position {@code i}; out-of-range indexes are ignored. */
    @Override
    public void removeAt(int i) {
        if (values == null) {
            return;
        }
        if (i > -1 && i < size) {
            if (size == 1) {
                values = null;
                size = 0;
                return;
            }
            // shift the tail left over the removed slot
            for (int j = i; j < size - 1; j++) {
                values[j] = values[j + 1];
            }
            size--;
        }
        if (size == 0) {
            values = null;
        }
    }

    /** Removes the elements in positions [i, end). */
    @Override
    public void removeAll(int i, int end) {
        if (values == null) {
            return;
        }
        if (end < -1 || end < i || end > size || i < -1 || i > size) {
            throw new IllegalArgumentException("illegal arguments: " + i + " " + end
                    + " size: " + size);
        }
        if (size == 1 || i == 0 && end == size) {
            values = null;
            size = 0;
            return;
        }
        if (end == size) {
            size = i;
        } else {
            int delta = end - i;
            for (int j = i; j < size - delta; j++) {
                values[j] = values[j + delta];
            }
            size -= delta;
        }
        if (size == 0) {
            values = null;
        }
    }

    /**
     * Removes every listed value that is present in the set.
     *
     * FIX: the previous implementation only advanced the index into
     * {@code vals} on an exact hit, so once {@code vals[j]} was smaller than
     * all remaining elements no later value was ever removed (e.g. for
     * values [5, 10] and vals [1, 5], 5 was not removed). Now both sorted
     * sequences are merged properly.
     *
     * NOTE(review): removed slots are marked with Integer.MAX_VALUE and sorted
     * to the tail, so a set legitimately containing Integer.MAX_VALUE could be
     * mishandled - pre-existing sentinel choice, kept for compatibility.
     */
    @Override
    public void removeAllValues(int... vals) {
        if (values == null) {
            return;
        }
        if (vals.length == 1) {
            remove(vals[0]);
            return;
        }
        Arrays.sort(vals);
        int originalsize = size;
        int i = 0;
        int j = 0;
        while (i < originalsize && j < vals.length) {
            if (values[i] == vals[j]) {
                // mark for removal; sorting below pushes sentinels to the tail
                values[i] = Integer.MAX_VALUE;
                size--;
                i++;
                j++;
            } else if (values[i] < vals[j]) {
                i++;
            } else {
                j++;
            }
        }
        if (size == 0) {
            values = null;
        } else {
            Arrays.sort(values, 0, originalsize);
        }
    }

    /** Adds every int in [0, value] to the set. */
    @Override
    public void completeSet(int value) {
        for (int i = 0; i <= value; i++) {
            add(i);
        }
        // XXX notice: these sets go to negative numbers. Is this the best way?
    }
}
| |
/*
* Druid - a distributed column store.
* Copyright 2012 - 2015 Metamarkets Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.druid.segment.indexing;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;
import com.metamx.common.IAE;
import com.metamx.common.logger.Logger;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.InputRowParser;
import io.druid.data.input.impl.TimestampSpec;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.segment.indexing.granularity.GranularitySpec;
import io.druid.segment.indexing.granularity.UniformGranularitySpec;
import java.util.Arrays;
import java.util.Map;
import java.util.Set;
/**
*/
/**
 * Schema of a Druid ingestion task: the datasource name, the (lazily
 * deserialized) input row parser, the metric aggregators and the segment
 * granularity spec.
 */
public class DataSchema
{
  private static final Logger log = new Logger(DataSchema.class);

  private final String dataSource;
  // kept as a raw map so the concrete InputRowParser is resolved lazily in getParser()
  private final Map<String, Object> parser;
  private final AggregatorFactory[] aggregators;
  private final GranularitySpec granularitySpec;

  private final ObjectMapper jsonMapper;

  @JsonCreator
  public DataSchema(
      @JsonProperty("dataSource") String dataSource,
      @JsonProperty("parser") Map<String, Object> parser,
      @JsonProperty("metricsSpec") AggregatorFactory[] aggregators,
      @JsonProperty("granularitySpec") GranularitySpec granularitySpec,
      @JacksonInject ObjectMapper jsonMapper
  )
  {
    this.jsonMapper = Preconditions.checkNotNull(jsonMapper, "null ObjectMapper.");
    this.dataSource = Preconditions.checkNotNull(dataSource, "dataSource cannot be null. Please provide a dataSource.");
    this.parser = parser;

    // FIX: an absent metricsSpec deserializes to a null array, which used to
    // NPE on aggregators.length; warn and normalize to an empty array instead.
    if (aggregators == null || aggregators.length == 0) {
      log.warn("No metricsSpec has been specified. Are you sure this is what you want?");
    }
    this.aggregators = aggregators == null ? new AggregatorFactory[]{} : aggregators;

    if (granularitySpec == null) {
      log.warn("No granularitySpec has been specified. Using UniformGranularitySpec as default.");
      this.granularitySpec = new UniformGranularitySpec(null, null, null);
    } else {
      this.granularitySpec = granularitySpec;
    }
  }

  @JsonProperty
  public String getDataSource()
  {
    return dataSource;
  }

  @JsonProperty("parser")
  public Map<String, Object> getParserMap()
  {
    return parser;
  }

  /**
   * Materializes the InputRowParser from the raw parser map and augments its
   * dimension exclusions with the metric names and metric input fields, plus
   * the timestamp column (unless explicitly listed as a dimension). Returns
   * null when no parser map was provided. Throws IAE when a metric and a
   * dimension share a name.
   */
  @JsonIgnore
  public InputRowParser getParser()
  {
    if (parser == null) {
      log.warn("No parser has been specified");
      return null;
    }

    final InputRowParser inputRowParser = jsonMapper.convertValue(this.parser, InputRowParser.class);

    // metric input fields and metric names must not be treated as dimensions
    final Set<String> dimensionExclusions = Sets.newHashSet();
    for (AggregatorFactory aggregator : aggregators) {
      dimensionExclusions.addAll(aggregator.requiredFields());
      dimensionExclusions.add(aggregator.getName());
    }

    if (inputRowParser.getParseSpec() != null) {
      final DimensionsSpec dimensionsSpec = inputRowParser.getParseSpec().getDimensionsSpec();
      final TimestampSpec timestampSpec = inputRowParser.getParseSpec().getTimestampSpec();

      // exclude timestamp from dimensions by default, unless explicitly included in the list of dimensions.
      // FIX: guard against a null dimensionsSpec, which previously caused an NPE here
      if (timestampSpec != null) {
        final String timestampColumn = timestampSpec.getTimestampColumn();
        final boolean explicitlyIncluded = dimensionsSpec != null
                                           && dimensionsSpec.hasCustomDimensions()
                                           && dimensionsSpec.getDimensions().contains(timestampColumn);
        if (!explicitlyIncluded) {
          dimensionExclusions.add(timestampColumn);
        }
      }

      if (dimensionsSpec != null) {
        final Set<String> metSet = Sets.newHashSet();
        for (AggregatorFactory aggregator : aggregators) {
          metSet.add(aggregator.getName());
        }
        final Set<String> dimSet = Sets.newHashSet(dimensionsSpec.getDimensions());
        final Set<String> overlap = Sets.intersection(metSet, dimSet);
        if (!overlap.isEmpty()) {
          throw new IAE(
              "Cannot have overlapping dimensions and metrics of the same name. Please change the name of the metric. Overlap: %s",
              overlap
          );
        }

        // never exclude an explicitly listed dimension
        return inputRowParser.withParseSpec(
            inputRowParser.getParseSpec()
                          .withDimensionsSpec(
                              dimensionsSpec.withDimensionExclusions(Sets.difference(dimensionExclusions, dimSet))
                          )
        );
      } else {
        return inputRowParser;
      }
    } else {
      log.warn("No parseSpec in parser has been specified.");
      return inputRowParser;
    }
  }

  @JsonProperty("metricsSpec")
  public AggregatorFactory[] getAggregators()
  {
    return aggregators;
  }

  @JsonProperty
  public GranularitySpec getGranularitySpec()
  {
    return granularitySpec;
  }

  /** Returns a copy of this schema with a different granularity spec. */
  public DataSchema withGranularitySpec(GranularitySpec granularitySpec)
  {
    return new DataSchema(dataSource, parser, aggregators, granularitySpec, jsonMapper);
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }

    DataSchema that = (DataSchema) o;

    if (!dataSource.equals(that.dataSource)) {
      return false;
    }
    if (parser != null ? !parser.equals(that.parser) : that.parser != null) {
      return false;
    }
    // element-wise comparison is correct here: AggregatorFactory implements equals
    if (!Arrays.equals(aggregators, that.aggregators)) {
      return false;
    }
    // granularitySpec is never null (see constructor)
    return granularitySpec.equals(that.granularitySpec);
  }

  @Override
  public int hashCode()
  {
    int result = dataSource.hashCode();
    result = 31 * result + (parser != null ? parser.hashCode() : 0);
    result = 31 * result + Arrays.hashCode(aggregators);
    result = 31 * result + granularitySpec.hashCode();
    return result;
  }

  @Override
  public String toString()
  {
    return "DataSchema{" +
           "dataSource='" + dataSource + '\'' +
           ", parser=" + parser +
           ", aggregators=" + Arrays.toString(aggregators) +
           ", granularitySpec=" + granularitySpec +
           '}';
  }
}
| |
package com.example.pair.app;
import android.annotation.TargetApi;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.res.Configuration;
import android.os.Build;
import android.support.v7.app.ActionBarActivity;
import android.os.Bundle;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.GridLayout;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Collections;
public class MainActivity extends ActionBarActivity {
private ArrayList<Integer> imgIdentifier;
private int defaultBgID;
private int matchBgID;
private ArrayList<Button> buttons;
private int lastClickedIndex;
private Button startBtn;
private boolean gameStart;
private int operationCount;
private int matchCount = 0;
private TextView operationView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
defaultBgID = R.drawable.defaultbg;
matchBgID = R.drawable.match;
startBtn = (Button)findViewById(R.id.start);
gameStart = false;
lastClickedIndex = -1;
operationCount = 0;
matchCount = 0;
operationView = (TextView)findViewById(R.id.operation);
operationView.setText("Total operations: " + Integer.toString(operationCount));
buttons = new ArrayList<Button>();
imgIdentifier = new ArrayList<Integer>();
for(int i = 0;i < 16;i++){
String btnName = "button" + Integer.toString(i+1);
int id = getResources().getIdentifier(btnName,"id",getPackageName());
Button btn = (Button)(findViewById(id));
btn.setBackgroundResource(R.drawable.defaultbg);
buttons.add(btn);
}
final ArrayList<Integer> numList = new ArrayList<Integer>();
for(int i = 0;i < 8;i++){
numList.add(i);
numList.add(i);
}
Collections.shuffle(numList);
for(int i = 0;i < 8;i++){
String imgName = "bird" + Integer.toString(i+1);
int imgID = getResources().getIdentifier(imgName,"drawable",getPackageName());
imgIdentifier.add(imgID);
}
final AlertDialog.Builder reminder = new AlertDialog.Builder(this);
reminder.setMessage("Press the START button to start!");
reminder.setTitle("Reminder");
reminder.setPositiveButton("OK", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
dialogInterface.dismiss();
}
});
final AlertDialog.Builder success = new AlertDialog.Builder(this);
success.setMessage("All pairs are found!");
success.setTitle("Congratulations!");
success.setPositiveButton("OK", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
dialogInterface.dismiss();
}
});
startBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if(!gameStart){
startBtn.setText("restart");
gameStart = true;
}
else{
startBtn.setText("start");
lastClickedIndex = -1;
Collections.shuffle(numList);
operationCount = 0;
operationView.setText("Total operations: " + Integer.toString(operationCount));
gameStart = false;
for(int i = 0;i < 16;i++){
buttons.get(i).setBackgroundResource(defaultBgID);
buttons.get(i).setClickable(true);
}
}
}
});
for(int i = 1;i < 17;i++){
final int k = i;
buttons.get(i-1).setOnClickListener(new Button.OnClickListener() {
public void onClick(View v) {
if(!gameStart){
reminder.show();
return;
}
operationCount++;
operationView.setText("Total operations: " + Integer.toString(operationCount));
int currClickedIndex = k - 1;
if (lastClickedIndex == currClickedIndex) {
buttons.get(currClickedIndex).setBackgroundResource(defaultBgID);
lastClickedIndex = -1;
} else {
//btnClicked.set(currClickedIndex, Boolean.TRUE);
if (lastClickedIndex == -1) {
buttons.get(currClickedIndex).setBackgroundResource(imgIdentifier.get(numList.get(currClickedIndex)));
lastClickedIndex = currClickedIndex;
}else {
if (numList.get(lastClickedIndex).equals(numList.get(currClickedIndex))) {
buttons.get(currClickedIndex).setClickable(false);
buttons.get(lastClickedIndex).setClickable(false);
buttons.get(currClickedIndex).setBackgroundResource(matchBgID);
buttons.get(lastClickedIndex).setBackgroundResource(matchBgID);
lastClickedIndex = -1;
matchCount++;
if(matchCount == 8){
success.show();
matchCount = 0;
}
} else {
buttons.get(lastClickedIndex).setBackgroundResource(defaultBgID);
buttons.get(currClickedIndex).setBackgroundResource(imgIdentifier.get(numList.get(currClickedIndex)));
lastClickedIndex = currClickedIndex;
}
}
}
}
});
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
GridLayout.LayoutParams param = new GridLayout.LayoutParams();
param.columnSpec = GridLayout.spec(4, 4);
param.rowSpec = GridLayout.spec(0, 1);
operationView.setLayoutParams(param);
GridLayout.LayoutParams param1 = new GridLayout.LayoutParams();
param1.columnSpec = GridLayout.spec(4,4);
param1.rowSpec = GridLayout.spec(1, 1);
startBtn.setLayoutParams(param1);
}
else if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
GridLayout.LayoutParams param = new GridLayout.LayoutParams();
param.columnSpec = GridLayout.spec(0, 4);
param.rowSpec = GridLayout.spec(4, 1);
operationView.setLayoutParams(param);
GridLayout.LayoutParams param1 = new GridLayout.LayoutParams();
param1.columnSpec = GridLayout.spec(0, 4);
param1.rowSpec = GridLayout.spec(5, 1);
startBtn.setLayoutParams(param1);
startBtn.setGravity(Gravity.FILL);
}
}
}
| |
/*******************************************************************************
* Copyright (c) 2014 Filipe Campos.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.uminho.di.gsd.gossip.service.operations.aggregation;
import java.io.StringReader;
import org.apache.log4j.Logger;
import org.uminho.di.gsd.common.ApplicationServiceConstants;
import org.uminho.di.gsd.common.CommunicationProtocol;
import org.uminho.di.gsd.common.Constants;
import org.uminho.di.gsd.common.MessageUtil;
import org.uminho.di.gsd.gossip.service.repo.aggregation.AggregationMessage;
import org.ws4d.java.communication.TimeoutException;
import org.ws4d.java.schema.ComplexType;
import org.ws4d.java.schema.Element;
import org.ws4d.java.service.InvocationException;
import org.ws4d.java.service.parameter.ParameterValue;
import org.ws4d.java.types.URI;
/**
*
* @author fjoc
*/
public class AggPullOperation extends AggregationOperation {
static Logger logger = Logger.getLogger(AggPullOperation.class);
    /**
     * Creates the aggregation pull operation, registered under
     * {@code Constants.AggPullOperationName} on the aggregation port type,
     * and builds its input/output message schemas.
     *
     * NOTE(review): initInput/initOutput are overridable methods invoked from
     * the constructor; safe only while subclasses don't depend on their own
     * state during these calls — confirm if subclassed.
     */
    public AggPullOperation() {
        super(Constants.AggPullOperationName, Constants.AggregationPortQName);
        initInput();
        initOutput();
    }
@Override
protected void initInput() {
ComplexType req = new ComplexType(Constants.AggPullRequestTypeQName, ComplexType.CONTAINER_SEQUENCE);
req.addElement(getSvcEprElement());
req.addElement(getRoundsElement());
req.addElement(getXsltActionListElement());
Element in = new Element(Constants.AggPullRequestElementQName, req);
setInput(in);
}
@Override
protected void initOutput() {
ComplexType resp = new ComplexType(Constants.AggPullResponseTypeQName, ComplexType.CONTAINER_SEQUENCE);
resp.addElement(getMessagesListElement());
Element out = new Element(Constants.AggPullResponseElementQName, resp);
setOutput(out);
}
@Override
public ParameterValue invoke(ParameterValue parameterValue) throws InvocationException, TimeoutException {
long nanoTime = System.nanoTime();
long millisTime = System.currentTimeMillis();
return common_invoke(CommunicationProtocol.TCP, nanoTime, millisTime, null, parameterValue);
}
/**
 * Shared handler for an aggregation-pull ("AggPull") request, used by the TCP
 * entry point ({@code invoke}); the UDP branch is currently commented out.
 * <p>
 * For each XsltMessage in the request it extracts the XSLT, looks up (or
 * creates) the matching {@code AggregationMessage}, optionally fans the request
 * out to peers (when rounds &gt; 0), and builds a response message carrying the
 * aggregated value.
 *
 * @param communicationProtocol transport the request arrived on (TCP or UDP)
 * @param nanoTime              caller-captured {@code System.nanoTime()} timestamp
 * @param millisTime            caller-captured wall-clock timestamp (unused here)
 * @param sender                UDP sender, or null for TCP requests
 * @param pv                    the incoming request parameters
 * @return the response {@code ParameterValue}, or null when the request holds
 *         no XsltMessage elements
 */
public ParameterValue common_invoke(CommunicationProtocol communicationProtocol, long nanoTime, long millisTime, URI sender, ParameterValue pv) {
ParameterValue ret = null;
// pick xslts and messages
// check number of XsltMessage elements
String prefix = Constants.XsltActionListElementName + "/" + Constants.XsltActionElementName;
int num = pv.getChildrenCount(prefix);
logger.debug("Got " + num + " XsltMessages!");
prefix += "[";
if (num > 0) {
// if there are actions
// initialize serializer
// StringReader reader = null;
String xslt = null;
ret = createOutputValue();
int counter = 0;
String src = pv.getValue(Constants.SvcEprElementName);
String roundsStr = pv.getValue(Constants.RoundsElementName);
logger.debug("Got message with " + roundsStr + " rounds!");
// NOTE(review): parseInt throws NumberFormatException if the rounds element
// is missing or malformed — confirm upstream validation guarantees it.
int rounds = Integer.parseInt(roundsStr);
// cycle messages and extract each xslt
// WARNING: Assuming a single message
for (int i = 0; i < num; i++) {
String tempPrefix = prefix + i + "]/";
// extract xslt
xslt = extractXSLT(tempPrefix, pv);
if ((xslt != null) && (!xslt.isEmpty())) {
// If there is an xslt
// Each processed XsltMessage gets its own MessageContainer in the reply.
String responsePrefix = Constants.MessagesListElementName + "/"
+ Constants.MessageContainerElementName + "[" + counter + "]/";
ret.setValue(responsePrefix + Constants.RoundsElementName, Integer.toString(0));
// set action
String action = pv.getValue(tempPrefix + Constants.MessageInfoElementName + "/" + Constants.ActionElementName);
ret.setValue(responsePrefix + Constants.ActionElementName, action);
// set msgid
String msgId = pv.getValue(tempPrefix + Constants.MessageInfoElementName + "/" + Constants.MessageIdentifierElementName);
ret.setValue(responsePrefix + Constants.MessageIdentifierElementName, msgId);
// invoke agg on peers assuming that only one XSLTMessage is being sent
AggregationMessage agg = (AggregationMessage) processor.getMessage(new URI(msgId));
String val = null;
String current = Double.toString(processor.getService().getAppService().getLastValue());
if (agg == null) {
logger.debug("New AggPull message with id: " + msgId);
// new message, so invoke on peers
agg = new AggregationMessage(new URI(msgId), new URI(action), 0, current, 0, current, maxFanout, waitFanout, timeout, xslt);
processor.addMessage(agg);
}
else {
logger.debug("Duplicate AggPull message with id: " + msgId);
if (rounds == 0)
{
// A duplicate with zero rounds left means no more responses are expected:
// force the pending aggregation to complete.
logger.debug("Got 0 rounds with known message " + msgId + "! Timeouting aggregated message...");
agg.timeout();
}
}
if (rounds == 0)
{
// Leaf of the aggregation tree: answer with the locally held value.
logger.debug("Got 0 rounds with a new message " + msgId + "! Setting aggregated value as " + current);
val = current;
}
else
{
// Intermediate node: rewrite the sender EPR and forward with rounds-1.
// set my own epr
pv.setValue(Constants.SvcEprElementName, processor.getService().getSvcEPR());
// set rounds decremented
pv.setValue(Constants.RoundsElementName, "" + (--rounds));
if (communicationProtocol.equals(CommunicationProtocol.TCP)) {
val = processor.process_tcp_agg_pull(pv, nanoTime, msgId, agg, src);
} else {
// UDP
// processor.process_udp_agg_pull(pv, nanoTime, msgId, agg, sender);
}
}
// duplicate message, so just wait for response to return reply
// NOTE(review): this unconditionally overwrites the value computed above
// (including the rounds == 0 leaf value). Presumably agg.getResponseValue()
// blocks until the aggregation completes and returns the same value —
// confirm; otherwise the leaf/TCP result on the lines above is dead.
val = agg.getResponseValue();
// }
if ((val != null) && (!val.isEmpty())) {
processor.getService().getAppService().setLastValue(Double.parseDouble(val));
// store aggregate value to array
processor.setCurrentAggregateValue(msgId, val);
} else {
logger.warn("Some error occurred processing the XSLT. Using last value...");
val = Double.toString(processor.getService().getAppService().getLastValue());
}
// set resulting message
ret.setValue(responsePrefix + Constants.MessageElementName + "/" + ApplicationServiceConstants.infoTempValueElementName, val);
counter++;
processor.removeMessage(new URI(msgId));
}
else
{
logger.debug("Error with message! Didn't get XSLT!");
}
}
}
return ret;
}
/**
 * Handles an incoming UDP aggregation-pull request.
 * <p>
 * For each XsltMessage: if the message id is unknown, a new
 * {@code AggregationMessage} is registered, the sender is recorded as an
 * invoker, and the request is forwarded to peers via
 * {@code process_udp_agg_pull}; if the id is already known, the sender is
 * merely added as an additional invoker awaiting the pending result.
 *
 * @param nanoTime   caller-captured {@code System.nanoTime()} timestamp
 * @param millisTime caller-captured wall-clock timestamp (unused here)
 * @param sender     the peer that sent this UDP request
 * @param pv         the incoming request parameters
 */
public void invoke_request(long nanoTime, long millisTime, URI sender, ParameterValue pv) {
// UDP request
// check number of XsltMessage elements
String prefix = Constants.XsltActionListElementName + "/" + Constants.XsltActionElementName;
int num = pv.getChildrenCount(prefix);
logger.debug("Got " + num + " XsltMessages!");
prefix += "[";
if (num > 0) {
// if there are actions
// initialize serializer
String xslt = null;
String src = pv.getValue(Constants.SvcEprElementName);
// cycle messages and extract each xslt
// WARNING: Assuming a single message
for (int i = 0; i < num; i++) {
String tempPrefix = prefix + i + "]/";
// extract xslt
xslt = extractXSLT(tempPrefix, pv);
if ((xslt != null) && (!xslt.isEmpty())) {
// If there is an xslt
// get msgid
String msgId = pv.getValue(tempPrefix + Constants.MessageInfoElementName + "/" + Constants.MessageIdentifierElementName);
// invoke agg on peers assuming that only one XSLTMessage is being sent
AggregationMessage agg = (AggregationMessage) processor.getMessage(new URI(msgId));
if (agg == null) {
// new aggregation message
logger.debug("New AggPull message with id: " + msgId);
// set action
String action = pv.getValue(tempPrefix + Constants.MessageInfoElementName + "/" + Constants.ActionElementName);
String current = Double.toString(processor.getService().getAppService().getLastValue());
// new message, so invoke on peers
agg = new AggregationMessage(new URI(msgId), new URI(action), 0, current, 0, current, maxFanout, waitFanout, timeout, xslt);
agg.addInvoker(sender);
processor.addMessage(agg);
// Build a fresh forward message so the original request is not mutated:
// copy rounds and the XSLT action list, but advertise our own EPR.
ParameterValue message = createInputValue();
// set my own epr
message.setValue(Constants.SvcEprElementName, processor.getService().getSvcEPR());
message = MessageUtil.duplicateRoundsPV(pv, message);
message = MessageUtil.duplicateXSLTActionListPV(pv, message);
logger.debug("Received message from " + src + " Changing SvcEpr to " + processor.getService().getSvcEPR() + ". Got " + message.getValue(Constants.SvcEprElementName));
processor.process_udp_agg_pull(message, nanoTime, msgId, agg, sender);
}
else
{
// existing aggregation message
// Duplicate request: remember this sender so it also receives the result.
logger.debug("AggregationMessage object already exists with id " + agg.getIdentifier() + "! Adding sender " + sender + "...");
if(logger.isDebugEnabled())
{
logger.debug("Current responses for " + agg.getIdentifier() + " are: " + agg.getCurrentResponses());
}
agg.addInvoker(sender);
}
}
}
}
}
/**
 * Handles an incoming UDP aggregation-pull response from a peer: extracts the
 * message id and the aggregated temperature value from the first
 * MessageContainer and records the value on the matching
 * {@code AggregationMessage}.
 *
 * @param nanoTime   caller-captured {@code System.nanoTime()} timestamp (unused here)
 * @param millisTime caller-captured wall-clock timestamp (unused here)
 * @param sender     the peer that sent the response (used for logging only)
 * @param pv         the response parameters
 */
public void invoke_response(long nanoTime, long millisTime, URI sender, ParameterValue pv) {
// UDP response
// pick agg message id and insert response in agg object
logger.debug("Received reply: " + pv + " from " + sender);
// get msgid — only the first MessageContainer ([0]) is consulted.
String msgId = pv.getValue(Constants.MessagesListElementName + "/"
+ Constants.MessageContainerElementName + "[" + 0 + "]/"
+ Constants.MessageIdentifierElementName);
String value = pv.getValue(Constants.MessagesListElementName + "/"
+ Constants.MessageContainerElementName + "[" + 0 + "]/"
+ Constants.MessageElementName + "/"
+ ApplicationServiceConstants.infoTempValueElementName);
// get agg object
// NOTE(review): getMessage may return null for an unknown/expired msgId,
// which would NPE on addResponse below — confirm late responses cannot occur.
AggregationMessage agg = (AggregationMessage) processor.getMessage(new URI(msgId));
logger.debug("Received response for AggregationMessage with id " + msgId + " with value: " + value);
// insert response
agg.addResponse(value);
// processor.setCurrentAggregateValue(msgId, value);
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.nopy.html.elements.semantics;
import com.nopy.html.attributes.Angular;
import com.nopy.html.attributes.Element;
import com.nopy.html.attributes.Global;
/**
*
* @author RODRIGO
*/
/**
 * Fluent builder for an HTML {@code <b>} (bold) element.
 * <p>
 * Delegates content handling to {@link Element} and attribute handling to the
 * shared {@link Global} and {@link Angular} attribute holders. Every setter
 * returns {@code this} so calls can be chained; {@link #toString()} renders the
 * final markup.
 *
 * @author RODRIGO
 */
public class B {

    private final Element element;
    private final Global global;
    private final Angular angular;

    /** Creates an empty {@code <b>} builder with fresh attribute holders. */
    public B() {
        this.element = new Element();
        this.global = new Global();
        this.angular = new Angular();
    }

    /** Sets the element's content (text or nested elements). */
    public B setElement(Object content) { element.setElement(content); return this; }

    /* ------------------------------------------------------------------
     * Global HTML attributes (delegated to Global)
     * ------------------------------------------------------------------ */

    public B setId(Object value) { global.setId(value); return this; }

    public B setClass(Object value) { global.setClass(value); return this; }

    public B setStyle(Object value) { global.setStyle(value); return this; }

    public B setTitle(Object value) { global.setTitle(value); return this; }

    public B setAccesskey(Object accesskey) { global.setAccesskey(accesskey); return this; }

    public B setContenteditable(Object contenteditable) { global.setContenteditable(contenteditable); return this; }

    public B setDir(Object dir) { global.setDir(dir); return this; }

    public B setDraggable(Object draggable) { global.setDraggable(draggable); return this; }

    public B setDropzone(Object dropzone) { global.setDropzone(dropzone); return this; }

    public B setHidden(Object hidden) { global.setHidden(hidden); return this; }

    public B setLang(Object lang) { global.setLang(lang); return this; }

    public B setSpellcheck(Object spellcheck) { global.setSpellcheck(spellcheck); return this; }

    public B setTabindex(Object tabindex) { global.setTabindex(tabindex); return this; }

    public B setTranslate(Object translate) { global.setTranslate(translate); return this; }

    /* ------------------------------------------------------------------
     * AngularJS directives (delegated to Angular)
     * ------------------------------------------------------------------ */

    public B setNgApp(Object ngApp) { angular.setNgApp(ngApp); return this; }

    public B setNgBind(Object ngBind) { angular.setNgBind(ngBind); return this; }

    public B setNgBindHtml(Object ngBindHtml) { angular.setNgBindHtml(ngBindHtml); return this; }

    public B setNgBindTemplate(Object ngBindTemplate) { angular.setNgBindTemplate(ngBindTemplate); return this; }

    public B setNgBlur(Object ngBlur) { angular.setNgBlur(ngBlur); return this; }

    public B setNgChange(Object ngChange) { angular.setNgChange(ngChange); return this; }

    public B setNgChecked(Object ngChecked) { angular.setNgChecked(ngChecked); return this; }

    public B setNgClass(Object ngClass) { angular.setNgClass(ngClass); return this; }

    public B setNgClassEven(Object ngClassEven) { angular.setNgClassEven(ngClassEven); return this; }

    public B setNgClassOdd(Object ngClassOdd) { angular.setNgClassOdd(ngClassOdd); return this; }

    public B setNgClick(Object ngClick) { angular.setNgClick(ngClick); return this; }

    public B setNgCloak(Object ngCloak) { angular.setNgCloak(ngCloak); return this; }

    public B setNgController(Object ngController) { angular.setNgController(ngController); return this; }

    public B setNgCopy(Object ngCopy) { angular.setNgCopy(ngCopy); return this; }

    public B setNgCsp(Object ngCsp) { angular.setNgCsp(ngCsp); return this; }

    public B setNgCut(Object ngCut) { angular.setNgCut(ngCut); return this; }

    public B setNgDblclick(Object ngDblclick) { angular.setNgDblclick(ngDblclick); return this; }

    public B setNgDisable(Object ngDisable) { angular.setNgDisable(ngDisable); return this; }

    public B setNgFocus(Object ngFocus) { angular.setNgFocus(ngFocus); return this; }

    public B setNgForm(Object ngForm) { angular.setNgForm(ngForm); return this; }

    public B setNgHide(Object ngHide) { angular.setNgHide(ngHide); return this; }

    public B setNgHref(Object ngHref) { angular.setNgHref(ngHref); return this; }

    public B setNgIf(Object ngIf) { angular.setNgIf(ngIf); return this; }

    public B setNgKeydown(Object ngKeydown) { angular.setNgKeydown(ngKeydown); return this; }

    public B setNgKeypress(Object ngKeypress) { angular.setNgKeypress(ngKeypress); return this; }

    public B setNgKeyup(Object ngKeyup) { angular.setNgKeyup(ngKeyup); return this; }

    public B setNgList(Object ngList) { angular.setNgList(ngList); return this; }

    public B setNgModel(Object ngModel) { angular.setNgModel(ngModel); return this; }

    public B setNgModelOptions(Object ngModelOptions) { angular.setNgModelOptions(ngModelOptions); return this; }

    public B setNgMousedown(Object ngMousedown) { angular.setNgMousedown(ngMousedown); return this; }

    public B setNgMouseenter(Object ngMouseenter) { angular.setNgMouseenter(ngMouseenter); return this; }

    public B setNgMouseleave(Object ngMouseleave) { angular.setNgMouseleave(ngMouseleave); return this; }

    public B setNgMousemove(Object ngMousemove) { angular.setNgMousemove(ngMousemove); return this; }

    public B setNgMouseover(Object ngMouseover) { angular.setNgMouseover(ngMouseover); return this; }

    public B setNgMouseup(Object ngMouseup) { angular.setNgMouseup(ngMouseup); return this; }

    public B setNgNonBindatable(Object ngNonBindatable) { angular.setNgNonBindatable(ngNonBindatable); return this; }

    public B setNgOpen(Object ngOpen) { angular.setNgOpen(ngOpen); return this; }

    public B setNgOptions(Object ngOptions) { angular.setNgOptions(ngOptions); return this; }

    public B setNgPaste(Object ngPaste) { angular.setNgPaste(ngPaste); return this; }

    public B setNgPluralize(Object ngPluralize) { angular.setNgPluralize(ngPluralize); return this; }

    public B setNgReadonly(Object ngReadonly) { angular.setNgReadonly(ngReadonly); return this; }

    public B setNgRepeat(Object ngRepeat) { angular.setNgRepeat(ngRepeat); return this; }

    public B setNgSelected(Object ngSelected) { angular.setNgSelected(ngSelected); return this; }

    public B setNgShow(Object ngShow) { angular.setNgShow(ngShow); return this; }

    public B setNgSrc(Object ngSrc) { angular.setNgSrc(ngSrc); return this; }

    public B setNgSrcset(Object ngSrcset) { angular.setNgSrcset(ngSrcset); return this; }

    public B setNgStyle(Object ngStyle) { angular.setNgStyle(ngStyle); return this; }

    public B setNgSubmit(Object ngSubmit) { angular.setNgSubmit(ngSubmit); return this; }

    public B setNgSwitch(Object ngSwitch) { angular.setNgSwitch(ngSwitch); return this; }

    public B setNgTransclude(Object ngTransclude) { angular.setNgTransclude(ngTransclude); return this; }

    public B setNgValue(Object ngValue) { angular.setNgValue(ngValue); return this; }

    public B setNgView(Object ngView) { angular.setNgView(ngView); return this; }

    /**
     * Renders the element as HTML: opening tag with all attributes, the
     * content, then the closing tag.
     */
    @Override
    public String toString() {
        return "<b" + global + angular + ">" + element + "</b>";
    }
}
| |
/*
* Symphony - A modern community (forum/BBS/SNS/blog) platform written in Java.
* Copyright (C) 2012-2018, b3log.org & hacpai.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.b3log.symphony.model.feed;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.time.DateFormatUtils;
import java.util.Date;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
/**
* Item.
*
* @author <a href="http://88250.b3log.org">Liang Ding</a>
* @version 1.0.0.1, Aug 20, 2018
* @since 3.1.0
*/
/**
 * An RSS 2.0 feed item ({@code <item>} element).
 * <p>
 * Title, link, author and guid are XML-escaped when rendered; the description
 * is emitted inside a CDATA section.
 *
 * @author <a href="http://88250.b3log.org">Liang Ding</a>
 * @version 1.0.0.2, Aug 20, 2018
 * @since 3.1.0
 */
public final class RSSItem {

    /**
     * Start title element.
     */
    private static final String START_TITLE_ELEMENT = "<title>";

    /**
     * End title element.
     */
    private static final String END_TITLE_ELEMENT = "</title>";

    /**
     * Start link element.
     */
    private static final String START_LINK_ELEMENT = "<link>";

    /**
     * End link element.
     */
    private static final String END_LINK_ELEMENT = "</link>";

    /**
     * Start description element.
     */
    private static final String START_DESCRIPTION_ELEMENT = "<description>";

    /**
     * End description element.
     */
    private static final String END_DESCRIPTION_ELEMENT = "</description>";

    /**
     * Start author element.
     */
    private static final String START_AUTHOR_ELEMENT = "<author>";

    /**
     * End author element.
     */
    private static final String END_AUTHOR_ELEMENT = "</author>";

    /**
     * Start guid element.
     */
    private static final String START_GUID_ELEMENT = "<guid>";

    /**
     * End guid element.
     */
    private static final String END_GUID_ELEMENT = "</guid>";

    /**
     * Start pubDate element.
     */
    private static final String START_PUB_DATE_ELEMENT = "<pubDate>";

    /**
     * End pubDate element.
     */
    private static final String END_PUB_DATE_ELEMENT = "</pubDate>";

    /**
     * Categories.
     */
    private Set<RSSCategory> categories = new HashSet<>();

    /**
     * Guid.
     */
    private String guid;

    /**
     * Publish date.
     */
    private Date pubDate;

    /**
     * Title.
     */
    private String title;

    /**
     * Description.
     */
    private String description;

    /**
     * Link.
     */
    private String link;

    /**
     * Author.
     */
    private String author;

    /**
     * Gets the GUID.
     *
     * @return GUID
     */
    public String getGUID() {
        return guid;
    }

    /**
     * Sets the GUID with the specified GUID.
     *
     * @param guid the specified GUID
     */
    public void setGUID(final String guid) {
        this.guid = guid;
    }

    /**
     * Gets the author.
     *
     * @return author
     */
    public String getAuthor() {
        return author;
    }

    /**
     * Sets the author with the specified author.
     *
     * @param author the specified author
     */
    public void setAuthor(final String author) {
        this.author = author;
    }

    /**
     * Gets the link.
     *
     * @return link
     */
    public String getLink() {
        return link;
    }

    /**
     * Sets the link with the specified link.
     *
     * @param link the specified link
     */
    public void setLink(final String link) {
        this.link = link;
    }

    /**
     * Gets the title.
     *
     * @return title
     */
    public String getTitle() {
        return title;
    }

    /**
     * Sets the title with the specified title.
     *
     * @param title the specified title
     */
    public void setTitle(final String title) {
        this.title = title;
    }

    /**
     * Gets publish date.
     *
     * @return publish date
     */
    public Date getPubDate() {
        return pubDate;
    }

    /**
     * Sets the publish date with the specified publish date.
     *
     * @param pubDate the specified publish date
     */
    public void setPubDate(final Date pubDate) {
        this.pubDate = pubDate;
    }

    /**
     * Gets the description.
     *
     * @return description
     */
    public String getDescription() {
        return description;
    }

    /**
     * Sets the description with the specified description.
     *
     * @param description the specified description
     */
    public void setDescription(final String description) {
        this.description = description;
    }

    /**
     * Adds the specified category.
     *
     * @param category the specified category
     */
    public void addCategory(final RSSCategory category) {
        categories.add(category);
    }

    /**
     * Adds the specified category.
     *
     * @param category the specified category
     * @deprecated misspelled; kept for backward compatibility, use
     * {@link #addCategory(RSSCategory)} instead
     */
    @Deprecated
    public void addCatetory(final RSSCategory category) {
        addCategory(category);
    }

    /**
     * Renders this item as an RSS {@code <item>} XML fragment.
     *
     * @return the XML text of this item
     */
    @Override
    public String toString() {
        final StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append("<item>").append(START_TITLE_ELEMENT);
        stringBuilder.append(StringEscapeUtils.escapeXml(title));
        stringBuilder.append(END_TITLE_ELEMENT);
        stringBuilder.append(START_LINK_ELEMENT);
        stringBuilder.append(StringEscapeUtils.escapeXml(link));
        stringBuilder.append(END_LINK_ELEMENT);
        stringBuilder.append(START_DESCRIPTION_ELEMENT);
        // Bug fix: a literal "]]>" inside the description would prematurely
        // terminate the CDATA section and corrupt the feed XML. Split any
        // occurrence across two CDATA sections; a null description used to be
        // rendered as the text "null" and now becomes an empty body.
        final String safeDescription = null == description
                ? ""
                : description.replace("]]>", "]]]]><![CDATA[>");
        stringBuilder.append("<![CDATA[").append(safeDescription).append("]]>");
        stringBuilder.append(END_DESCRIPTION_ELEMENT);
        stringBuilder.append(START_AUTHOR_ELEMENT);
        stringBuilder.append(StringEscapeUtils.escapeXml(author));
        stringBuilder.append(END_AUTHOR_ELEMENT);
        stringBuilder.append(START_GUID_ELEMENT);
        stringBuilder.append(StringEscapeUtils.escapeXml(guid));
        stringBuilder.append(END_GUID_ELEMENT);
        for (final RSSCategory category : categories) {
            stringBuilder.append(category.toString());
        }
        stringBuilder.append(START_PUB_DATE_ELEMENT);
        stringBuilder.append(DateFormatUtils.format(pubDate, "EEE, dd MMM yyyy HH:mm:ss Z", Locale.US));
        stringBuilder.append(END_PUB_DATE_ELEMENT).append("</item>");
        return stringBuilder.toString();
    }
}
| |
//
// $Id: Coordinates.java 21291 2009-07-29 16:23:21Z swalker $
//
package edu.gemini.skycalc;
import java.text.ParseException;
import static edu.gemini.skycalc.Angle.Unit.DEGREES;
/**
* An RA/Dec pair.
*/
/**
 * An RA/Dec pair. Both angles are normalized to degrees on construction.
 * Instances are immutable.
 */
public final class Coordinates {
    private final Angle _ra;
    private final Angle _dec;

    /**
     * Parses an RA string (HH:MM:SS) and a Dec string (DD:MM:SS) into a
     * coordinate pair.
     *
     * @param raStr  right ascension in HHMMSS format
     * @param decStr declination in DDMMSS format
     * @return the parsed coordinates
     * @throws ParseException if either string cannot be parsed
     */
    public static Coordinates create(String raStr, String decStr)
            throws ParseException {
        double ra = HHMMSS.parse(raStr).toDegrees().getMagnitude();
        double dec = DDMMSS.parse(decStr).toDegrees().getMagnitude();
        return new Coordinates(ra, dec);
    }

    /**
     * Creates coordinates from RA and Dec expressed in degrees.
     */
    public Coordinates(double ra, double dec) {
        this(new Angle(ra, DEGREES), new Angle(dec, DEGREES));
    }

    /**
     * Creates coordinates from RA and Dec angles; they are converted to
     * degrees internally.
     */
    public Coordinates(Angle ra, Angle dec) {
        _ra = ra.toDegrees();
        _dec = dec.toDegrees();
    }

    /** Returns the right ascension (in degrees). */
    public Angle getRa() {
        return _ra;
    }

    /** Returns the declination (in degrees). */
    public Angle getDec() {
        return _dec;
    }

    /** Returns the right ascension magnitude in degrees. */
    public double getRaDeg() {
        return _ra.getMagnitude();
    }

    /** Returns the declination magnitude in degrees. */
    public double getDecDeg() {
        return _dec.getMagnitude();
    }

    @Override
    public boolean equals(Object other) {
        if (other == null) return false;
        // Exact class match (not instanceof): Coordinates is final anyway.
        if (other.getClass() != getClass()) return false;

        Coordinates that = (Coordinates) other;
        if (!_ra.equals(that._ra)) return false;
        return _dec.equals(that._dec);
    }

    @Override
    public int hashCode() {
        int res = _ra.hashCode();
        res = res*37 + _dec.hashCode();
        return res;
    }

    @Override
    public String toString() {
        return HHMMSS.valStr(_ra.getMagnitude()) + " " +
                DDMMSS.valStr(_dec.getMagnitude());
    }

    /**
     * Converts a cartesian coordinate triplet back to a standard ra and dec.
     * (skycalc: xyz_cel)
     *
     * @param x cartesian x component
     * @param y cartesian y component
     * @param z cartesian z component
     * @return the equivalent RA/Dec pair
     */
    public static Coordinates xyzToCoordinates(double x, double y, double z) {
        // skycal: xyz_cel
        double mod;     // modulus
        double xy;      // component in xy plane
        double radian_ra, radian_dec;

        // this taken directly from pl1 routine - no acos or asin available there,
        // as it is in c. Easier just to copy, though
        mod = Math.sqrt(x*x + y*y + z*z);
        x = x / mod;
        y = y / mod;
        z = z / mod; // normalize 'em explicitly first.

        xy = Math.sqrt(x*x + y*y);

        if (xy < 1.0e-10) {
            radian_ra = 0.; // too close to pole
            radian_dec = Math.PI / 2.;
            if (z < 0.) radian_dec = radian_dec * -1.;
        } else {
            // atan is used everywhere (no asin/acos in the original pl1 code);
            // the 3.0 threshold picks the better-conditioned formulation.
            if (Math.abs(z/xy) < 3.0) {
                radian_dec = Math.atan(z / xy);
            } else if (z >= 0.) {
                radian_dec = Math.PI / 2. - Math.atan(xy / z);
            } else {
                radian_dec = -1. * Math.PI / 2. - Math.atan(xy / z);
            }
            if (Math.abs(x) > 1.0e-10) {
                if (Math.abs(y / x) < 3.0) {
                    radian_ra = Math.atan(y/x);
                } else if ((x * y ) >= 0.0) {
                    radian_ra = Math.PI / 2. - Math.atan(x/y);
                } else {
                    radian_ra = -1.0 * Math.PI / 2. - Math.atan(x / y);
                }
            } else {
                radian_ra = Math.PI / 2.;
                if((x * y)<= 0.) radian_ra = radian_ra * -1.;
            }
            // fold RA into [0, 2*pi)
            if (x <0.0) {
                radian_ra = radian_ra + Math.PI;
            }
            if (radian_ra < 0.0) {
                radian_ra = radian_ra + 2.0 * Math.PI;
            }
        }

        double ra = radian_ra * ImprovedSkyCalcMethods.HRS_IN_RADIAN;
        double dec = radian_dec * ImprovedSkyCalcMethods.DEG_IN_RADIAN;
        return new Coordinates(ra, dec);
    }

    /**
     * Takes a coordinate pair and precesses it using matrix procedures
     * as outlined in Taff's Computational Spherical Astronomy book.
     * This is the so-called 'rigorous' method which should give very
     * accurate answers all over the sky over an interval of several
     * centuries. Naked eye accuracy holds to ancient times, too.
     * Precession constants used are the new IAU1976 -- the 'J2000'
     * system.
     *
     * @param orig_epoch  epoch of these coordinates (e.g. 2000.0)
     * @param final_epoch epoch to precess to
     * @return the precessed coordinates
     */
    public Coordinates precess(double orig_epoch, double final_epoch) {
        // skycalc: precrot
        double ti, tf, zeta, z, theta; /* all as per Taff */
        double cosz, coszeta, costheta, sinz, sinzeta, sintheta; /* ftns */
        double p11, p12, p13, p21, p22, p23, p31, p32, p33;
        /* elements of the rotation matrix */
        double radian_ra, radian_dec;
        double orig_x, orig_y, orig_z;
        double fin_x, fin_y, fin_z; /* original and final unit vectors */

        // centuries from J2000 to the original and final epochs
        ti = (orig_epoch - 2000.) / 100.;
        tf = (final_epoch - 2000. - 100. * ti) / 100.;

        // IAU1976 precession angles, in arcseconds
        zeta = (2306.2181 + 1.39656 * ti + 0.000139 * ti * ti) * tf +
                (0.30188 - 0.000344 * ti) * tf * tf + 0.017998 * tf * tf * tf;
        z = zeta + (0.79280 + 0.000410 * ti) * tf * tf + 0.000205 * tf * tf * tf;
        theta = (2004.3109 - 0.8533 * ti - 0.000217 * ti * ti) * tf
                - (0.42665 + 0.000217 * ti) * tf * tf - 0.041833 * tf * tf * tf;

        /* convert to radians */
        zeta = zeta / ImprovedSkyCalcMethods.ARCSEC_IN_RADIAN;
        z = z / ImprovedSkyCalcMethods.ARCSEC_IN_RADIAN;
        theta = theta / ImprovedSkyCalcMethods.ARCSEC_IN_RADIAN;

        /* compute the necessary trig functions for speed and simplicity */
        cosz = Math.cos(z);
        coszeta = Math.cos(zeta);
        costheta = Math.cos(theta);
        sinz = Math.sin(z);
        sinzeta = Math.sin(zeta);
        sintheta = Math.sin(theta);

        /* compute the elements of the precession matrix */
        p11 = coszeta * cosz * costheta - sinzeta * sinz;
        p12 = -1. * sinzeta * cosz * costheta - coszeta * sinz;
        p13 = -1. * cosz * sintheta;
        p21 = coszeta * sinz * costheta + sinzeta * cosz;
        p22 = -1. * sinzeta * sinz * costheta + coszeta * cosz;
        p23 = -1. * sinz * sintheta;
        p31 = coszeta * sintheta;
        p32 = -1. * sinzeta * sintheta;
        p33 = costheta;

        /* transform original coordinates to a unit vector */
        radian_ra = getRaDeg() / ImprovedSkyCalcMethods.HRS_IN_RADIAN;
        radian_dec = getDecDeg() / ImprovedSkyCalcMethods.DEG_IN_RADIAN;
        orig_x = Math.cos(radian_dec) * Math.cos(radian_ra);
        orig_y = Math.cos(radian_dec) * Math.sin(radian_ra);
        orig_z = Math.sin(radian_dec);

        /* (hard coded matrix multiplication ...) */
        fin_x = p11 * orig_x + p12 * orig_y + p13 * orig_z;
        fin_y = p21 * orig_x + p22 * orig_y + p23 * orig_z;
        fin_z = p31 * orig_x + p32 * orig_y + p33 * orig_z;

        /* convert back to spherical polar coords */
        return xyzToCoordinates(fin_x, fin_y, fin_z);
    }
}
| |
package cc.softwarefactory.lokki.android.activities;
import android.app.Activity;
import android.app.ListActivity;
import android.content.Context;
import android.content.Intent;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import com.androidquery.AQuery;
import com.androidquery.callback.AjaxCallback;
import com.androidquery.callback.AjaxStatus;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import cc.softwarefactory.lokki.android.MainApplication;
import cc.softwarefactory.lokki.android.R;
import cc.softwarefactory.lokki.android.fragments.MapViewFragment;
import cc.softwarefactory.lokki.android.models.Contact;
import cc.softwarefactory.lokki.android.models.Place;
import cc.softwarefactory.lokki.android.services.ContactService;
/**
* An activity for performing searches and displaying their results
*/
public class SearchActivity extends ListActivity {
// The accumulated search results backing the list view; mutated by the
// background search tasks (via onPostExecute) and the Google Maps callback.
ArrayList<SearchResult> resultList;
// Application context, captured in onCreate.
private Context context;
// The debug tag for this activity
public static final String TAG = "SearchActivity";
// Intent-extra key used to fetch the search query from the launching intent.
public final static String QUERY_MESSAGE = "SEARCH_QUERY";
// URL to the public Google Maps geocoding API; the (URL-encoded) query is appended.
public static final String GOOGLE_MAPS_API_URL = "http://maps.googleapis.com/maps/api/geocode/json?address=";
// List adapter for the results list view.
private ArrayAdapter<SearchResult> adapter;
// Icons need to be resized to fit buttons, so cache them to prevent them from hogging the UI thread
private Drawable contactIcon = null;
private Drawable placeIcon = null;
private Drawable mapIcon = null;
// Service used to look up contacts visible to the current user.
private ContactService contactService;
/**
 * The different possible result types:
 * CONTACT: This result is a contact
 * PLACE: This result is a user-defined place
 * GOOGLE_LOCATION: This result is a Google maps location
 */
public enum ResultType {CONTACT, PLACE, GOOGLE_LOCATION}
/**
 * Immutable value object describing a single search hit shown in the list.
 */
public class SearchResult{
    /** The kind of hit (contact, user place, or Google Maps location). */
    public final ResultType type;
    /** The human-readable label shown on the result button. */
    public final String displayName;
    /** Type-specific payload: an e-mail for contacts, "lat,lon" for locations. */
    public final String extraData;

    /**
     * Creates a new search result.
     *
     * @param type        the kind of hit
     * @param displayName label shown to the user
     * @param extraData   payload used when the result is clicked
     */
    SearchResult(ResultType type, String displayName, String extraData){
        this.extraData = extraData;
        this.displayName = displayName;
        this.type = type;
    }
}
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    Log.d(TAG, "searchActivityOnCreate");
    setContentView(R.layout.activity_search);

    // Wire up dependencies before reading the launch intent.
    context = getApplicationContext();
    contactService = new ContactService(context);

    // The launching intent carries the user's query string.
    final Intent launchIntent = getIntent();
    final String query = launchIntent.getStringExtra(QUERY_MESSAGE);
    Log.d(TAG, "User searched for: " + query);

    resultList = new ArrayList<>();
    setHeader(query);
    setListAdapter(this);

    // Kick off the asynchronous search; results arrive via the adapter.
    new PerformSearch().execute(query);
    Log.d(TAG, "end of onCreate");
}
//------------------Background tasks------------------
/**
 * Performs the local (contact and place) searches in an asynchronous
 * background task so they do not hog the UI thread, then merges the results
 * into {@link #resultList} and triggers the online Google Maps search.
 */
private class PerformSearch extends AsyncTask<String, Void, String> {
    // Collected on the background thread; merged into resultList in
    // onPostExecute. Bug fix: initialized eagerly so onPostExecute never sees
    // null when doInBackground bails out early (previously this NPE'd on
    // resultList.addAll(tempResults)).
    private final ArrayList<SearchResult> tempResults = new ArrayList<>();

    @Override
    protected String doInBackground(String... query) {
        if (query.length < 1){
            Log.w(TAG, "No search parameters");
            return null;
        }
        // Normalize once; the search helpers assume a lower-case query.
        String queryMessage = query[0].toLowerCase();
        // Perform the local searches.
        searchContacts(queryMessage, tempResults);
        searchPlaces(queryMessage, tempResults);
        return queryMessage;
    }

    @Override
    protected void onPostExecute(String query) {
        resultList.addAll(tempResults);
        // Show the local results immediately.
        adapter.notifyDataSetChanged();
        setHeader(query);
        // Start the Google Maps search in a separate task so local results are
        // visible before the online search finishes.
        new AddressSearch().execute(query);
        Log.d(TAG, "end of performSearch");
    }
}
/**
 * Runs the network-bound Google Maps geocoding search off the UI thread.
 * Result handling happens inside {@code searchGoogleMaps}'s callback.
 */
private class AddressSearch extends AsyncTask<String, Void, String>{
    @Override
    protected String doInBackground(String... query) {
        if (query.length >= 1){
            final String queryMessage = query[0];
            searchGoogleMaps(queryMessage);
            return queryMessage;
        }
        Log.w(TAG, "No search parameters");
        return null;
    }

    @Override
    protected void onPostExecute(String query) {
        Log.d(TAG, "end of addressSearch");
    }
}
//------------------Search Methods------------------
/**
 * Adds a result for every visible contact whose e-mail address or name
 * contains the query (case-insensitively).
 *
 * @param query      The string being searched
 * @param resultList receives any matching results
 */
protected void searchContacts(String query, ArrayList<SearchResult> resultList)
{
    final String needle = query.toLowerCase();
    for (Contact contact : contactService.getContactsVisibleToMe()) {
        final String name = contact.getName();
        final boolean emailMatches = contact.getEmail().toLowerCase().contains(needle);
        final boolean nameMatches = name != null && name.toLowerCase().contains(needle);
        if (emailMatches || nameMatches) {
            // Display either name or email depending on whether a name exists
            // (contact.toString() decides); the e-mail goes into extraData so
            // the click handler can start tracking it.
            resultList.add(new SearchResult(ResultType.CONTACT, contact.toString(), contact.getEmail()));
        }
    }
}
/**
 * Adds a result for every user-defined place whose name contains the query.
 * Assumes {@code query} has already been lower-cased by the caller.
 *
 * @param query      The string being searched
 * @param resultList receives any matching results
 */
protected void searchPlaces(String query, ArrayList<SearchResult> resultList){
    // Do nothing if there are no places.
    if (MainApplication.places == null){
        return;
    }
    // Debug-dump the full place list as JSON.
    try {
        Log.d(TAG, new ObjectMapper().writeValueAsString(MainApplication.places));
    } catch (JsonProcessingException e) {
        Log.e(TAG, "Serializing places to JSON failed");
        e.printStackTrace();
    }
    // Loop through all user places.
    for (Place place : MainApplication.places) {
        try {
            String name = place.getName();
            Log.d(TAG, "place: " + name);
            if (!name.toLowerCase().contains(query)){
                continue;
            }
            // Store place coordinates in the result's extra data for easy access.
            String coords = place.getLocation().getLat() + "," + place.getLocation().getLon();
            resultList.add(new SearchResult(ResultType.PLACE, name, coords));
        } catch (Exception e) {
            Log.e(TAG, "Error parsing places: " + e);
        }
    }
}
/**
 * Sends a geocoding request to the public Google Maps API and appends the
 * parsed results to {@link #resultList} from the AJAX callback.
 *
 * @param query The string being searched
 */
private void searchGoogleMaps(final String query){
    final AQuery aq = new AQuery(this);
    String encodedQuery;
    try {
        // Bug fix: the raw query must be URL-encoded before being appended to
        // the request URL — spaces and special characters otherwise produce a
        // malformed request.
        encodedQuery = URLEncoder.encode(query, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        // UTF-8 support is mandated by the Java platform; fall back to the
        // raw query rather than failing the whole search.
        encodedQuery = query;
    }
    String url = GOOGLE_MAPS_API_URL + encodedQuery;
    AjaxCallback<JSONObject> cb = new AjaxCallback<JSONObject>(){
        @Override
        public void callback(String url, JSONObject json, AjaxStatus status) {
            if (status.getError() != null){
                Log.e(TAG, "Error accessing Google Maps API: " + status.getError());
                return;
            }
            //TODO: Move result parsing to an async task
            try {
                // Create a SearchResult from each result in the response.
                JSONArray results = json.getJSONArray("results");
                for (int i = 0; i < results.length(); i++){
                    resultList.add(parseGoogleMapsResult(results.getJSONObject(i)));
                }
            } catch (JSONException e){
                Log.e(TAG, "Error parsing Google Maps JSON: " + e);
            }
            Log.d(TAG, "Geocoding result: " + json.toString());
            // Show the results.
            adapter.notifyDataSetChanged();
            setHeader(query);
        }
    };
    aq.ajax(url, JSONObject.class, cb);
}
//------------------Private helper methods------------------
/**
 * Converts the list of results into a list of buttons visible to the user.
 * <p>
 * Installs an {@code ArrayAdapter} whose {@code getView} inflates one button
 * per {@link SearchResult} and wires up a click handler whose behavior depends
 * on the result type: contacts start e-mail tracking, places and Google Maps
 * locations broadcast their coordinates back to the map. The adapter is kept
 * in {@link #adapter} so the search tasks can refresh it.
 *
 * @param listActivity The activity containing the list
 */
private void setListAdapter(final Activity listActivity)
{
Log.d(TAG, "setListAdapter");
adapter = new ArrayAdapter<SearchResult>(context,R.layout.listresult_layout, resultList){
// Inflates a fresh row each time (the recycled convertView parameter is
// intentionally ignored here — note: this forgoes list-view recycling).
public View getView(int position, View unusedView, ViewGroup parent)
{
View convertView = getLayoutInflater().inflate(R.layout.listresult_layout, parent, false);
AQuery aq = new AQuery(listActivity,convertView);
final SearchResult clickedResult = resultList.get(position);
String buttonLabel = clickedResult.displayName;
// Configure search result button
aq.id(R.id.list_result).text(buttonLabel).clicked(new View.OnClickListener() {
@Override
public void onClick(View view) {
Log.d(TAG, "user clicked search result");
switch (clickedResult.type) {
case CONTACT: {
Log.d(TAG, "result type: contact");
// Set app to track contact's email (stored in extraData), then close.
MainApplication.emailBeingTracked = clickedResult.extraData;
finish();
break;
}
case PLACE:
Log.d(TAG, "result type: place");
//Maps locations and places have the same behavior, so fall through
case GOOGLE_LOCATION: {
//Recheck the result type in case we fell through from PLACE
if (clickedResult.type == ResultType.GOOGLE_LOCATION)
Log.d(TAG, "result type: Google Maps location");
// Broadcast the "lat,lon" coordinates (from extraData) back to the map view.
Intent intent = new Intent(MapViewFragment.BROADCAST_GO_TO);
intent.putExtra(MapViewFragment.GO_TO_COORDS, clickedResult.extraData);
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
//Go back to the map
finish();
break;
}
default: {
Log.d(TAG, "invalid search type");
//Close the search
finish();
}
}
}
});
final Button resultButton = aq.id(R.id.list_result).getButton();
//Set button icons in a callback so that the button already exists and we can find out its size
resultButton.addOnLayoutChangeListener(new View.OnLayoutChangeListener(){
@Override
public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom){
setButtonIcon(resultButton, clickedResult);
//Only set the icon once
resultButton.removeOnLayoutChangeListener(this);
}
});
return convertView;
}
};
getListView().setAdapter(adapter);
}
/**
 * Applies the icon matching the result type to the given button. The scaled
 * drawable for each type is created once and cached for reuse by later rows.
 * @param btn The button
 * @param res The search result associated with the button
 */
private void setButtonIcon(Button btn, SearchResult res){
    final Drawable icon;
    if (res.type == ResultType.CONTACT) {
        // Lazily scale and cache the contact icon on first use
        if (contactIcon == null) {
            contactIcon = scaleIconToButton(btn, R.drawable.ic_people_grey600_48dp);
        }
        icon = contactIcon;
    } else if (res.type == ResultType.PLACE) {
        if (placeIcon == null) {
            placeIcon = scaleIconToButton(btn, R.drawable.ic_place_grey600_48dp);
        }
        icon = placeIcon;
    } else if (res.type == ResultType.GOOGLE_LOCATION) {
        if (mapIcon == null) {
            mapIcon = scaleIconToButton(btn, R.drawable.ic_map_grey600_48dp);
        }
        icon = mapIcon;
    } else {
        //Unknown result type, leave the button untouched
        return;
    }
    //Set the scaled icon on the left edge of the button
    btn.setCompoundDrawables(icon, null, null, null);
}
/**
 * Loads a drawable resource and, if it is tall enough to force the button to
 * resize, scales it down proportionally to half the button's height.
 * @param btn The button that will contain the icon
 * @param icon The resource ID of the icon to be shown on the button
 * @return The icon scaled down to fit the button, or null if the resource
 *         could not be resolved
 */
private Drawable scaleIconToButton(Button btn, int icon){
    Log.d(TAG, "Setting button icon");
    //getResources().getDrawable is deprecated, but context.getDrawable doesn't work below API level 21
    final boolean hasLollipop =
            android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP;
    final Drawable result = hasLollipop ? context.getDrawable(icon) : getResources().getDrawable(icon);
    if (result == null){
        Log.e(TAG, "Could not find drawable resource " + icon);
        return null;
    }
    //Scale the drawable if it's too big
    //"Too big" = forces app to resize button
    final int maxHeight = btn.getHeight() / 2;
    if (result.getIntrinsicHeight() > maxHeight){
        Log.d(TAG, "Intrinsic size: " + result.getIntrinsicWidth() + "x" + result.getIntrinsicHeight());
        //Preserve the aspect ratio while shrinking to the height cap
        float scale = (float) maxHeight / (float) result.getIntrinsicHeight();
        int scaledWidth = (int) (result.getIntrinsicWidth() * scale);
        result.setBounds(0, 0, scaledWidth, maxHeight);
        Log.d(TAG, "new drawable size: " + scaledWidth + "x" + maxHeight);
    } else {
        //Call setBounds so that setCompoundDrawables can be called safely
        result.setBounds(0, 0, result.getIntrinsicWidth(), result.getIntrinsicHeight());
    }
    return result;
}
/**
 * Updates the search page header: shows the searched-for query when results
 * exist, otherwise a "no results" message.
 * @param query The string that was searched for
 */
private void setHeader(String query){
    final String headerText = resultList.isEmpty()
            ? getString(R.string.no_search_results)
            : getString(R.string.search_results) + " " + query;
    new AQuery(this).id(R.id.search_header).text(headerText);
}
/**
 * Builds a {@code SearchResult} from a single entry of a Google Maps
 * geocoding response.
 * @param locationObject The JSON object containing a single Google Maps geocoding search result
 * @return A SearchResult object pointing to the location of the Google Maps result
 * @throws JSONException If the Google Maps result is malformed
 */
private SearchResult parseGoogleMapsResult(JSONObject locationObject) throws JSONException {
    final String displayName = locationObject.getString("formatted_address");
    final JSONObject location =
            locationObject.getJSONObject("geometry").getJSONObject("location");
    //Coordinates are passed along as a single "lat,lng" string
    final String latLng = location.getString("lat") + "," + location.getString("lng");
    return new SearchResult(ResultType.GOOGLE_LOCATION, displayName, latLng);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.processing.sortandgroupby.sortdata;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.IgnoreDictionary;
import org.apache.carbondata.core.util.ByteUtil.UnsafeComparer;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException;
import org.apache.carbondata.processing.util.NonDictionaryUtil;
/**
 * Reads the sorted rows previously written to a single sort temp file and
 * exposes them one row at a time so that rows from many temp files can be
 * merge-sorted together. Ordering between holders ({@link Comparable}) is
 * based on each holder's current row. Optionally prefetches rows through a
 * single background thread (double buffering) and supports reading
 * compression-chunked sort temp files.
 */
public class SortTempFileChunkHolder implements Comparable<SortTempFileChunkHolder> {
  /**
   * LOGGER
   */
  private static final LogService LOGGER =
      LogServiceFactory.getLogService(SortTempFileChunkHolder.class.getName());
  /**
   * temp file
   */
  private File tempFile;
  /**
   * read stream
   */
  private DataInputStream stream;
  /**
   * entry count
   */
  private int entryCount;
  /**
   * number record read
   */
  private int numberOfObjectRead;
  /**
   * return row
   */
  private Object[] returnRow;
  /**
   * number of measures
   */
  private int measureCount;
  /**
   * number of dimensionCount
   */
  private int dimensionCount;
  /**
   * number of complexDimensionCount
   */
  private int complexDimensionCount;
  /**
   * fileBufferSize for file reader stream size
   */
  private int fileBufferSize;
  // double buffers used when prefetch is enabled: rows are served from
  // currentBuffer while backupBuffer is filled in the background
  private Object[][] currentBuffer;
  private Object[][] backupBuffer;
  private boolean isBackupFilled;
  private boolean prefetch;
  private int bufferSize;
  private int bufferRowCounter;
  private ExecutorService executorService;
  private Future<Void> submit;
  private int prefetchRecordsProceesed;
  /**
   * sortTempFileNoOFRecordsInCompression
   */
  private int sortTempFileNoOFRecordsInCompression;
  /**
   * isSortTempFileCompressionEnabled
   */
  private boolean isSortTempFileCompressionEnabled;
  /**
   * totalRecordFetch
   */
  private int totalRecordFetch;
  private int noDictionaryCount;
  private char[] aggType;
  /**
   * to store whether dimension is of dictionary type or not
   */
  private boolean[] isNoDictionaryDimensionColumn;
  // TODO temporary configuration, remove after kettle removal
  private boolean useKettle;
  /**
   * Constructor to initialize
   *
   * @param tempFile sort temp file to read from
   * @param dimensionCount number of dictionary dimensions
   * @param complexDimensionCount number of complex dimensions
   * @param measureCount number of measures
   * @param fileBufferSize buffered-stream size for the file reader
   * @param noDictionaryCount number of no-dictionary dimensions
   * @param aggType per-measure aggregation/data type codes
   * @param isNoDictionaryDimensionColumn per-dimension dictionary flag
   * @param useKettle whether the legacy kettle row layout is used
   */
  public SortTempFileChunkHolder(File tempFile, int dimensionCount, int complexDimensionCount,
      int measureCount, int fileBufferSize, int noDictionaryCount, char[] aggType,
      boolean[] isNoDictionaryDimensionColumn, boolean useKettle) {
    // set temp file
    this.tempFile = tempFile;
    // set measure and dimension count
    this.measureCount = measureCount;
    this.dimensionCount = dimensionCount;
    this.complexDimensionCount = complexDimensionCount;
    this.noDictionaryCount = noDictionaryCount;
    // set mdkey length
    this.fileBufferSize = fileBufferSize;
    // single worker thread drives the background prefetch
    this.executorService = Executors.newFixedThreadPool(1);
    this.aggType = aggType;
    this.isNoDictionaryDimensionColumn = isNoDictionaryDimensionColumn;
    this.useKettle = useKettle;
  }
  /**
   * This method will be used to initialize: reads the prefetch/compression
   * configuration and opens the stream on the temp file.
   *
   * @throws CarbonSortKeyAndGroupByException problem while initializing
   */
  public void initialize() throws CarbonSortKeyAndGroupByException {
    prefetch = Boolean.parseBoolean(CarbonProperties.getInstance()
        .getProperty(CarbonCommonConstants.CARBON_MERGE_SORT_PREFETCH,
            CarbonCommonConstants.CARBON_MERGE_SORT_PREFETCH_DEFAULT));
    bufferSize = Integer.parseInt(CarbonProperties.getInstance()
        .getProperty(CarbonCommonConstants.CARBON_PREFETCH_BUFFERSIZE,
            CarbonCommonConstants.CARBON_PREFETCH_BUFFERSIZE_DEFAULT));
    this.isSortTempFileCompressionEnabled = Boolean.parseBoolean(CarbonProperties.getInstance()
        .getProperty(CarbonCommonConstants.IS_SORT_TEMP_FILE_COMPRESSION_ENABLED,
            CarbonCommonConstants.IS_SORT_TEMP_FILE_COMPRESSION_ENABLED_DEFAULTVALUE));
    if (this.isSortTempFileCompressionEnabled) {
      LOGGER.info("Compression was used while writing the sortTempFile");
    }
    try {
      this.sortTempFileNoOFRecordsInCompression = Integer.parseInt(CarbonProperties.getInstance()
          .getProperty(CarbonCommonConstants.SORT_TEMP_FILE_NO_OF_RECORDS_FOR_COMPRESSION,
              CarbonCommonConstants.SORT_TEMP_FILE_NO_OF_RECORD_FOR_COMPRESSION_DEFAULTVALUE));
      if (this.sortTempFileNoOFRecordsInCompression < 1) {
        LOGGER.error("Invalid value for: "
            + CarbonCommonConstants.SORT_TEMP_FILE_NO_OF_RECORDS_FOR_COMPRESSION
            + ": Only Positive Integer value(greater than zero) is allowed.Default value will"
            + " be used");
        this.sortTempFileNoOFRecordsInCompression = Integer.parseInt(
            CarbonCommonConstants.SORT_TEMP_FILE_NO_OF_RECORD_FOR_COMPRESSION_DEFAULTVALUE);
      }
    } catch (NumberFormatException e) {
      LOGGER.error(
          "Invalid value for: " + CarbonCommonConstants.SORT_TEMP_FILE_NO_OF_RECORDS_FOR_COMPRESSION
              + ", only Positive Integer value is allowed.Default value will be used");
      this.sortTempFileNoOFRecordsInCompression = Integer
          .parseInt(CarbonCommonConstants.SORT_TEMP_FILE_NO_OF_RECORD_FOR_COMPRESSION_DEFAULTVALUE);
    }
    initialise();
  }
  /**
   * Opens the read stream on the temp file, reads the entry count, and kicks
   * off the first (and, if needed, the second/background) buffer fill.
   *
   * @throws CarbonSortKeyAndGroupByException if the file is missing or unreadable
   */
  private void initialise() throws CarbonSortKeyAndGroupByException {
    try {
      if (isSortTempFileCompressionEnabled) {
        // with compression, one buffer must hold exactly one compressed chunk
        this.bufferSize = sortTempFileNoOFRecordsInCompression;
      }
      stream = new DataInputStream(
          new BufferedInputStream(new FileInputStream(tempFile), this.fileBufferSize));
      this.entryCount = stream.readInt();
      if (prefetch) {
        // fill the first buffer synchronously so getRow() works immediately
        new DataFetcher(false).call();
        totalRecordFetch += currentBuffer.length;
        if (totalRecordFetch < this.entryCount) {
          submit = executorService.submit(new DataFetcher(true));
        }
      } else {
        if (isSortTempFileCompressionEnabled) {
          new DataFetcher(false).call();
        }
      }
    } catch (FileNotFoundException e) {
      LOGGER.error(e);
      throw new CarbonSortKeyAndGroupByException(tempFile + " No Found", e);
    } catch (IOException e) {
      LOGGER.error(e);
      throw new CarbonSortKeyAndGroupByException(tempFile + " No Found", e);
    } catch (Exception e) {
      LOGGER.error(e);
      throw new CarbonSortKeyAndGroupByException(tempFile + " Problem while reading", e);
    }
  }
  /**
   * This method will be used to read new row from file
   *
   * @throws CarbonSortKeyAndGroupByException problem while reading
   */
  public void readRow() throws CarbonSortKeyAndGroupByException {
    if (prefetch) {
      fillDataForPrefetch();
    } else if (isSortTempFileCompressionEnabled) {
      // buffer exhausted: decode the next compressed chunk synchronously
      if (bufferRowCounter >= bufferSize) {
        try {
          new DataFetcher(false).call();
          bufferRowCounter = 0;
        } catch (Exception e) {
          LOGGER.error(e);
          throw new CarbonSortKeyAndGroupByException(tempFile + " Problem while reading", e);
        }
      }
      prefetchRecordsProceesed++;
      returnRow = currentBuffer[bufferRowCounter++];
    } else {
      Object[] outRow = getRowFromStream();
      this.returnRow = outRow;
    }
  }
  /**
   * Serves the next row from the current prefetch buffer, swapping in the
   * backup buffer (and scheduling its refill) when the current one runs out.
   */
  private void fillDataForPrefetch() {
    if (bufferRowCounter >= bufferSize) {
      if (isBackupFilled) {
        // backup is ready: swap it in and refill it in the background
        bufferRowCounter = 0;
        currentBuffer = backupBuffer;
        totalRecordFetch += currentBuffer.length;
        isBackupFilled = false;
        if (totalRecordFetch < this.entryCount) {
          submit = executorService.submit(new DataFetcher(true));
        }
      } else {
        // backup not ready yet: wait for the in-flight fetch to finish
        try {
          submit.get();
        } catch (Exception e) {
          LOGGER.error(e);
        }
        bufferRowCounter = 0;
        currentBuffer = backupBuffer;
        isBackupFilled = false;
        totalRecordFetch += currentBuffer.length;
        if (totalRecordFetch < this.entryCount) {
          submit = executorService.submit(new DataFetcher(true));
        }
      }
    }
    prefetchRecordsProceesed++;
    returnRow = currentBuffer[bufferRowCounter++];
  }
  /**
   * Reads one row from the stream, dispatching to the kettle or non-kettle
   * row layout.
   *
   * @return the decoded row
   * @throws CarbonSortKeyAndGroupByException problem while reading
   */
  private Object[] getRowFromStream() throws CarbonSortKeyAndGroupByException {
    // create new row of size 3 (1 for dims , 1 for high card , 1 for measures)
    if (useKettle) {
      return getRowFromStreamWithKettle();
    } else {
      return getRowFromStreamWithOutKettle();
    }
  }
  // TODO remove after kettle flow is removed
  private Object[] getRowFromStreamWithKettle() throws CarbonSortKeyAndGroupByException {
    Object[] holder = new Object[3];
    int index = 0;
    Integer[] dim = new Integer[this.dimensionCount];
    Object[] measures = new Object[this.measureCount];
    byte[] finalByteArr = null;
    try {
      // read dimension values
      for (int i = 0; i < this.dimensionCount; i++) {
        dim[index++] = stream.readInt();
      }
      // no-dictionary/complex dims are stored as one length-prefixed byte[]
      if ((this.noDictionaryCount + this.complexDimensionCount) > 0) {
        short lengthOfByteArray = stream.readShort();
        ByteBuffer buff = ByteBuffer.allocate(lengthOfByteArray + 2);
        buff.putShort(lengthOfByteArray);
        byte[] byteArr = new byte[lengthOfByteArray];
        stream.readFully(byteArr);
        buff.put(byteArr);
        finalByteArr = buff.array();
      }
      index = 0;
      // read measure values; a leading byte of 1 marks a non-null measure
      for (int i = 0; i < this.measureCount; i++) {
        if (stream.readByte() == 1) {
          if (aggType[i] == CarbonCommonConstants.SUM_COUNT_VALUE_MEASURE) {
            measures[index++] = stream.readDouble();
          } else if (aggType[i] == CarbonCommonConstants.BIG_INT_MEASURE) {
            measures[index++] = stream.readLong();
          } else {
            int len = stream.readInt();
            byte[] buff = new byte[len];
            stream.readFully(buff);
            measures[index++] = buff;
          }
        } else {
          measures[index++] = null;
        }
      }
      NonDictionaryUtil.prepareOutObj(holder, dim, finalByteArr, measures);
      // increment number if record read
      this.numberOfObjectRead++;
    } catch (IOException e) {
      LOGGER.error("Problme while reading the madkey fom sort temp file");
      throw new CarbonSortKeyAndGroupByException("Problem while reading the sort temp file ", e);
    }
    //return out row
    return holder;
  }
  /**
   * Reads row from file
   * @return Object[]
   * @throws CarbonSortKeyAndGroupByException
   */
  private Object[] getRowFromStreamWithOutKettle() throws CarbonSortKeyAndGroupByException {
    // create new row of size 3 (1 for dims , 1 for high card , 1 for measures)
    Object[] holder = new Object[3];
    int index = 0;
    int nonDicIndex = 0;
    int[] dim = new int[this.dimensionCount];
    byte[][] nonDicArray = new byte[this.noDictionaryCount + this.complexDimensionCount][];
    Object[] measures = new Object[this.measureCount];
    try {
      // read dimension values: dictionary dims are plain ints, no-dictionary
      // dims are length-prefixed byte arrays
      for (int i = 0; i < isNoDictionaryDimensionColumn.length; i++) {
        if (isNoDictionaryDimensionColumn[i]) {
          short len = stream.readShort();
          byte[] array = new byte[len];
          stream.readFully(array);
          nonDicArray[nonDicIndex++] = array;
        } else {
          dim[index++] = stream.readInt();
        }
      }
      // complex dims follow, also as length-prefixed byte arrays
      for (int i = 0; i < complexDimensionCount; i++) {
        short len = stream.readShort();
        byte[] array = new byte[len];
        stream.readFully(array);
        nonDicArray[nonDicIndex++] = array;
      }
      index = 0;
      // read measure values; a leading byte of 1 marks a non-null measure
      for (int i = 0; i < this.measureCount; i++) {
        if (stream.readByte() == 1) {
          if (aggType[i] == CarbonCommonConstants.SUM_COUNT_VALUE_MEASURE) {
            measures[index++] = stream.readDouble();
          } else if (aggType[i] == CarbonCommonConstants.BIG_INT_MEASURE) {
            measures[index++] = stream.readLong();
          } else {
            int len = stream.readInt();
            byte[] buff = new byte[len];
            stream.readFully(buff);
            measures[index++] = buff;
          }
        } else {
          measures[index++] = null;
        }
      }
      NonDictionaryUtil.prepareOutObj(holder, dim, nonDicArray, measures);
      // increment number if record read
      this.numberOfObjectRead++;
    } catch (IOException e) {
      LOGGER.error("Problme while reading the madkey fom sort temp file");
      throw new CarbonSortKeyAndGroupByException("Problem while reading the sort temp file ", e);
    }
    //return out row
    return holder;
  }
  /**
   * below method will be used to get the row
   *
   * @return row
   */
  public Object[] getRow() {
    return this.returnRow;
  }
  /**
   * below method will be used to check whether any more records are present
   * in file or not
   *
   * @return more row present in file
   */
  public boolean hasNext() {
    if (prefetch || isSortTempFileCompressionEnabled) {
      return this.prefetchRecordsProceesed < this.entryCount;
    }
    return this.numberOfObjectRead < this.entryCount;
  }
  /**
   * Below method will be used to close streams
   */
  public void closeStream() {
    CarbonUtil.closeStreams(stream);
    executorService.shutdown();
    // release the buffers so they can be garbage collected
    this.backupBuffer = null;
    this.currentBuffer = null;
  }
  /**
   * This method will number of entries
   *
   * @return entryCount
   */
  public int getEntryCount() {
    return entryCount;
  }
  @Override public int compareTo(SortTempFileChunkHolder other) {
    if (useKettle) {
      return compareWithKettle(other);
    } else {
      return compareWithOutKettle(other);
    }
  }
  // TODO Remove after kettle flow is removed.
  // Compares the current rows of the two holders, dimension by dimension, in
  // the legacy kettle row layout.
  private int compareWithKettle(SortTempFileChunkHolder other) {
    int diff = 0;
    int normalIndex = 0;
    int noDictionaryindex = 0;
    for (boolean isNoDictionary : isNoDictionaryDimensionColumn) {
      if (isNoDictionary) {
        byte[] byteArr1 = (byte[]) returnRow[IgnoreDictionary.BYTE_ARRAY_INDEX_IN_ROW.getIndex()];
        ByteBuffer buff1 = ByteBuffer.wrap(byteArr1);
        // extract a high card dims from complete byte[].
        NonDictionaryUtil
            .extractSingleHighCardDims(byteArr1, noDictionaryindex, noDictionaryCount, buff1);
        byte[] byteArr2 =
            (byte[]) other.returnRow[IgnoreDictionary.BYTE_ARRAY_INDEX_IN_ROW.getIndex()];
        ByteBuffer buff2 = ByteBuffer.wrap(byteArr2);
        // extract a high card dims from complete byte[].
        NonDictionaryUtil
            .extractSingleHighCardDims(byteArr2, noDictionaryindex, noDictionaryCount, buff2);
        int difference = UnsafeComparer.INSTANCE.compareTo(buff1, buff2);
        if (difference != 0) {
          return difference;
        }
        noDictionaryindex++;
      } else {
        int dimFieldA = NonDictionaryUtil.getDimension(normalIndex, returnRow);
        int dimFieldB = NonDictionaryUtil.getDimension(normalIndex, other.returnRow);
        diff = dimFieldA - dimFieldB;
        if (diff != 0) {
          return diff;
        }
        normalIndex++;
      }
    }
    return diff;
  }
  // Compares the current rows of the two holders, dimension by dimension, in
  // the non-kettle row layout (int[] mdk array + byte[][] no-dictionary array).
  private int compareWithOutKettle(SortTempFileChunkHolder other) {
    int diff = 0;
    int index = 0;
    int noDictionaryIndex = 0;
    int[] leftMdkArray = (int[]) returnRow[0];
    int[] rightMdkArray = (int[]) other.returnRow[0];
    byte[][] leftNonDictArray = (byte[][]) returnRow[1];
    byte[][] rightNonDictArray = (byte[][]) other.returnRow[1];
    for (boolean isNoDictionary : isNoDictionaryDimensionColumn) {
      if (isNoDictionary) {
        diff = UnsafeComparer.INSTANCE
            .compareTo(leftNonDictArray[noDictionaryIndex], rightNonDictArray[noDictionaryIndex]);
        if (diff != 0) {
          return diff;
        }
        noDictionaryIndex++;
      } else {
        diff = leftMdkArray[index] - rightMdkArray[index];
        if (diff != 0) {
          return diff;
        }
        index++;
      }
    }
    return diff;
  }
  @Override public boolean equals(Object obj) {
    if (!(obj instanceof SortTempFileChunkHolder)) {
      return false;
    }
    SortTempFileChunkHolder o = (SortTempFileChunkHolder) obj;
    // Bug fix: the previous implementation evaluated o.compareTo(o), which
    // compares the other holder with itself and therefore always returned 0,
    // making every holder "equal" to every other holder. Compare this holder
    // with the other one instead.
    // NOTE(review): this equality is based on the holders' *current* rows
    // (via compareTo) while hashCode is based on the fixed construction-time
    // fields, so the two are not strictly consistent — confirm whether
    // equality-by-current-row is the intended contract.
    return this.compareTo(o) == 0;
  }
  @Override public int hashCode() {
    int hash = 0;
    hash += 31 * measureCount;
    hash += 31 * dimensionCount;
    hash += 31 * complexDimensionCount;
    hash += 31 * noDictionaryCount;
    hash += tempFile.hashCode();
    return hash;
  }
  /**
   * Background task that fills either the current or the backup prefetch
   * buffer with the next batch of rows from the stream.
   */
  private final class DataFetcher implements Callable<Void> {
    private boolean isBackUpFilling;
    private int numberOfRecords;
    private DataFetcher(boolean backUp) {
      isBackUpFilling = backUp;
      calculateNumberOfRecordsToBeFetched();
    }
    // fetch a full buffer, or only what remains of the file if less
    private void calculateNumberOfRecordsToBeFetched() {
      int numberOfRecordsLeftToBeRead = entryCount - totalRecordFetch;
      numberOfRecords =
          bufferSize < numberOfRecordsLeftToBeRead ? bufferSize : numberOfRecordsLeftToBeRead;
    }
    @Override public Void call() throws Exception {
      try {
        if (isBackUpFilling) {
          backupBuffer = prefetchRecordsFromFile(numberOfRecords);
          isBackupFilled = true;
        } else {
          currentBuffer = prefetchRecordsFromFile(numberOfRecords);
        }
      } catch (Exception e) {
        LOGGER.error(e);
      }
      return null;
    }
  }
  /**
   * This method will read the records from sort temp file and keep it in a buffer
   *
   * @param numberOfRecords number of rows to read
   * @return the buffered rows
   * @throws CarbonSortKeyAndGroupByException problem while reading
   */
  private Object[][] prefetchRecordsFromFile(int numberOfRecords)
      throws CarbonSortKeyAndGroupByException {
    Object[][] records = new Object[numberOfRecords][];
    for (int i = 0; i < numberOfRecords; i++) {
      records[i] = getRowFromStream();
    }
    return records;
  }
}
| |
package com.bnorm.infinite.signals;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
/**
* A signal is a way of describing an event with some extra, optional, information. A state machine designer can use a
* signal to be able to handle when a fired event needs to contain more information then just the event itself. This
* can be useful in a lot of situations. For example, when a keyboard key is press, the signal could be the event that
* a key was pressed and the optional information could be what key.
*
* <p>To use this signal in a state machine, the event must be specified as being a signal of the specified event type
* and optional parameter type.
*
* <pre>{@code
* StateMachine<State, Signal<Event, Parameter>, Context> stateMachine;
* }</pre>
*
* This class wraps an event type with an Optional delegation. All signals with equal events are considered equal but
* all optional like methods are delegated to the specified parameter. One might think of this class like a keyed
* optional.
*
* <p>This class is not designed to be fully featured. It is only designed to give a developer an idea of how one might
* solve the problem of parameter passing with a state machine event. If more parameters are required, another signal
* like class should be created and used.
*
* @param <E> the class type of the events.
* @param <T> the class type of the signal value.
* @author Brian Norman
* @since 1.1.0
*/
public class Signal<E, T> {

    /** The signal event. */
    private final E event;

    /** The signal parameter value. */
    private final Optional<T> optional;

    /**
     * Constructs a signal with the specified event and no parameter value.
     *
     * @param event the signal event.
     */
    protected Signal(E event) {
        this.event = event;
        this.optional = Optional.empty();
    }

    /**
     * Constructs a signal with the specified event and parameter value.
     *
     * @param event the signal event.
     * @param value the signal parameter value; must not be null.
     */
    protected Signal(E event, T value) {
        this.event = event;
        this.optional = Optional.of(value);
    }

    /**
     * Creates and returns a new signal with the specified event.
     *
     * @param event the signal event.
     * @param <E> the class type of the events.
     * @param <T> the class type of the signal value.
     * @return a new event signal.
     */
    public static <E, T> Signal<E, T> of(E event) {
        return new Signal<>(event);
    }

    /**
     * Creates and returns a new signal with the specified event and parameter value.
     *
     * @param event the signal event.
     * @param value the signal parameter value.
     * @param <E> the class type of the events.
     * @param <T> the class type of the signal value.
     * @return a new event signal.
     */
    public static <E, T> Signal<E, T> of(E event, T value) {
        return new Signal<>(event, value);
    }

    /**
     * Returns the signal event.
     *
     * @return the signal event.
     */
    public E getEvent() {
        return event;
    }

    /**
     * Delegation to {@link java.util.Optional#get()}.
     *
     * @return the non-null value held by this {@code Optional}
     * @throws java.util.NoSuchElementException if there is no value present
     * @see Signal#isPresent()
     */
    public T get() {
        return optional.get();
    }

    /**
     * Delegation to {@link java.util.Optional#isPresent()}.
     *
     * @return {@code true} if there is a value present, otherwise {@code false}
     */
    public boolean isPresent() {
        return optional.isPresent();
    }

    /**
     * Delegation to {@link java.util.Optional#ifPresent(java.util.function.Consumer)}.
     *
     * @param consumer block to be executed if a value is present
     * @throws NullPointerException if value is present and {@code consumer} is null
     */
    public void ifPresent(Consumer<? super T> consumer) {
        optional.ifPresent(consumer);
    }

    /**
     * Delegation to {@link java.util.Optional#filter(java.util.function.Predicate)}.
     *
     * @param predicate a predicate to apply to the value, if present
     * @return an {@code Optional} describing the value of this {@code Optional} if a value is present and the value
     * matches the given predicate, otherwise an empty {@code Optional}
     * @throws NullPointerException if the predicate is null
     */
    public Optional<T> filter(Predicate<? super T> predicate) {
        return optional.filter(predicate);
    }

    /**
     * Delegation to {@link java.util.Optional#map(java.util.function.Function)}.
     *
     * @param <U> The type of the result of the mapping function
     * @param mapper a mapping function to apply to the value, if present
     * @return an {@code Optional} describing the result of applying a mapping function to the value of this {@code
     * Optional}, if a value is present, otherwise an empty {@code Optional}
     * @throws NullPointerException if the mapping function is null
     */
    public <U> Optional<U> map(Function<? super T, ? extends U> mapper) {
        return optional.map(mapper);
    }

    /**
     * Delegation to {@link java.util.Optional#flatMap(java.util.function.Function)}.
     *
     * @param <U> The type parameter to the {@code Optional} returned by
     * @param mapper a mapping function to apply to the value, if present the mapping function
     * @return the result of applying an {@code Optional}-bearing mapping function to the value of this {@code
     * Optional}, if a value is present, otherwise an empty {@code Optional}
     * @throws NullPointerException if the mapping function is null or returns a null result
     */
    public <U> Optional<U> flatMap(Function<? super T, Optional<U>> mapper) {
        return optional.flatMap(mapper);
    }

    /**
     * Delegation to {@link java.util.Optional#orElse(Object)}.
     *
     * @param other the value to be returned if there is no value present, may be null
     * @return the value, if present, otherwise {@code other}
     */
    public T orElse(T other) {
        return optional.orElse(other);
    }

    /**
     * Delegation to {@link java.util.Optional#orElseGet(java.util.function.Supplier)}.
     *
     * @param other a {@code Supplier} whose result is returned if no value is present
     * @return the value if present otherwise the result of {@code other.get()}
     * @throws NullPointerException if value is not present and {@code other} is null
     */
    public T orElseGet(Supplier<? extends T> other) {
        return optional.orElseGet(other);
    }

    /**
     * Delegation to {@link java.util.Optional#orElseThrow(java.util.function.Supplier)}.
     *
     * @param <X> Type of the exception to be thrown
     * @param exceptionSupplier The supplier which will return the exception to be thrown
     * @return the present value
     * @throws X if there is no value present
     * @throws NullPointerException if no value is present and {@code exceptionSupplier} is null
     */
    public <X extends Throwable> T orElseThrow(Supplier<? extends X> exceptionSupplier) throws X {
        return optional.orElseThrow(exceptionSupplier);
    }

    /**
     * Two signals are equal when their events are equal; the parameter value
     * is deliberately ignored (see the class documentation).
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        } else if (!(obj instanceof Signal)) {
            return false;
        } else {
            Signal<?, ?> other = (Signal<?, ?>) obj;
            return Objects.equals(event, other.event);
        }
    }

    /** Hash code derived from the event only, consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        return Objects.hashCode(event);
    }

    /**
     * Returns {@code Signal[event,value]} when a value is present, otherwise
     * {@code Signal[event]}.
     */
    @Override
    public String toString() {
        if (optional.isPresent()) {
            // Bug fix: previously the Optional itself was concatenated, which
            // rendered as "Signal[event,Optional[value]]"; print the contained
            // value instead, matching the no-value branch's format.
            return "Signal[" + event + "," + optional.get() + "]";
        } else {
            return "Signal[" + event + "]";
        }
    }
}
| |
// Copyright 2010 Google, Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.oacurl;
import java.awt.Desktop;
import java.awt.Desktop.Action;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
import net.oauth.OAuthAccessor;
import net.oauth.OAuthConsumer;
import net.oauth.OAuthProblemException;
import net.oauth.OAuthServiceProvider;
import net.oauth.client.OAuthClient;
import net.oauth.client.httpclient4.HttpClient4;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.ParseException;
import org.apache.http.conn.ssl.AllowAllHostnameVerifier;
import org.apache.http.conn.ssl.SSLSocketFactory;
import com.google.oacurl.LoginCallbackServer.TokenStatus;
import com.google.oacurl.dao.AccessorDao;
import com.google.oacurl.dao.ConsumerDao;
import com.google.oacurl.dao.ServiceProviderDao;
import com.google.oacurl.engine.OAuthEngine;
import com.google.oacurl.engine.V1OAuthEngine;
import com.google.oacurl.engine.V2OAuthEngine;
import com.google.oacurl.engine.WrapOAuthEngine;
import com.google.oacurl.options.LoginOptions;
import com.google.oacurl.options.OAuthVersion;
import com.google.oacurl.util.LoggingConfig;
import com.google.oacurl.util.OAuthUtil;
import com.google.oacurl.util.PropertiesProvider;
/**
* Main class for doing the initial OAuth dance to get an access token and
* secret.
*
* @author phopkins@google.com
*/
public class Login {
  private static Logger logger = Logger.getLogger(Login.class.getName());

  /**
   * Entry point for the OAuth login dance: parses options, obtains an
   * authorization URL from the selected {@code OAuthEngine}, sends the user to
   * it (browser or stdout), waits for the verifier, exchanges it for an access
   * token, and persists the credentials via {@code PropertiesProvider}.
   *
   * @param args command-line arguments, parsed into {@link LoginOptions}
   * @throws Exception on unrecoverable I/O or OAuth failures not handled below
   */
  public static void main(String[] args) throws Exception {
    LoginOptions options = new LoginOptions();
    try {
      options.parse(args);
    } catch (ParseException e) {
      // Invalid command line: report the parse error and exit non-zero.
      System.err.println(e.getMessage());
      System.exit(-1);
    }
    if (options.isHelp()) {
      new HelpFormatter().printHelp(" ", options.getOptions());
      System.exit(0);
    }
    if (options.isInsecure()) {
      // Disables SSL hostname verification globally for this JVM.
      // NOTE(review): this uses a deprecated Apache HttpClient API; intended
      // only for testing against servers with self-signed/mismatched certs.
      SSLSocketFactory.getSocketFactory().setHostnameVerifier(new AllowAllHostnameVerifier());
    }
    LoggingConfig.init(options.isVerbose());
    if (options.isWirelogVerbose()) {
      LoggingConfig.enableWireLog();
    }
    ServiceProviderDao serviceProviderDao = new ServiceProviderDao();
    ConsumerDao consumerDao = new ConsumerDao(options);
    AccessorDao accessorDao = new AccessorDao();
    String serviceProviderFileName = options.getServiceProviderFileName();
    if (serviceProviderFileName == null) {
      if (options.isBuzz()) {
        // Buzz has its own provider because it has a custom authorization URL
        serviceProviderFileName = "BUZZ";
      } else if (options.getVersion() == OAuthVersion.V2) {
        serviceProviderFileName = "GOOGLE_V2";
      } else {
        serviceProviderFileName = "GOOGLE";
      }
    }
    // We have a wee library of service provider properties files bundled into
    // the resources, so we set up the PropertiesProvider to search for them
    // if the file cannot be found.
    OAuthServiceProvider serviceProvider = serviceProviderDao.loadServiceProvider(
        new PropertiesProvider(serviceProviderFileName,
            ServiceProviderDao.class, "services/").get());
    OAuthConsumer consumer = consumerDao.loadConsumer(
        new PropertiesProvider(options.getConsumerFileName()).get(), serviceProvider);
    OAuthAccessor accessor = accessorDao.newAccessor(consumer);
    OAuthClient client = new OAuthClient(new HttpClient4());
    LoginCallbackServer callbackServer = null;
    boolean launchedBrowser = false;
    try {
      // Unless --noserver was given, start a local HTTP server to receive the
      // OAuth callback with the verifier token.
      if (!options.isNoServer()) {
        callbackServer = new LoginCallbackServer(options);
        callbackServer.start();
      }
      // Callback URL preference: explicit --callback, else the local server's
      // URL, else none (out-of-band flow).
      String callbackUrl;
      if (options.getCallback() != null) {
        callbackUrl = options.getCallback();
      } else if (callbackServer != null) {
        callbackUrl = callbackServer.getCallbackUrl();
      } else {
        callbackUrl = null;
      }
      // Select the protocol engine matching the requested OAuth variant.
      OAuthEngine engine;
      switch (options.getVersion()) {
      case V1:
        engine = new V1OAuthEngine();
        break;
      case V2:
        engine = new V2OAuthEngine();
        break;
      case WRAP:
        engine = new WrapOAuthEngine();
        break;
      default:
        throw new IllegalArgumentException("Unknown version: " + options.getVersion());
      }
      // In demo mode this loop repeats the full authorize/verify/exchange
      // cycle; otherwise it runs exactly once.
      do {
        String authorizationUrl = engine.getAuthorizationUrl(client, accessor, options, callbackUrl);
        if (!options.isNoServer()) {
          callbackServer.setAuthorizationUrl(authorizationUrl);
        }
        // Only send the user to the authorization URL on the first iteration.
        if (!launchedBrowser) {
          String url = options.isDemo() ? callbackServer.getDemoUrl() : authorizationUrl;
          if (options.isNoBrowser()) {
            System.out.println(url);
            System.out.flush();
          } else {
            launchBrowser(options, url);
          }
          launchedBrowser = true;
        }
        // Clear any stale token before waiting for a fresh verifier.
        accessor.accessToken = null;
        logger.log(Level.INFO, "Waiting for verification token...");
        String verifier;
        if (options.isNoServer()) {
          // No callback server: the user pastes the verifier on stdin.
          System.out.print("Verification token: ");
          BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
          verifier = "";
          while (verifier.isEmpty()) {
            String line = reader.readLine();
            if (line == null) {
              // stdin closed before a token arrived.
              System.exit(-1);
            }
            verifier = line.trim();
          }
        } else {
          // Block indefinitely (-1) until the callback delivers a verifier.
          verifier = callbackServer.waitForVerifier(accessor, -1);
          if (verifier == null) {
            System.err.println("Wait for verifier interrupted");
            System.exit(-1);
          }
        }
        logger.log(Level.INFO, "Verification token received: " + verifier);
        boolean success = engine.getAccessToken(accessor, client, callbackUrl, verifier);
        if (success) {
          if (callbackServer != null) {
            callbackServer.setTokenStatus(TokenStatus.VALID);
          }
          // Persist accessor + consumer credentials and the OAuth version so
          // later invocations (e.g. oacurl fetch) can reuse them.
          Properties loginProperties = new Properties();
          accessorDao.saveAccessor(accessor, loginProperties);
          consumerDao.saveConsumer(consumer, loginProperties);
          loginProperties.put("oauthVersion", options.getVersion().toString());
          new PropertiesProvider(options.getLoginFileName()).overwrite(loginProperties);
        } else {
          if (callbackServer != null) {
            callbackServer.setTokenStatus(TokenStatus.INVALID);
          }
        }
      } while (options.isDemo());
    } catch (OAuthProblemException e) {
      OAuthUtil.printOAuthProblemException(e);
    } finally {
      // Always shut the callback server down, even on failure paths.
      if (callbackServer != null) {
        callbackServer.stop();
      }
    }
  }

  /**
   * Opens {@code authorizationUrl} in a browser. Tries {@code java.awt.Desktop}
   * first (unless a browser was given with --browser); falls back to executing
   * a browser binary, defaulting to "google-chrome". Exits the JVM if the
   * fallback launch fails.
   *
   * @param options parsed options (browser override, verbosity)
   * @param authorizationUrl URL the user must visit to authorize access
   */
  private static void launchBrowser(LoginOptions options,
      String authorizationUrl) {
    logger.log(Level.INFO, "Redirecting to URL: " + authorizationUrl);
    boolean browsed = false;
    if (options.getBrowser() == null) {
      if (Desktop.isDesktopSupported()) {
        Desktop desktop = Desktop.getDesktop();
        if (desktop.isSupported(Action.BROWSE)) {
          try {
            desktop.browse(URI.create(authorizationUrl));
            browsed = true;
          } catch (IOException e) {
            // In some situations "BROWSE" appears supported but throws an
            // exception.
            logger.log(Level.WARNING, "Error opening browser for Desktop#browse(String)",
                options.isVerbose() ? e : null);
          }
        } else {
          logger.log(Level.WARNING, "java.awt.Desktop BROWSE action not supported.");
        }
      } else {
        logger.log(Level.WARNING, "java.awt.Desktop not supported. You should use Java 1.6.");
      }
    }
    if (!browsed) {
      String browser = options.getBrowser();
      if (browser == null) {
        browser = "google-chrome";
      }
      try {
        Runtime.getRuntime().exec(new String[] { browser, authorizationUrl });
      } catch (IOException e) {
        logger.log(Level.SEVERE, "Error running browser: " + browser + ". " +
            "Specify a browser with --browser or use --nobrowser to print URL.",
            options.isVerbose() ? e : null);
        System.exit(-1);
      }
    }
  }
}
| |
/**
* Copyright 2001 Sun Microsystems, Inc.
*
* See the file "license.terms" for information on usage and
* redistribution of this file, and for a DISCLAIMER OF ALL
* WARRANTIES.
*/
import com.sun.speech.freetts.audio.AudioPlayer;
import com.sun.speech.freetts.util.Utilities;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.Socket;
import javax.sound.sampled.AudioFormat;
/**
* Implements the AudioPlayer for the freetts Client/Server demo.
* This SocketAudioPlayer basically sends synthesized wave bytes to the
* client.
*/
public class SocketAudioPlayer implements AudioPlayer {
    private AudioFormat audioFormat;
    private Socket socket;
    private DataOutputStream dataOutputStream;
    private boolean debug = false;
    // Byte accounting for the current begin()/end() batch.
    private int bytesToPlay = 0;
    private int bytesPlayed = 0;
    // Tracks when the first audio byte of a batch was sent (for metrics).
    private boolean firstByteSent = false;
    private long firstByteTime = -1;

    /**
     * Constructs a SocketAudioPlayer that will send wave bytes to the
     * given Socket.
     *
     * @param socket the Socket to which synthesized wave bytes will be sent
     */
    public SocketAudioPlayer(Socket socket) {
        this.socket = socket;
        try {
            this.dataOutputStream = new DataOutputStream
                (socket.getOutputStream());
            // "debug" system property toggles verbose tracing of sent data.
            debug = Utilities.getBoolean("debug");
        } catch (IOException ioe) {
            // Best-effort: dataOutputStream stays null; later writes will fail.
            ioe.printStackTrace();
        }
    }

    /**
     * Sets the audio format to use for the next set of outputs. Since
     * an audio player can be shared by a number of voices, and since
     * voices can have different AudioFormats (sample rates for
     * example), it is necessary to allow clients to dynamically set
     * the audio format for the player.
     *
     * @param format the audio format
     */
    public void setAudioFormat(AudioFormat format) {
        this.audioFormat = format;
    }

    /**
     * Retrieves the audio format for this player.
     *
     * @return the current audio format
     */
    public AudioFormat getAudioFormat() {
        return this.audioFormat;
    }

    /**
     * Pauses all audio output on this player. Play can be resumed
     * with a call to resume. Not implemented in this Player.
     */
    public void pause() {}

    /**
     * Resumes audio output on this player. Not implemented in this Player.
     */
    public void resume() {}

    /**
     * Prepares for another batch of output. Larger groups of output
     * (such as all output associated with a single FreeTTSSpeakable)
     * should be grouped between a reset/drain pair. Not implemented here.
     */
    public void reset() {}

    /**
     * Flushes all the audio data to the Socket.
     *
     * @return <code>true</code> all the time
     */
    public boolean drain() {
        try {
            dataOutputStream.flush();
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return true;
    }

    /**
     * Starts the output of a set of data. Audio data for a single
     * utterance should be grouped between begin/end pairs. Sends the
     * batch size (in bytes, as a decimal string followed by a newline)
     * to the client so it knows how much audio to expect.
     *
     * @param size the size of data in bytes to be output before
     *     <code>end</code> is called.
     */
    public void begin(int size) {
        try {
            bytesToPlay = size;
            firstByteSent = false;
            dataOutputStream.writeBytes(String.valueOf(size) + "\n");
            dataOutputStream.flush();
            if (debug) {
                System.out.println("begin: " + size);
            }
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
    }

    /**
     * Starts the first sample timer (none in this player).
     */
    public void startFirstSampleTimer() {
    }

    /**
     * Signals the end of a set of data. Audio data for a single
     * utterance should be grouped between <code>begin/end</code> pairs.
     * If fewer bytes were written than announced in begin(), pads the
     * remainder with silence (zero bytes) so the client receives exactly
     * the promised amount.
     *
     * @return <code>true</code> if the audio was output properly,
     *     <code>false</code> if the output was cancelled or interrupted.
     */
    public boolean end() {
        if (debug) {
            System.out.println("end");
        }
        if (bytesPlayed < bytesToPlay) {
            int bytesNotPlayed = bytesToPlay - bytesPlayed;
            write(new byte[bytesNotPlayed], 0, bytesNotPlayed);
        }
        bytesToPlay = 0;
        bytesPlayed = 0;
        return true;
    }

    /**
     * Cancels all queued output. All 'write' calls until the next
     * reset will return false. Not implemented in this Player.
     */
    public void cancel() {}

    /**
     * Waits for all audio playback to stop, and closes this AudioPlayer.
     * Not implemented in this Player.
     */
    public void close() {}

    /**
     * Returns the current volume. The volume is specified as a number
     * between 0.0 and 1.0, where 1.0 is the maximum volume and 0.0 is
     * the minimum volume. Not implemented in this Player.
     *
     * @return the current volume (always -1 here)
     */
    public float getVolume() {
        return -1;
    }

    /**
     * Sets the current volume. The volume is specified as a number
     * between 0.0 and 1.0, where 1.0 is the maximum volume and 0.0 is
     * the minimum volume. Not implemented in this Player.
     *
     * @param volume the new volume (between 0 and 1)
     */
    public void setVolume(float volume) {}

    /**
     * Gets the amount of audio played since the last resetTime.
     * Not implemented in this Player.
     *
     * @return the amount of audio in milliseconds (always -1 here)
     */
    public long getTime() {
        return -1;
    }

    /**
     * Resets the audio clock. Not implemented in this Player.
     */
    public void resetTime() {}

    /**
     * Writes the given bytes to the audio stream.
     *
     * @param audioData audio data to write to the device
     *
     * @return <code>true</code> if the write completed successfully,
     *     <code>false</code> if the write was cancelled.
     */
    public boolean write(byte[] audioData) {
        return write(audioData, 0, audioData.length);
    }

    /**
     * Writes the given bytes to the audio stream.
     *
     * @param audioData audio data to write to the device
     * @param offset the offset into the buffer
     * @param size the number of bytes to write.
     *
     * @return <code>true</code> if the write completed successfully,
     *     <code>false</code> if the write was cancelled.
     */
    public boolean write(byte[] audioData, int offset, int size) {
        try {
            if (!firstByteSent) {
                firstByteTime = System.currentTimeMillis();
                firstByteSent = true;
            }
            bytesPlayed += size;
            dataOutputStream.write(audioData, offset, size);
            dataOutputStream.flush();
            if (debug) {
                // Fix: sample indices must be offset-relative (the previous
                // code read audioData[0] and audioData[size/2], which showed
                // the wrong bytes for offset > 0 and threw
                // ArrayIndexOutOfBoundsException for size == 0).
                String sample = (size > 0)
                    ? audioData[offset] + " " + audioData[offset + size / 2]
                    : "";
                System.out.println("sent " + size + " bytes " + sample);
            }
            return true;
        } catch (IOException ioe) {
            ioe.printStackTrace();
            return false;
        }
    }

    /**
     * Shows metrics for this audio player. Not implemented in this Player.
     */
    public void showMetrics() {}

    /**
     * Returns the first byte sent time in milliseconds, the last time it
     * was recorded.
     *
     * @return the last first byte sent time in milliseconds
     */
    public long getFirstByteSentTime() {
        return firstByteTime;
    }
}
| |
package io.joynr.arbitration;
/*
* #%L
* %%
* Copyright (C) 2011 - 2017 BMW Car IT GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.HashMap;
import java.util.Map;
import com.google.common.collect.Maps;
/**
* Storage class to pass all settings to an arbitrator defining the strategy and conditions for provider arbitration.
*
*/
public class DiscoveryQos {
    /** DiscoveryQos that filters nothing: unlimited timeout, no arbitration, unlimited cache age. */
    public static final DiscoveryQos NO_FILTER;
    /** Sentinel meaning "value not set; fall back to the system default". */
    public static final long NO_VALUE = -1L;
    private long discoveryTimeoutMs;
    private ArbitrationStrategy arbitrationStrategy;
    private ArbitrationStrategyFunction arbitrationStrategyFunction;
    private static final ArbitrationStrategy DEFAULT_ARBITRATIONSTRATEGY = ArbitrationStrategy.LastSeen;
    long cacheMaxAgeMs;
    private static final long DEFAULT_CACHEMAXAGE = 0L;
    public static final long NO_MAX_AGE = Long.MAX_VALUE;
    private boolean providerMustSupportOnChange;
    private static final boolean DEFAULT_PROVIDERMUSTSUPPORTONCHANGE = false;
    private long retryIntervalMs;
    private DiscoveryScope discoveryScope;
    private static final DiscoveryScope DEFAULT_DISCOVERYSCOPE = DiscoveryScope.LOCAL_AND_GLOBAL;
    private HashMap<String, String> customParameters = Maps.newHashMap();

    static {
        NO_FILTER = new DiscoveryQos(Long.MAX_VALUE, ArbitrationStrategy.NotSet, Long.MAX_VALUE);
    }

    /**
     * DiscoveryQos object with default values.
     */
    public DiscoveryQos() {
        this.discoveryTimeoutMs = NO_VALUE;
        this.arbitrationStrategy = DEFAULT_ARBITRATIONSTRATEGY;
        this.cacheMaxAgeMs = DEFAULT_CACHEMAXAGE;
        this.providerMustSupportOnChange = DEFAULT_PROVIDERMUSTSUPPORTONCHANGE;
        this.retryIntervalMs = NO_VALUE;
        this.discoveryScope = DEFAULT_DISCOVERYSCOPE;
    }

    /**
     * @param discoveryTimeout
     *            Timeout for rpc calls to wait for arbitration to finish.
     * @param arbitrationStrategy
     *            Strategy for choosing the appropriate provider from the list returned by the capabilities directory
     * @param cacheMaxAge
     *            Maximum age of entries in the localCapabilitiesDirectory. If this value filters out all entries of the
     *            local capabilities directory a lookup in the global capabilitiesDirectory will take place.
     */
    public DiscoveryQos(long discoveryTimeout, ArbitrationStrategy arbitrationStrategy, long cacheMaxAge) {
        this(discoveryTimeout, arbitrationStrategy, cacheMaxAge, DEFAULT_DISCOVERYSCOPE);
    }

    /**
     * @param discoveryTimeout
     *            Timeout for rpc calls to wait for arbitration to finish.
     * @param arbitrationStrategyFunction
     *            function that chooses the appropriate provider from the list returned by the capabilities directory
     * @param cacheMaxAge
     *            Maximum age of entries in the localCapabilitiesDirectory. If this value filters out all entries of the
     *            local capabilities directory a lookup in the global capabilitiesDirectory will take place.
     */
    public DiscoveryQos(long discoveryTimeout, ArbitrationStrategyFunction arbitrationStrategyFunction, long cacheMaxAge) {
        this(discoveryTimeout, arbitrationStrategyFunction, cacheMaxAge, DEFAULT_DISCOVERYSCOPE);
    }

    /**
     * @param discoveryTimeout
     *            Timeout for rpc calls to wait for arbitration to finish.
     * @param arbitrationStrategy
     *            Strategy for choosing the appropriate provider from the list returned by the capabilities directory
     * @param cacheMaxAge
     *            Maximum age of entries in the localCapabilitiesDirectory. If this value filters out all entries of the
     *            local capabilities directory a lookup in the global capabilitiesDirectory will take place.
     * @param discoveryScope
     *            determines where the discovery process will look for matching providers<br>
     *            <ul>
     *            <li>LOCAL_ONLY: only locally registered providers will be considered.
     *            <li>LOCAL_THEN_GLOBAL locally registered providers are preferred. When none is found, the global
     *            providers are included in search results.
     *            <li>LOCAL_AND_GLOBAL: all providers registered locally, and query results from the global directory are
     *            combined and returned.
     *            <li>GLOBAL_ONLY only returns providers that are found in the global directory.
     *            </ul>
     */
    public DiscoveryQos(long discoveryTimeout,
                        ArbitrationStrategy arbitrationStrategy,
                        long cacheMaxAge,
                        DiscoveryScope discoveryScope) {
        this(discoveryTimeout, NO_VALUE, arbitrationStrategy, cacheMaxAge, discoveryScope);
    }

    /**
     * @param discoveryTimeout
     *            Timeout for rpc calls to wait for arbitration to finish.
     * @param retryIntervalMs
     *            Lookups for the arbitration will be repeated after this time interval if they were not successful
     * @param arbitrationStrategy
     *            Strategy for choosing the appropriate provider from the list returned by the capabilities directory
     * @param cacheMaxAge
     *            Maximum age of entries in the localCapabilitiesDirectory. If this value filters out all entries of the
     *            local capabilities directory a lookup in the global capabilitiesDirectory will take place.
     * @param discoveryScope
     *            determines where the discovery process will look for matching providers<br>
     *            <ul>
     *            <li>LOCAL_ONLY: only locally registered providers will be considered.
     *            <li>LOCAL_THEN_GLOBAL locally registered providers are preferred. When none is found, the global
     *            providers are included in search results.
     *            <li>LOCAL_AND_GLOBAL: all providers registered locally, and query results from the global directory are
     *            combined and returned.
     *            <li>GLOBAL_ONLY only returns providers that are found in the global directory.
     *            </ul>
     * @throws IllegalStateException if {@code arbitrationStrategy} is {@code Custom}; a custom strategy must be
     *             supplied as an {@link ArbitrationStrategyFunction}.
     */
    public DiscoveryQos(long discoveryTimeout,
                        long retryIntervalMs,
                        ArbitrationStrategy arbitrationStrategy,
                        long cacheMaxAge,
                        DiscoveryScope discoveryScope) {
        if (arbitrationStrategy.equals(ArbitrationStrategy.Custom)) {
            throw new IllegalStateException("A Custom strategy can only be set by passing an arbitration strategy function to the DiscoveryQos constructor");
        }
        this.cacheMaxAgeMs = cacheMaxAge;
        this.discoveryScope = discoveryScope;
        this.discoveryTimeoutMs = discoveryTimeout;
        // Bug fix: the passed retryIntervalMs was previously overwritten with
        // NO_VALUE a few lines later, silently discarding the caller's value.
        this.retryIntervalMs = retryIntervalMs;
        this.arbitrationStrategy = arbitrationStrategy;
        this.providerMustSupportOnChange = DEFAULT_PROVIDERMUSTSUPPORTONCHANGE;
    }

    public DiscoveryQos(long discoveryTimeout,
                        ArbitrationStrategyFunction arbitrationStrategyFunction,
                        long cacheMaxAge,
                        DiscoveryScope discoveryScope) {
        this(discoveryTimeout, NO_VALUE, arbitrationStrategyFunction, cacheMaxAge, discoveryScope);
    }

    public DiscoveryQos(long discoveryTimeout,
                        long retryIntervalMs,
                        ArbitrationStrategyFunction arbitrationStrategyFunction,
                        long cacheMaxAge,
                        DiscoveryScope discoveryScope) {
        this.arbitrationStrategy = ArbitrationStrategy.Custom;
        this.discoveryTimeoutMs = discoveryTimeout;
        this.arbitrationStrategyFunction = arbitrationStrategyFunction;
        this.cacheMaxAgeMs = cacheMaxAge;
        this.discoveryScope = discoveryScope;
        // Bug fix: was hard-coded to NO_VALUE, ignoring the retryIntervalMs
        // parameter entirely.
        this.retryIntervalMs = retryIntervalMs;
        this.providerMustSupportOnChange = DEFAULT_PROVIDERMUSTSUPPORTONCHANGE;
    }

    /**
     * The discovery process outputs a list of matching providers. The arbitration strategy then chooses one or more of
     * them to be used by the proxy.
     *
     * @param arbitrationStrategy
     *            Defines the strategy used to choose the "best" provider.
     * @throws IllegalStateException if {@code arbitrationStrategy} is {@code Custom}; see the constructor taking an
     *             {@link ArbitrationStrategyFunction}.
     */
    public void setArbitrationStrategy(ArbitrationStrategy arbitrationStrategy) {
        if (arbitrationStrategy.equals(ArbitrationStrategy.Custom)) {
            throw new IllegalStateException("A Custom strategy can only be set by passing an arbitration strategy function to the DiscoveryQos constructor");
        }
        this.arbitrationStrategy = arbitrationStrategy;
    }

    /**
     * The discovery process outputs a list of matching providers. The arbitration strategy then chooses one or more of
     * them to be used by the proxy.
     *
     * @return the arbitration strategy used to pick the "best" provider of the list of matching providers
     */
    public ArbitrationStrategy getArbitrationStrategy() {
        return arbitrationStrategy;
    }

    /**
     * As soon as the arbitration QoS is set on the proxy builder, discovery of suitable providers is triggered. If the
     * discovery process does not find matching providers within the arbitration timeout duration it will be terminated
     * and you will get an arbitration exception.
     *
     * @param discoveryTimeoutMs
     *            Sets the amount of time the arbitrator keeps trying to find a suitable provider. The arbitration
     *            lookup might happen multiple times during this time span.
     */
    public void setDiscoveryTimeoutMs(long discoveryTimeoutMs) {
        this.discoveryTimeoutMs = discoveryTimeoutMs;
    }

    /**
     * As soon as the arbitration QoS is set on the proxy builder, discovery of suitable providers is triggered. If the
     * discovery process does not find matching providers within the arbitration timeout duration it will be terminated
     * and you will get an arbitration exception.
     *
     * @return the duration used to discover matching providers
     */
    public long getDiscoveryTimeoutMs() {
        return discoveryTimeoutMs;
    }

    /**
     * addCustomParameter allows to add special parameters to the DiscoveryQos which will be used only by some
     * strategies.
     *
     * @param key
     *            String to identify the arbitration parameter
     * @param value
     *            Any object used by the arbitrator to choose a provider.
     */
    public void addCustomParameter(String key, String value) {
        customParameters.put(key, value);
    }

    /**
     * getCustomParameter returns the parameters previously specified by addCustomParameter.
     *
     * @param key key to identify the custom parameter
     * @return Returns the value to which the specified key is mapped, or null if the map of additional parameters
     *         contains no mapping for the key
     */
    public Object getCustomParameter(String key) {
        return customParameters.get(key);
    }

    /**
     * Provider entries in the global capabilities directory are cached locally. Discovery will consider entries in this
     * cache valid if they are younger than the max age of cached providers as defined in the QoS. All valid entries
     * will be processed by the arbitrator when searching for and arbitrating the "best" matching provider.
     * <p>
     * NOTE: Valid cache entries might prevent triggering a lookup in the global capabilities directory. Therefore,
     * providers registered with the global capabilities after the last lookup and before the cacheMaxAge expires will
     * not be discovered.
     *
     * @return the maximum age of locally cached provider entries to be used during discovery and arbitration before
     *         refreshing from the global directory
     */
    public long getCacheMaxAgeMs() {
        return cacheMaxAgeMs;
    }

    /**
     * Provider entries in the global capabilities directory are cached locally. Discovery will consider entries in this
     * cache valid if they are younger than the max age of cached providers as defined in the QoS. All valid entries
     * will be processed by the arbitrator when searching for and arbitrating the "best" matching provider.
     * <p>
     * NOTE: Valid cache entries might prevent triggering a lookup in the global capabilities directory. Therefore,
     * providers registered with the global capabilities after the last lookup and before the cacheMaxAge expires will
     * not be discovered.
     *
     * @param cacheMaxAgeMs
     *            Maximum age of entries in the localCapabilitiesDirectory. If this value filters out all entries of the
     *            local capabilities directory a lookup in the global capabilitiesDirectory will take place. Negative
     *            values are clamped to 0.
     */
    public void setCacheMaxAgeMs(long cacheMaxAgeMs) {
        this.cacheMaxAgeMs = cacheMaxAgeMs < 0L ? 0L : cacheMaxAgeMs;
    }

    /**
     * @return true if the discovery scope restricts lookups to locally registered providers only
     */
    public boolean isLocalOnly() {
        return discoveryScope == DiscoveryScope.LOCAL_ONLY;
    }

    /**
     *
     * @return the interval used for retrying discovery if the previous attempt was unsuccessful
     */
    public long getRetryIntervalMs() {
        return retryIntervalMs;
    }

    /**
     * @param retryIntervalMs
     *            The time to wait between discovery retries after encountering a discovery error. The actual delay may
     *            be longer, as there is a system-wide minimum delay.
     */
    public void setRetryIntervalMs(long retryIntervalMs) {
        this.retryIntervalMs = retryIntervalMs;
    }

    /**
     * Indicates if arbitration should only consider providers that support onChange subscriptions
     *
     * @return true if only providers that support onChange subscriptions are considered
     */
    public boolean getProviderMustSupportOnChange() {
        return providerMustSupportOnChange;
    }

    /**
     * Indicates if arbitration should only consider providers that support onChange subscriptions
     *
     * @param providerMustSupportOnChange
     *            true if only providers that support onChange subscriptions should be considered
     */
    public void setProviderMustSupportOnChange(boolean providerMustSupportOnChange) {
        this.providerMustSupportOnChange = providerMustSupportOnChange;
    }

    /**
     * @param discoveryScope selects capability registries to choose from for provider discovery
     */
    public void setDiscoveryScope(DiscoveryScope discoveryScope) {
        this.discoveryScope = discoveryScope;
    }

    /**
     * @return scope criteria to select from capability registries for provider discovery
     */
    public DiscoveryScope getDiscoveryScope() {
        return discoveryScope;
    }

    ArbitrationStrategyFunction getArbitrationStrategyFunction() {
        return arbitrationStrategyFunction;
    }

    public Map<String, String> getCustomParameters() {
        return customParameters;
    }
}
| |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.compositor.layouts;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.RectF;
import android.os.Build;
import android.util.SparseArray;
import android.view.MotionEvent;
import android.view.ViewGroup;
import org.chromium.base.CommandLine;
import org.chromium.chrome.browser.ChromeApplication;
import org.chromium.chrome.browser.ChromeSwitches;
import org.chromium.chrome.browser.UrlConstants;
import org.chromium.chrome.browser.compositor.bottombar.contextualsearch.ContextualSearchPanel;
import org.chromium.chrome.browser.compositor.layouts.components.LayoutTab;
import org.chromium.chrome.browser.compositor.layouts.components.VirtualView;
import org.chromium.chrome.browser.compositor.layouts.content.TabContentManager;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.CascadeEventFilter;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.ContextualSearchEventFilter;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.EdgeSwipeEventFilter;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.EdgeSwipeEventFilter.ScrollDirection;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.EdgeSwipeHandler;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.EmptyEdgeSwipeHandler;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.EventFilter;
import org.chromium.chrome.browser.compositor.layouts.eventfilter.GestureHandler;
import org.chromium.chrome.browser.compositor.layouts.phone.ContextualSearchLayout;
import org.chromium.chrome.browser.compositor.overlays.SceneOverlay;
import org.chromium.chrome.browser.contextualsearch.ContextualSearchManagementDelegate;
import org.chromium.chrome.browser.contextualsearch.ContextualSearchManager.ContextualSearchContentViewDelegate;
import org.chromium.chrome.browser.contextualsearch.ContextualSearchStaticEventFilter;
import org.chromium.chrome.browser.contextualsearch.ContextualSearchStaticEventFilter.ContextualSearchTapHandler;
import org.chromium.chrome.browser.device.DeviceClassManager;
import org.chromium.chrome.browser.dom_distiller.ReaderModeEdgeSwipeHandler;
import org.chromium.chrome.browser.dom_distiller.ReaderModePanel;
import org.chromium.chrome.browser.dom_distiller.ReaderModeStaticEventFilter;
import org.chromium.chrome.browser.dom_distiller.ReaderModeStaticEventFilter.ReaderModePanelSelector;
import org.chromium.chrome.browser.dom_distiller.ReaderModeStaticEventFilter.ReaderModeTapHandler;
import org.chromium.chrome.browser.fullscreen.FullscreenManager;
import org.chromium.chrome.browser.tab.ChromeTab;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.tabmodel.TabCreatorManager;
import org.chromium.chrome.browser.tabmodel.TabModel;
import org.chromium.chrome.browser.tabmodel.TabModelSelector;
import org.chromium.chrome.browser.tabmodel.TabModelSelectorTabObserver;
import org.chromium.chrome.browser.tabmodel.TabModelUtils;
import org.chromium.chrome.browser.tabmodel.document.DocumentTabModelSelector;
import org.chromium.chrome.browser.util.FeatureUtilities;
import org.chromium.content.browser.ContentViewCore;
import org.chromium.ui.resources.dynamics.DynamicResourceLoader;
import java.util.List;
/**
* A {@link Layout} controller for a simple document use case. This class is responsible for
* driving all {@link Layout}s that get shown via the {@link LayoutManager}.
*/
public class LayoutManagerDocument extends LayoutManager
implements ContextualSearchTapHandler, ContextualSearchContentViewDelegate,
ReaderModeTapHandler {
// Layouts
/** A {@link Layout} used for showing a normal web page. */
protected final StaticLayout mStaticLayout;
/** A {@link Layout} used for when the contextual search panel is up. */
protected final ContextualSearchLayout mContextualSearchLayout;
protected EdgeNavigationLayout mEdgeNavigationLayout;
// Event Filters
private final EdgeSwipeEventFilter mStaticEdgeEventFilter;
private final ContextualSearchEventFilter mContextualSearchEventFilter;
private final EdgeSwipeHandler mToolbarSwipeHandler;
// Event Filter Handlers
/** A {@link GestureHandler} that will delegate all events to {@link #getActiveLayout()}. */
protected final GestureHandler mGestureHandler;
private final EdgeSwipeHandler mContextualSearchEdgeSwipeHandler;
private final EdgeSwipeHandler mReaderModeEdgeSwipeHandler;
// Internal State
private final SparseArray<LayoutTab> mTabCache = new SparseArray<LayoutTab>();
private final ContextualSearchPanel mContextualSearchPanel;
/** A delegate for interacting with the Contextual Search manager. */
protected ContextualSearchManagementDelegate mContextualSearchDelegate;
@SuppressWarnings("unused") private TabModelSelectorTabObserver mTabModelSelectorTabObserver;
private final ReaderModePanelSelector mReaderModePanelSelector;
/**
 * Creates a {@link LayoutManagerDocument} instance.
 * @param host A {@link LayoutManagerHost} instance.
 */
public LayoutManagerDocument(LayoutManagerHost host) {
    super(host);
    Context context = host.getContext();
    LayoutRenderHost renderHost = host.getLayoutRenderHost();
    mContextualSearchPanel = new ContextualSearchPanel(context, this);
    // Selector that resolves the currently active reader-mode panel, or null
    // when the static layout is inactive or the panel is hidden/not allowed.
    mReaderModePanelSelector = new ReaderModePanelSelector() {
        @Override
        public ReaderModePanel getActiveReaderModePanel() {
            if (mStaticLayout == null || !mStaticLayout.isActive()) return null;
            ReaderModePanel panel = mStaticLayout.getReaderModePanel();
            if (panel == null) return null;
            if (!panel.isShowing() || !panel.isReaderModeCurrentlyAllowed()) return null;
            return panel;
        }
    };
    // Build Event Filter Handlers
    mContextualSearchEdgeSwipeHandler = new ContextualSearchEdgeSwipeHandler(this);
    mReaderModeEdgeSwipeHandler = new ReaderModeEdgeSwipeHandler(
            mReaderModePanelSelector, this);
    mGestureHandler = new GestureHandlerLayoutDelegate(this);
    mToolbarSwipeHandler = new ToolbarSwipeHandler(this);
    // Edge navigation is only created when suppressed Chromium features are
    // NOT explicitly enabled on the command line.
    if (!CommandLine.getInstance().hasSwitch(
            ChromeSwitches.ENABLE_SUPPRESSED_CHROMIUM_FEATURES)) {
        mEdgeNavigationLayout = EdgeNavigationLayout.getNewLayout(context, this, renderHost);
    }
    // Build Event Filters
    mStaticEdgeEventFilter =
            new EdgeSwipeEventFilter(context, this, new StaticEdgeSwipeHandler());
    mContextualSearchEventFilter = new ContextualSearchEventFilter(
            context, this, mGestureHandler, mContextualSearchPanel);
    EventFilter contextualSearchStaticEventFilter = new ContextualSearchStaticEventFilter(
            context, this, mContextualSearchPanel, mContextualSearchEdgeSwipeHandler, this);
    EventFilter readerModeStaticEventFilter = new ReaderModeStaticEventFilter(
            context, this, mReaderModePanelSelector, mReaderModeEdgeSwipeHandler, this);
    // The cascade filter delivers events to the first filter that claims them;
    // order matters: reader mode, then contextual search, then (optionally)
    // edge navigation, then the static edge-swipe filter.
    EventFilter staticCascadeEventFilter;
    if (mEdgeNavigationLayout != null) {
        staticCascadeEventFilter = new CascadeEventFilter(context, this,
                new EventFilter[]{readerModeStaticEventFilter,contextualSearchStaticEventFilter,
                        mEdgeNavigationLayout.getEventFilter(), mStaticEdgeEventFilter});
    } else {
        staticCascadeEventFilter = new CascadeEventFilter(context, this,
                new EventFilter[]{readerModeStaticEventFilter,contextualSearchStaticEventFilter,
                        mStaticEdgeEventFilter});
    }
    // Build Layouts
    mStaticLayout = new StaticLayout(
            context, this, renderHost, staticCascadeEventFilter, mContextualSearchPanel);
    mContextualSearchLayout = new ContextualSearchLayout(
            context, this, renderHost, mContextualSearchEventFilter, mContextualSearchPanel);
    // Set up layout parameters
    mStaticLayout.setLayoutHandlesTabLifecycles(true);
    setNextLayout(null);
}
/**
 * Wires this layout manager to the tab model, content manager, and contextual
 * search machinery, then delegates remaining initialization to the superclass.
 * Must be called before the manager processes any layout events.
 */
@Override
public void init(TabModelSelector selector, TabCreatorManager creator,
        TabContentManager content, ViewGroup androidContentContainer,
        ContextualSearchManagementDelegate contextualSearchDelegate,
        DynamicResourceLoader dynamicResourceLoader) {
    // Save state
    mContextualSearchDelegate = contextualSearchDelegate;
    // Initialize Event Filters
    mStaticEdgeEventFilter.setTabModelSelector(selector);
    mContextualSearchEventFilter.setManagementDelegate(contextualSearchDelegate);
    // Initialize Layouts
    mStaticLayout.setTabModelSelector(selector, content);
    mContextualSearchLayout.setTabModelSelector(selector, content);
    if (mEdgeNavigationLayout != null)
        mEdgeNavigationLayout.setTabModelSelector(selector, content);
    // Initialize Contextual Search Panel
    mContextualSearchPanel.setManagementDelegate(contextualSearchDelegate);
    mContextualSearchPanel.setDynamicResourceLoader(dynamicResourceLoader);
    // Set back flow communication (delegate may be null, e.g. when contextual
    // search is disabled).
    if (contextualSearchDelegate != null) {
        contextualSearchDelegate.setContextualSearchPanelDelegate(mContextualSearchPanel);
    }
    // Refresh the cached LayoutTab whenever a tab's content or background
    // color changes so the compositor stays in sync with the live tab.
    mTabModelSelectorTabObserver = new TabModelSelectorTabObserver(selector) {
        @Override
        public void onContentChanged(Tab tab) {
            initLayoutTabFromHost(tab.getId());
        }
        @Override
        public void onBackgroundColorChanged(Tab tab, int color) {
            initLayoutTabFromHost(tab.getId());
        }
    };
    super.init(selector, creator, content, androidContentContainer, contextualSearchDelegate,
            dynamicResourceLoader);
}
/**
 * Tears down the superclass state and then any layouts/observers this
 * manager created. Each member is null-checked so a partially constructed
 * instance can still be destroyed safely.
 */
@Override
public void destroy() {
    super.destroy();
    if (mStaticLayout != null) {
        mStaticLayout.destroy();
    }
    if (mContextualSearchLayout != null) {
        mContextualSearchLayout.destroy();
    }
    if (mTabModelSelectorTabObserver != null) {
        mTabModelSelectorTabObserver.destroy();
    }
}
/**
 * Populates {@code views} with accessibility virtual views for the current
 * layout. Currently a no-op; the list is left unmodified.
 * @param views The list that would receive the virtual views.
 */
@Override
public void getVirtualViews(List<VirtualView> views) {
// Nothing to do here yet.
}
/**
 * Propagates a viewport size change to every cached {@link LayoutTab}.
 * @param viewportDp The new viewport, in dp.
 */
@Override
protected void onViewportChanged(RectF viewportDp) {
    super.onViewportChanged(viewportDp);
    // This assumes that the content width/height is always the size of the host.
    final float contentWidth = viewportDp.width();
    final float contentHeight = viewportDp.height();
    for (int i = 0; i < mTabCache.size(); i++) {
        mTabCache.valueAt(i).setContentSize(contentWidth, contentHeight);
    }
}
/**
 * @return The {@link EdgeSwipeHandler} responsible for processing swipe
 *         events for the toolbar (used to switch tabs via horizontal swipes
 *         — see {@code ToolbarSwipeHandler}).
 */
@Override
public EdgeSwipeHandler getTopSwipeHandler() {
return mToolbarSwipeHandler;
}
/**
 * Clears all cached {@link LayoutTab}s EXCEPT the one associated with
 * {@code tabId}, which (if present) survives the clear.
 * @param tabId The id of the tab whose cached entry should be kept.
 */
protected void emptyCachesExcept(int tabId) {
LayoutTab tab = mTabCache.get(tabId);
mTabCache.clear();
// Re-insert the surviving entry after the wholesale clear.
if (tab != null) mTabCache.put(tabId, tab);
}
/**
 * Registers {@code helper} with every {@link Layout} owned by this class.
 * @param helper The {@link SceneOverlay} instance to add to all layouts.
 */
protected void addGlobalSceneOverlay(SceneOverlay helper) {
    mStaticLayout.addSceneOverlay(helper);
    mContextualSearchLayout.addSceneOverlay(helper);
    if (mEdgeNavigationLayout != null) {
        mEdgeNavigationLayout.addSceneOverlay(helper);
    }
}
/**
 * Looks up a cached {@link LayoutTab} without creating one.
 * @param tabId The id of the tab represented by a {@link LayoutTab}.
 * @return The cached {@link LayoutTab} if one exists, or {@code null}.
 */
protected LayoutTab getExistingLayoutTab(int tabId) {
return mTabCache.get(tabId);
}
/** @return The static layout, which is the default layout for this manager. */
@Override
protected Layout getDefaultLayout() {
return mStaticLayout;
}
/**
 * Re-initializes the cached {@link LayoutTab} for {@code tabId} from the
 * host tab's current state (background color, fallback texture, stall state,
 * live-texture eligibility) and requests a render. No-ops if the selector,
 * active layout, tab, or cached entry is missing.
 * @param tabId The id of the tab to refresh.
 */
@Override
public void initLayoutTabFromHost(final int tabId) {
    TabModelSelector selector = getTabModelSelector();
    if (selector == null || getActiveLayout() == null) return;
    ChromeTab tab = ChromeTab.fromTab(selector.getTabById(tabId));
    if (tab == null) return;
    LayoutTab layoutTab = mTabCache.get(tabId);
    if (layoutTab == null) return;
    String url = tab.getUrl();
    boolean isNativePage = url != null && url.startsWith(UrlConstants.CHROME_NATIVE_SCHEME);
    // Live textures are only usable for ordinary web content with a content view.
    boolean canUseLiveTexture =
            tab.getContentViewCore() != null && !tab.isShowingSadTab() && !isNativePage;
    layoutTab.initFromHost(tab.getBackgroundColor(), tab.getFallbackTextureId(),
            tab.shouldStall(), canUseLiveTexture);
    mHost.requestRender();
}
/**
 * Returns the {@link LayoutTab} for {@code id}, reusing and re-initializing
 * a cached instance when one exists, otherwise creating and caching a new
 * one. Positive max-content bounds are applied afterwards.
 * @param id The tab id.
 * @param incognito Whether the tab is incognito (only used on creation).
 * @param showCloseButton Whether the close button should be shown.
 * @param isTitleNeeded Whether a title texture is needed.
 * @param maxContentWidth Max content width in dp; ignored if not positive.
 * @param maxContentHeight Max content height in dp; ignored if not positive.
 * @return The (re)initialized {@link LayoutTab}.
 */
@Override
public LayoutTab createLayoutTab(int id, boolean incognito, boolean showCloseButton,
        boolean isTitleNeeded, float maxContentWidth, float maxContentHeight) {
    LayoutTab layoutTab = mTabCache.get(id);
    if (layoutTab != null) {
        layoutTab.init(mLastContentWidthDp, mLastContentHeightDp, showCloseButton, isTitleNeeded);
    } else {
        layoutTab = new LayoutTab(id, incognito, mLastContentWidthDp, mLastContentHeightDp,
                showCloseButton, isTitleNeeded);
        mTabCache.put(id, layoutTab);
    }
    if (maxContentWidth > 0.f) layoutTab.setMaxContentWidth(maxContentWidth);
    if (maxContentHeight > 0.f) layoutTab.setMaxContentHeight(maxContentHeight);
    return layoutTab;
}
/**
 * Drops the cached {@link LayoutTab} for {@code id}; a later
 * {@code createLayoutTab} call will build a fresh one.
 * @param id The id of the tab whose layout data should be released.
 */
@Override
public void releaseTabLayout(int id) {
mTabCache.remove(id);
}
/**
 * Lets the superclass decide whether to intercept the touch event; when it
 * does, the active layout is unstalled immediately so it can respond.
 * @param e The motion event being considered.
 * @param isKeyboardShowing Whether the soft keyboard is visible.
 * @return Whether the event was intercepted.
 */
@Override
public boolean onInterceptTouchEvent(MotionEvent e, boolean isKeyboardShowing) {
    final boolean consumed = super.onInterceptTouchEvent(e, isKeyboardShowing);
    if (consumed) {
        getActiveLayout().unstallImmediately();
    }
    return consumed;
}
/**
 * Should be called when the user presses the back button on the phone.
 * @return Whether or not the back button was consumed by the active {@link Layout}.
 */
@Override
public boolean onBackPressed() {
    Layout activeLayout = getActiveLayout();
    if (activeLayout == null) return false;
    return activeLayout.onBackPressed();
}
/**
 * Handles a tap on the Contextual Search bar: outside compatibility mode it
 * starts showing the ContextualSearchLayout (expanding the panel) and then
 * forwards the click to the panel. Ignored while the ContextualSearchLayout
 * is already active or no delegate is set.
 * @param time The time of the tap event.
 * @param x The x position of the tap.
 * @param y The y position of the tap.
 */
@Override
public void handleTapContextualSearchBar(long time, float x, float y) {
if (getActiveLayout() == mContextualSearchLayout) return;
if (mContextualSearchDelegate == null) return;
// When not in compatibility mode, tapping on the Search Bar will expand the Panel,
// therefore we must start showing the ContextualSearchLayout.
// TODO(pedrosimonetti): once we implement the close button, a tap in the Panel might
// trigger the Panel to close, not expand, so in that case we don't want to show the
// ContextualSearchLayout. Coordinate with dtrainor@ to solve this. It might be
// necessary for the ContextualSearchPanel to be able to trigger the display of the
// ContextualSearchLayout.
if (!mContextualSearchDelegate.isRunningInCompatibilityMode()) {
showContextualSearchLayout(true);
}
mContextualSearchPanel.handleClick(time, x, y);
}
/**
 * Notifies the host that the Contextual Search {@link ContentViewCore} has
 * been added so it can be rendered.
 * @param contentViewCore The search results content view being attached.
 */
@Override
public void setContextualSearchContentViewCore(ContentViewCore contentViewCore) {
mHost.onContentViewCoreAdded(contentViewCore);
}
/**
 * Called when the Contextual Search content view is released; refreshes the
 * current tab's fullscreen-enabled state. No-ops without a tab model
 * selector or current tab.
 */
@Override
public void releaseContextualSearchContentViewCore() {
    TabModelSelector selector = getTabModelSelector();
    if (selector == null) return;
    Tab currentTab = selector.getCurrentTab();
    if (currentTab == null) return;
    currentTab.updateFullscreenEnabledState();
}
/**
 * Starts showing the ContextualSearchLayout, first asking the delegate to
 * preserve the base page's selection across the upcoming loss of focus.
 * @param animate Whether the transition to the layout should be animated.
 */
private void showContextualSearchLayout(boolean animate) {
mContextualSearchDelegate.preserveBasePageSelectionOnNextLossOfFocus();
startShowing(mContextualSearchLayout, animate);
}
/**
 * Swipe handler for the static layout: only enables a downward edge swipe
 * while persistent fullscreen mode is active (the swipe itself needs no
 * handling beyond being recognized).
 */
private class StaticEdgeSwipeHandler extends EmptyEdgeSwipeHandler {
    @Override
    public void swipeStarted(ScrollDirection direction, float x, float y) {
        // Intentionally empty.
    }

    @Override
    public boolean isSwipeEnabled(ScrollDirection direction) {
        if (direction != ScrollDirection.DOWN) return false;
        FullscreenManager fullscreenManager = mHost.getFullscreenManager();
        return fullscreenManager != null && fullscreenManager.getPersistentFullscreenMode();
    }
}
/**
 * Swipe handler for the Contextual Search panel. Upward swipes are enabled
 * while the search panel is showing; in compatibility mode a swipe opens
 * the resolved search URL in a new tab instead of expanding the panel.
 */
private class ContextualSearchEdgeSwipeHandler extends EdgeSwipeHandlerLayoutDelegate {
    public ContextualSearchEdgeSwipeHandler(LayoutProvider provider) {
        super(provider);
    }

    @Override
    public void swipeStarted(ScrollDirection direction, float x, float y) {
        // Compatibility mode cannot expand the panel in place; divert to a new tab.
        if (isCompatibilityMode()) {
            mContextualSearchDelegate.openResolvedSearchUrlInNewTab();
            return;
        }
        if (getActiveLayout() != mContextualSearchLayout) {
            showContextualSearchLayout(false);
        }
        super.swipeStarted(direction, x, y);
    }

    @Override
    public boolean isSwipeEnabled(ScrollDirection direction) {
        if (direction != ScrollDirection.UP) return false;
        return mContextualSearchDelegate != null
                && mContextualSearchDelegate.isShowingSearchPanel();
    }

    /** @return Whether a delegate exists and reports compatibility mode. */
    private boolean isCompatibilityMode() {
        return mContextualSearchDelegate != null
                && mContextualSearchDelegate.isRunningInCompatibilityMode();
    }
}
/**
 * A {@link EdgeSwipeHandler} meant to respond to edge events for the toolbar.
 * Horizontal swipes move the current document tab model index left/right.
 */
private class ToolbarSwipeHandler extends EdgeSwipeHandlerLayoutDelegate {
    /** Direction of the most recent swipe; consulted when the gesture ends. */
    private ScrollDirection mLastScroll;

    /**
     * Creates an instance of the {@link ToolbarSwipeHandler}.
     * @param provider A {@link LayoutProvider} instance.
     */
    public ToolbarSwipeHandler(LayoutProvider provider) {
        super(provider);
    }

    @Override
    public void swipeStarted(ScrollDirection direction, float x, float y) {
        super.swipeStarted(direction, x, y);
        mLastScroll = direction;
    }

    @Override
    public void swipeFinished() {
        super.swipeFinished();
        changeTabs();
    }

    @Override
    public void swipeFlingOccurred(float x, float y, float tx, float ty, float vx, float vy) {
        super.swipeFlingOccurred(x, y, tx, ty, vx, vy);
        changeTabs();
    }

    /** Steps the current tab index once in the direction of the last swipe. */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private void changeTabs() {
        DocumentTabModelSelector selector =
                ChromeApplication.getDocumentTabModelSelector();
        TabModel tabModel = selector.getCurrentModel();
        int currentIndex = tabModel.index();
        if (mLastScroll == ScrollDirection.LEFT) {
            // Swiping left advances toward the end of the model.
            if (currentIndex < tabModel.getCount() - 1) {
                TabModelUtils.setIndex(tabModel, currentIndex + 1);
            }
        } else if (currentIndex > 0) {
            TabModelUtils.setIndex(tabModel, currentIndex - 1);
        }
    }

    @Override
    public boolean isSwipeEnabled(ScrollDirection direction) {
        FullscreenManager manager = mHost.getFullscreenManager();
        // Toolbar swipes require the static layout, document-mode eligibility,
        // device support, and no persistent fullscreen.
        boolean blocked = getActiveLayout() != mStaticLayout
                || !FeatureUtilities.isDocumentModeEligible(mHost.getContext())
                || !DeviceClassManager.enableToolbarSwipe(
                        FeatureUtilities.isDocumentMode(mHost.getContext()))
                || (manager != null && manager.getPersistentFullscreenMode());
        if (blocked) return false;
        return direction == ScrollDirection.LEFT || direction == ScrollDirection.RIGHT;
    }
}
/**
 * Forwards a tap on the Reader Mode bar to the currently active Reader Mode
 * panel, if any.
 * @param time The time of the tap event.
 * @param x The x position of the tap.
 * @param y The y position of the tap.
 */
@Override
public void handleTapReaderModeBar(long time, float x, float y) {
ReaderModePanel activePanel = mReaderModePanelSelector.getActiveReaderModePanel();
if (activePanel != null) activePanel.handleClick(time, x, y);
}
}
| |
/*
* Copyright 2010-2020 Alfresco Software, Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.api.runtime.model.impl;
import java.util.Date;
import java.util.Objects;
import org.activiti.api.process.model.StartMessageSubscription;
/**
 * Immutable default implementation of {@link StartMessageSubscription}.
 * Instances are created via the nested {@link Builder}; the package-private
 * no-arg constructor exists for serialization frameworks.
 */
public class StartMessageSubscriptionImpl implements StartMessageSubscription {

    private String id;
    private String eventName;
    private String processDefinitionId;
    private String configuration;
    private String activityId;
    private Date created;

    private StartMessageSubscriptionImpl(Builder builder) {
        this.id = builder.id;
        this.eventName = builder.eventName;
        this.processDefinitionId = builder.processDefinitionId;
        this.configuration = builder.configuration;
        this.activityId = builder.activityId;
        this.created = builder.created;
    }

    StartMessageSubscriptionImpl() {
    }

    /** @return the subscription id */
    public String getId() {
        return id;
    }

    /** @return the message event name this subscription listens for */
    public String getEventName() {
        return eventName;
    }

    /** @return the id of the process definition owning the subscription */
    public String getProcessDefinitionId() {
        return processDefinitionId;
    }

    /** @return the raw subscription configuration string */
    public String getConfiguration() {
        return configuration;
    }

    /** @return the id of the activity the subscription is attached to */
    public String getActivityId() {
        return activityId;
    }

    /** @return the creation timestamp */
    public Date getCreated() {
        return created;
    }

    @Override
    public int hashCode() {
        // Must stay consistent with equals(): same field set, any order.
        return Objects.hash(activityId,
                            configuration,
                            created,
                            eventName,
                            id,
                            processDefinitionId);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        StartMessageSubscriptionImpl other = (StartMessageSubscriptionImpl) obj;
        return Objects.equals(activityId, other.activityId) &&
               Objects.equals(configuration, other.configuration) &&
               Objects.equals(created, other.created) &&
               Objects.equals(eventName, other.eventName) &&
               Objects.equals(id, other.id) &&
               Objects.equals(processDefinitionId, other.processDefinitionId);
    }

    @Override
    public String toString() {
        // Fixed: previously reported the wrong class name
        // ("MessageEventSubscriptionImpl"), a copy-paste leftover.
        StringBuilder sb = new StringBuilder();
        sb.append("StartMessageSubscriptionImpl [id=")
          .append(id)
          .append(", eventName=")
          .append(eventName)
          .append(", processDefinitionId=")
          .append(processDefinitionId)
          .append(", configuration=")
          .append(configuration)
          .append(", activityId=")
          .append(activityId)
          .append(", created=")
          .append(created)
          .append("]");
        return sb.toString();
    }

    /**
     * Creates a builder to build {@link StartMessageSubscriptionImpl}.
     * @return created builder
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * Creates a builder to build {@link StartMessageSubscriptionImpl} and initialize it with the given object.
     * @param startMessageSubscriptionImpl to initialize the builder with
     * @return created builder
     */
    public static Builder builderFrom(StartMessageSubscriptionImpl startMessageSubscriptionImpl) {
        return new Builder(startMessageSubscriptionImpl);
    }

    /**
     * Builder to build {@link StartMessageSubscriptionImpl}.
     */
    public static final class Builder {
        private String id;
        private String eventName;
        private String processDefinitionId;
        private String configuration;
        private String activityId;
        private Date created;

        public Builder() {
        }

        private Builder(StartMessageSubscriptionImpl startMessageSubscriptionImpl) {
            this.id = startMessageSubscriptionImpl.id;
            this.eventName = startMessageSubscriptionImpl.eventName;
            this.processDefinitionId = startMessageSubscriptionImpl.processDefinitionId;
            this.configuration = startMessageSubscriptionImpl.configuration;
            this.activityId = startMessageSubscriptionImpl.activityId;
            this.created = startMessageSubscriptionImpl.created;
        }

        /**
         * Builder method for id parameter.
         * @param id field to set
         * @return builder
         */
        public Builder withId(String id) {
            this.id = id;
            return this;
        }

        /**
         * Builder method for eventName parameter.
         * @param eventName field to set
         * @return builder
         */
        public Builder withEventName(String eventName) {
            this.eventName = eventName;
            return this;
        }

        /**
         * Builder method for processDefinitionId parameter.
         * @param processDefinitionId field to set
         * @return builder
         */
        public Builder withProcessDefinitionId(String processDefinitionId) {
            this.processDefinitionId = processDefinitionId;
            return this;
        }

        /**
         * Builder method for configuration parameter.
         * @param configuration field to set
         * @return builder
         */
        public Builder withConfiguration(String configuration) {
            this.configuration = configuration;
            return this;
        }

        /**
         * Builder method for activityId parameter.
         * @param activityId field to set
         * @return builder
         */
        public Builder withActivityId(String activityId) {
            this.activityId = activityId;
            return this;
        }

        /**
         * Builder method for created parameter.
         * @param created field to set
         * @return builder
         */
        public Builder withCreated(Date created) {
            this.created = created;
            return this;
        }

        /**
         * Builder method of the builder.
         * @return built class
         */
        public StartMessageSubscriptionImpl build() {
            return new StartMessageSubscriptionImpl(this);
        }
    }
}
| |
/*
* 02/25/2012
*
* Copyright (C) 2012 Robert Futrell
* robert_futrell at users.sourceforge.net
* http://fifesoft.com/rsyntaxtextarea
*
* This library is distributed under a modified BSD license. See the included
* RSTALanguageSupport.License.txt file for details.
*/
package org.fife.rsta.ac.js.ast;
import java.util.HashMap;
import java.util.Map;

import org.fife.rsta.ac.js.ast.type.TypeDeclaration;
/**
 * Caches local- and system-scope variables. Local-scope variables are cleared
 * each time the <code>SourceCompletionProvider</code> finishes parsing the
 * script; system-scope variables are never cleared automatically.
 */
public class VariableResolver {

    // Local-scope variables, name -> declaration. Cleared after each parse.
    private Map<String, JavaScriptVariableDeclaration> localVariables =
            new HashMap<String, JavaScriptVariableDeclaration>();

    // Pre-processing variables - these are set when pre-processing.
    private Map<String, JavaScriptVariableDeclaration> preProcessedVariables =
            new HashMap<String, JavaScriptVariableDeclaration>();

    // System variables, name -> declaration. Never cleared automatically, as
    // they are always available to the system.
    private Map<String, JavaScriptVariableDeclaration> systemVariables =
            new HashMap<String, JavaScriptVariableDeclaration>();

    // Function declarations keyed by name, mirroring the variable scopes.
    private Map<String, JavaScriptFunctionDeclaration> localFunctions =
            new HashMap<String, JavaScriptFunctionDeclaration>();
    private Map<String, JavaScriptFunctionDeclaration> preProcessedFunctions =
            new HashMap<String, JavaScriptFunctionDeclaration>();

    /**
     * Add local scope variable to cache.
     *
     * @param declaration variable to add
     */
    public void addLocalVariable(JavaScriptVariableDeclaration declaration) {
        localVariables.put(declaration.getName(), declaration);
    }

    /**
     * Add pre-processing scope variable to cache.
     *
     * @param declaration variable to add
     */
    public void addPreProcessingVariable(JavaScriptVariableDeclaration declaration) {
        preProcessedVariables.put(declaration.getName(), declaration);
    }

    /**
     * Add system scope variable to cache.
     *
     * @param declaration variable to add
     */
    public void addSystemVariable(JavaScriptVariableDeclaration declaration) {
        systemVariables.put(declaration.getName(), declaration);
    }

    /**
     * Remove pre-processing variable from the pre-processing variable cache.
     *
     * @param name name of the variable to remove
     */
    public void removePreProcessingVariable(String name) {
        preProcessedVariables.remove(name);
    }

    /**
     * Remove system variable from the system variable cache.
     *
     * @param name name of the system variable to remove
     */
    public void removeSystemVariable(String name) {
        systemVariables.remove(name);
    }

    /**
     * Find a declaration for {@code name} against all variable scopes, in
     * order: local, pre-processed, then system, checking the caret position
     * against each candidate's scope.
     *
     * @param name name of the variable to resolve
     * @param dot caret position in the text document
     * @return the matching declaration, or {@code null} if none is in scope
     */
    public JavaScriptVariableDeclaration findDeclaration(String name, int dot) {
        JavaScriptVariableDeclaration declaration =
                findDeclaration(localVariables, name, dot);
        if (declaration == null) {
            declaration = findDeclaration(preProcessedVariables, name, dot);
        }
        if (declaration == null) {
            declaration = findDeclaration(systemVariables, name, dot);
        }
        return declaration;
    }

    /**
     * Find a declaration for {@code name}, limited to the scopes enabled by
     * the boolean flags, searched in order: local, pre-processed, system.
     *
     * @param name name of the variable to resolve
     * @param dot caret position in the text document
     * @param local whether to search local variables
     * @param preProcessed whether to search pre-processed variables
     * @param system whether to search system variables
     * @return the matching declaration, or {@code null} if none is in scope
     */
    public JavaScriptVariableDeclaration findDeclaration(String name, int dot, boolean local, boolean preProcessed, boolean system) {
        JavaScriptVariableDeclaration declaration =
                local ? findDeclaration(localVariables, name, dot) : null;
        if (declaration == null && preProcessed) {
            declaration = findDeclaration(preProcessedVariables, name, dot);
        }
        if (declaration == null && system) {
            declaration = findDeclaration(systemVariables, name, dot);
        }
        return declaration;
    }

    /**
     * Find a declaration within pre-processed and system scopes only,
     * checking the caret position against the candidate's scope.
     *
     * @param name name of the variable to resolve
     * @param dot caret position in the text document
     * @return the matching declaration, or {@code null} if none is in scope
     */
    public JavaScriptVariableDeclaration findNonLocalDeclaration(String name, int dot) {
        // Try pre-processing variables first.
        JavaScriptVariableDeclaration declaration =
                findDeclaration(preProcessedVariables, name, dot);
        // Last chance... look in system variables.
        return declaration == null
                ? findDeclaration(systemVariables, name, dot)
                : declaration;
    }

    /**
     * Find a declaration in one scope map and check the caret position
     * against the declaration's code block (if any) and offset.
     *
     * @param variables the scope map to search
     * @param name name of the variable to resolve
     * @param dot caret position in the text document
     * @return the declaration, or {@code null} if absent or out of scope
     */
    private JavaScriptVariableDeclaration findDeclaration(
            Map<String, JavaScriptVariableDeclaration> variables,
            String name, int dot) {
        JavaScriptVariableDeclaration dec = variables.get(name);
        if (dec != null) {
            if (dec.getCodeBlock() == null || dec.getCodeBlock().contains(dot)) {
                int decOffs = dec.getOffset();
                // NOTE(review): only matches when the caret is at or before the
                // declaration offset — confirm this is the intended scoping
                // rule (it looks inverted for "declared before use").
                if (dot <= decOffs) {
                    return dec;
                }
            }
        }
        return null;
    }

    /**
     * Find the <code>TypeDeclaration</code> for the variable and check the
     * scope of the caret position.
     *
     * @param name name of variable
     * @param dot caret position
     * @return TypeDeclaration for the name, or {@code null} if unresolved
     */
    public TypeDeclaration getTypeDeclarationForVariable(String name, int dot) {
        JavaScriptVariableDeclaration dec = findDeclaration(name, dot);
        return dec != null ? dec.getTypeDeclaration() : null;
    }

    /**
     * Clear all local scope variables and functions.
     */
    public void resetLocalVariables() {
        localVariables.clear();
        localFunctions.clear();
    }

    /**
     * Reset pre-processing state.
     *
     * @param clear if {@code true}, drop all pre-processed variables and
     *        functions; otherwise just reset each variable to its original type
     */
    public void resetPreProcessingVariables(boolean clear) {
        if (clear) {
            preProcessedVariables.clear();
            preProcessedFunctions.clear();
        }
        else {
            for (JavaScriptVariableDeclaration dec : preProcessedVariables.values()) {
                dec.resetVariableToOriginalType();
            }
        }
    }

    /** Clear all system scope variables. */
    public void resetSystemVariables() {
        systemVariables.clear();
    }

    /**
     * Resolve the type of a variable name at the given caret position.
     *
     * @param varName the variable name to resolve
     * @param dot caret position in the text document
     * @return TypeDeclaration for the variable name, or {@code null}
     */
    public TypeDeclaration resolveType(String varName, int dot) {
        // Just look up the variable.
        return getTypeDeclarationForVariable(varName, dot);
    }

    /**
     * Add a local scope function to the cache.
     *
     * @param func function to add
     */
    public void addLocalFunction(JavaScriptFunctionDeclaration func) {
        localFunctions.put(func.getName(), func);
    }

    /**
     * Find a function declaration by cache key, local scope first.
     *
     * @param name cache key of the function
     * @return the declaration, or {@code null} if not found
     */
    public JavaScriptFunctionDeclaration findFunctionDeclaration(String name) {
        JavaScriptFunctionDeclaration dec = localFunctions.get(name);
        if (dec == null) {
            dec = preProcessedFunctions.get(name);
        }
        return dec;
    }

    /**
     * Find a function declaration by cache key, limited to enabled scopes.
     *
     * @param name cache key of the function
     * @param local whether to search local functions
     * @param preProcessed whether to search pre-processed functions
     * @return the declaration, or {@code null} if not found
     */
    public JavaScriptFunctionDeclaration findFunctionDeclaration(String name, boolean local, boolean preProcessed) {
        JavaScriptFunctionDeclaration dec = local ? localFunctions.get(name) : null;
        if (dec == null) {
            dec = preProcessed ? preProcessedFunctions.get(name) : null;
        }
        return dec;
    }

    /**
     * Find a function declaration by its function name (not cache key),
     * limited to enabled scopes.
     *
     * @param name the function name to match
     * @param local whether to search local functions
     * @param preprocessed whether to search pre-processed functions
     * @return the first matching declaration, or {@code null}
     */
    public JavaScriptFunctionDeclaration findFunctionDeclarationByFunctionName(String name, boolean local, boolean preprocessed) {
        JavaScriptFunctionDeclaration func = local ? findFirstFunction(name, localFunctions) : null;
        if (func == null) {
            func = preprocessed ? findFirstFunction(name, preProcessedFunctions) : null;
        }
        return func;
    }

    /**
     * Linear scan for the first function whose function name matches.
     *
     * @param name the function name to match
     * @param functions the scope map to scan
     * @return the first match in iteration order, or {@code null}
     */
    private JavaScriptFunctionDeclaration findFirstFunction(String name,
            Map<String, JavaScriptFunctionDeclaration> functions) {
        for (JavaScriptFunctionDeclaration func : functions.values()) {
            if (name.equals(func.getFunctionName())) {
                return func;
            }
        }
        return null;
    }

    /**
     * Add pre-processing scope function to cache.
     *
     * @param func function to add
     */
    public void addPreProcessingFunction(
            JavaScriptFunctionDeclaration func) {
        preProcessedFunctions.put(func.getName(), func);
    }
}
| |
/*
* Copyright 2015 Schedo Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ncode.android.apps.schedo.nearby;
import android.app.Activity;
import android.os.Handler;
import android.text.Html;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.ncode.android.apps.schedo.R;
import com.ncode.android.apps.schedo.util.LogUtils;
import java.util.*;
import static com.ncode.android.apps.schedo.util.LogUtils.makeLogTag;
/**
* Adapter class for building views related to BLE devices.
*/
public class NearbyDeviceAdapter extends BaseAdapter {

    private static final String TAG = LogUtils.makeLogTag("NearbyDeviceAdapter");

    // Minimum interval between successive data-set-changed notifications (ms),
    // used to debounce rapid device updates.
    private static final long NOTIFY_DELAY = 300;

    private ArrayList<NearbyDevice> mNearbyDevices;
    private Activity mActivity;
    private Handler mHandler = new Handler();
    // Wall-clock time of the last immediate notifyDataSetChanged().
    private long mLastChangeRequestTime = 0;

    NearbyDeviceAdapter(Activity activity) {
        mNearbyDevices = new ArrayList<NearbyDevice>();
        mActivity = activity;
    }

    @Override
    public int getCount() {
        return mNearbyDevices.size();
    }

    @Override
    public Object getItem(int i) {
        return mNearbyDevices.get(i);
    }

    @Override
    public long getItemId(int i) {
        NearbyDevice device = mNearbyDevices.get(i);
        // Identity hash gives a stable-enough id for the device object's lifetime.
        return System.identityHashCode(device);
    }

    /**
     * Builds (or recycles) the row view for the device at position {@code i}.
     * Falls back to loading/placeholder content while metadata is absent.
     */
    @Override
    public View getView(int i, View view, ViewGroup container) {
        if (view == null) {
            // Inflate against the parent (without attaching) so the row picks up
            // the list's LayoutParams; inflating with a null parent discards them.
            view = mActivity.getLayoutInflater()
                    .inflate(R.layout.ble_listitem_device, container, false);
        }
        NearbyDevice device = mNearbyDevices.get(i);
        DeviceMetadata deviceMetadata = device.getInfo();

        TextView infoView;
        infoView = (TextView) view.findViewById(R.id.title);
        if (deviceMetadata != null) {
            String title = deviceMetadata.title;
            if (!TextUtils.isEmpty(title)) {
                infoView.setText(Html.fromHtml(deviceMetadata.title));
                infoView.setVisibility(View.VISIBLE);
            } else {
                infoView.setVisibility(View.GONE);
            }
        } else {
            infoView.setText(R.string.loading);
            infoView.setVisibility(View.VISIBLE);
        }

        infoView = (TextView) view.findViewById(R.id.url);
        if (deviceMetadata != null) {
            infoView.setText(deviceMetadata.siteUrl);
        } else {
            infoView.setText(device.getUrl());
        }

        infoView = (TextView) view.findViewById(R.id.description);
        if (deviceMetadata != null) {
            String description = deviceMetadata.description;
            if (!TextUtils.isEmpty(description)) {
                infoView.setText(Html.fromHtml(deviceMetadata.description));
                infoView.setVisibility(View.VISIBLE);
            } else {
                infoView.setVisibility(View.GONE);
            }
        } else {
            infoView.setVisibility(View.INVISIBLE);
        }

        ImageView iconView = (ImageView) view.findViewById(R.id.icon);
        if (deviceMetadata != null) {
            iconView.setImageBitmap(deviceMetadata.icon);
        } else {
            iconView.setImageResource(R.drawable.empty_nearby_icon);
        }
        return view;
    }

    /** Adds a device on the UI thread and schedules a change notification. */
    public void addDevice(final NearbyDevice device) {
        mActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mNearbyDevices.add(device);
                device.setAdapter(NearbyDeviceAdapter.this);
                queueChangedNotification();
            }
        });
    }

    /**
     * @return the tracked device with the same URL as {@code candidateDevice},
     *         or {@code null} if none matches.
     */
    public NearbyDevice getExistingDevice(NearbyDevice candidateDevice) {
        for (NearbyDevice device : mNearbyDevices) {
            if (device.getUrl().equals(candidateDevice.getUrl())) {
                return device;
            }
        }
        return null;
    }

    /**
     * Removes devices not seen within {@code NearbyDeviceManager.MAX_INACTIVE_TIME}.
     * @return the list of devices that were scheduled for removal.
     */
    public ArrayList<NearbyDevice> removeExpiredDevices() {
        // Get a list of devices that we need to remove.
        ArrayList<NearbyDevice> toRemove = new ArrayList<NearbyDevice>();
        for (NearbyDevice device : mNearbyDevices) {
            if (device.isLastSeenAfter(NearbyDeviceManager.MAX_INACTIVE_TIME)) {
                toRemove.add(device);
            }
        }
        // Remove those devices from the list (on the UI thread) and notify.
        for (final NearbyDevice device : toRemove) {
            mActivity.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mNearbyDevices.remove(device);
                    queueChangedNotification();
                }
            });
        }
        return toRemove;
    }

    /** Requests a (debounced) refresh of the list UI from any thread. */
    public void updateListUI() {
        mActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                queueChangedNotification();
            }
        });
    }

    private Runnable mNotifyRunnable = new Runnable() {
        @Override
        public void run() {
            notifyDataSetChanged();
        }
    };

    /**
     * Notifies immediately if no notification happened within NOTIFY_DELAY;
     * otherwise coalesces into a single delayed notification.
     */
    public void queueChangedNotification() {
        long now = System.currentTimeMillis();
        // If a notification was recently issued, create a pending notification.
        if (now - mLastChangeRequestTime < NOTIFY_DELAY) {
            // Replace any pending timer so only one delayed notify is queued.
            mHandler.removeCallbacks(mNotifyRunnable);
            mHandler.postDelayed(mNotifyRunnable, NOTIFY_DELAY);
        } else {
            // Otherwise, if there's no active timer, notify immediately.
            Log.i(TAG, "queueChangedNotification: Immediately notifying.");
            notifyDataSetChanged();
        }
    }

    @Override
    public void notifyDataSetChanged() {
        Log.i(TAG, "queueChangedNotification: notifyDataSetChanged");
        // Keep strongest-signal devices first.
        Collections.sort(mNearbyDevices, mRssiComparator);
        super.notifyDataSetChanged();
        // Cancel the pending notification timer if there is one.
        mHandler.removeCallbacks(mNotifyRunnable);
        mLastChangeRequestTime = System.currentTimeMillis();
    }

    // Sorts by descending average RSSI. Integer.compare avoids the overflow
    // risk of the previous subtraction-based comparator.
    private Comparator<NearbyDevice> mRssiComparator = new Comparator<NearbyDevice>() {
        @Override
        public int compare(NearbyDevice lhs, NearbyDevice rhs) {
            return Integer.compare(rhs.getAverageRSSI(), lhs.getAverageRSSI());
        }
    };
}
| |
package org.apereo.cas.support.pac4j.web.flow;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.text.StringEscapeUtils;
import org.apereo.cas.CasProtocolConstants;
import org.apereo.cas.CentralAuthenticationService;
import org.apereo.cas.authentication.AuthenticationResult;
import org.apereo.cas.authentication.AuthenticationSystemSupport;
import org.apereo.cas.authentication.principal.ClientCredential;
import org.apereo.cas.authentication.principal.Service;
import org.apereo.cas.services.RegisteredService;
import org.apereo.cas.services.RegisteredServiceDelegatedAuthenticationPolicy;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.services.UnauthorizedServiceException;
import org.apereo.cas.ticket.TicketGrantingTicket;
import org.apereo.cas.util.Pac4jUtils;
import org.apereo.cas.web.support.WebUtils;
import org.pac4j.core.client.BaseClient;
import org.pac4j.core.client.Client;
import org.pac4j.core.client.Clients;
import org.pac4j.core.client.IndirectClient;
import org.pac4j.core.context.WebContext;
import org.pac4j.core.credentials.Credentials;
import org.pac4j.core.exception.HttpAction;
import org.pac4j.core.profile.CommonProfile;
import org.springframework.http.HttpStatus;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.webflow.action.AbstractAction;
import org.springframework.webflow.context.ExternalContext;
import org.springframework.webflow.execution.Event;
import org.springframework.webflow.execution.RequestContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.Serializable;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * This class represents an action to put at the beginning of the webflow.
 * <p>
 * Before any authentication, redirection urls are computed for the different clients defined as well as the theme,
 * locale, method and service are saved into the web session.</p>
 * After authentication, appropriate information are expected on this callback url to finish the authentication
 * process with the provider.
 *
 * @author Jerome Leleu
 * @since 3.5.0
 */
@Slf4j
@Getter
@AllArgsConstructor
public class DelegatedClientAuthenticationAction extends AbstractAction {
    /**
     * Stop the webflow for pac4j and route to view.
     */
    public static final String STOP_WEBFLOW = "stopWebflow";
    /**
     * Stop the webflow.
     */
    public static final String STOP = "stop";
    /**
     * Client action state id in the webflow.
     */
    public static final String CLIENT_ACTION = "clientAction";
    /**
     * All the urls and names of the pac4j clients.
     */
    public static final String PAC4J_URLS = "pac4jUrls";
    /**
     * View id that stops the webflow.
     */
    public static final String VIEW_ID_STOP_WEBFLOW = "casPac4jStopWebflow";
    /** Strips the trailing {@code Client} (optionally numbered) suffix from pac4j client class names. */
    private static final Pattern PAC4J_CLIENT_SUFFIX_PATTERN = Pattern.compile("Client\\d*");
    /** Matches characters that are not valid in a CSS class token so they can be substituted. */
    private static final Pattern PAC4J_CLIENT_CSS_CLASS_SUBSTITUTION_PATTERN = Pattern.compile("\\W");
    private final Clients clients;
    private final AuthenticationSystemSupport authenticationSystemSupport;
    private final CentralAuthenticationService centralAuthenticationService;
    private final String themeParamName;
    private final String localParamName;
    private final boolean autoRedirect;
    private final ServicesManager servicesManager;

    /**
     * Handle the delegated authentication round-trip: if the provider has called back with
     * credentials, finalize the authentication and create a ticket-granting ticket; otherwise
     * prepare the login page with the redirect urls of all authorized providers.
     *
     * @param context the current webflow request context
     * @return {@code success()} on a finished authentication, {@code stopWebflow()} when the
     * flow must be halted (provider error, pending http action, auto-redirect), or {@code error()}
     * to render the login page
     * @throws Exception if the authentication transaction fails
     */
    @Override
    protected Event doExecute(final RequestContext context) throws Exception {
        final HttpServletRequest request = WebUtils.getHttpServletRequestFromExternalWebflowContext(context);
        final HttpServletResponse response = WebUtils.getHttpServletResponseFromExternalWebflowContext(context);
        final HttpSession session = request.getSession();
        final WebContext webContext = Pac4jUtils.getPac4jJ2EContext(request, response);
        final String clientName = request.getParameter(this.clients.getClientNameParameter());
        LOGGER.debug("Delegated authentication is handled by client name [{}]", clientName);
        // The provider may call back with explicit error parameters; stop the flow in that case.
        if (hasDelegationRequestFailed(request, response.getStatus()).isPresent()) {
            return stopWebflow();
        }
        if (StringUtils.isNotBlank(clientName)) {
            final BaseClient<Credentials, CommonProfile> client = (BaseClient<Credentials, CommonProfile>) this.clients.findClient(clientName);
            LOGGER.debug("Delegated authentication client is [{}]", client);
            // The service was stashed in the session by prepareForLoginPage() before the redirect.
            final Service service = (Service) session.getAttribute(CasProtocolConstants.PARAMETER_SERVICE);
            context.getFlowScope().put(CasProtocolConstants.PARAMETER_SERVICE, service);
            LOGGER.debug("Retrieve service: [{}]", service);
            if (service != null) {
                request.setAttribute(CasProtocolConstants.PARAMETER_SERVICE, service.getId());
                if (!isDelegatedClientAuthorizedForService(client, service)) {
                    LOGGER.warn("Delegated client [{}] is not authorized by service [{}]", client, service);
                    throw new UnauthorizedServiceException(UnauthorizedServiceException.CODE_UNAUTHZ_SERVICE, StringUtils.EMPTY);
                }
            }
            final Credentials credentials;
            try {
                credentials = client.getCredentials(webContext);
                LOGGER.debug("Retrieved credentials from client as [{}]", credentials);
            } catch (final Exception e) {
                // pac4j signals a required http action (e.g. an external redirect) via an exception.
                LOGGER.debug("The request requires http action.", e);
                return stopWebflow();
            }
            restoreRequestAttribute(request, session, this.themeParamName);
            restoreRequestAttribute(request, session, this.localParamName);
            restoreRequestAttribute(request, session, CasProtocolConstants.PARAMETER_METHOD);
            if (credentials != null) {
                final ClientCredential clientCredential = new ClientCredential(credentials);
                final AuthenticationResult authenticationResult =
                    this.authenticationSystemSupport.handleAndFinalizeSingleAuthenticationTransaction(service, clientCredential);
                final TicketGrantingTicket tgt = this.centralAuthenticationService.createTicketGrantingTicket(authenticationResult);
                WebUtils.putTicketGrantingTicketInScopes(context, tgt);
                return success();
            }
        }
        // no or aborted authentication : go to login page
        prepareForLoginPage(context);
        if (response.getStatus() == HttpStatus.UNAUTHORIZED.value()) {
            return stopWebflow();
        }
        if (this.autoRedirect) {
            final Set<ProviderLoginPageConfiguration> urls = context.getFlowScope().get(PAC4J_URLS, Set.class);
            // Auto-redirect only makes sense when there is exactly one provider to choose from.
            if (urls != null && urls.size() == 1) {
                final ProviderLoginPageConfiguration cfg = urls.stream().findFirst().get();
                LOGGER.debug("Auto-redirecting to client url [{}]", cfg.getRedirectUrl());
                response.sendRedirect(cfg.getRedirectUrl());
                final ExternalContext externalContext = context.getExternalContext();
                externalContext.recordResponseComplete();
                return stopWebflow();
            }
        }
        return error();
    }
    /**
     * Prepare the data for the login page: stash the service and request parameters in the web
     * session and compute the redirect url of every authorized indirect client into flow scope
     * under {@link #PAC4J_URLS}.
     *
     * @param context The current webflow context
     */
    protected void prepareForLoginPage(final RequestContext context) {
        final HttpServletRequest request = WebUtils.getHttpServletRequestFromExternalWebflowContext(context);
        final HttpServletResponse response = WebUtils.getHttpServletResponseFromExternalWebflowContext(context);
        final HttpSession session = request.getSession();
        final WebContext webContext = Pac4jUtils.getPac4jJ2EContext(request, response);
        final Service service = WebUtils.getService(context);
        LOGGER.debug("Save service: [{}]", service);
        // Saved so doExecute() can restore it when the provider calls back.
        session.setAttribute(CasProtocolConstants.PARAMETER_SERVICE, service);
        saveRequestParameter(request, session, this.themeParamName);
        saveRequestParameter(request, session, this.localParamName);
        saveRequestParameter(request, session, CasProtocolConstants.PARAMETER_METHOD);
        final Set<ProviderLoginPageConfiguration> urls = new LinkedHashSet<>();
        this.clients.findAllClients().stream().filter(client -> client instanceof IndirectClient
            && isDelegatedClientAuthorizedForService(client, service))
            .forEach(client -> {
                try {
                    final IndirectClient indirectClient = (IndirectClient) client;
                    final String name = client.getName();
                    // Derive a short provider type from the client class name, e.g. FacebookClient -> "facebook".
                    final Matcher matcher = PAC4J_CLIENT_SUFFIX_PATTERN.matcher(client.getClass().getSimpleName());
                    final String type = matcher.replaceAll(StringUtils.EMPTY).toLowerCase();
                    final String redirectionUrl = indirectClient.getRedirectAction(webContext).getLocation();
                    LOGGER.debug("[{}] -> [{}]", name, redirectionUrl);
                    urls.add(new ProviderLoginPageConfiguration(name, redirectionUrl, type, getCssClass(name)));
                } catch (final HttpAction e) {
                    if (e.getCode() == HttpStatus.UNAUTHORIZED.value()) {
                        LOGGER.debug("Authentication request was denied from the provider [{}]", client.getName());
                    } else {
                        LOGGER.warn(e.getMessage(), e);
                    }
                } catch (final Exception e) {
                    // A single misconfigured client must not prevent the other providers from showing.
                    LOGGER.error("Cannot process client [{}]", client, e);
                }
            });
        if (!urls.isEmpty()) {
            context.getFlowScope().put(PAC4J_URLS, urls);
        } else if (response.getStatus() != HttpStatus.UNAUTHORIZED.value()) {
            LOGGER.warn("No delegated authentication providers could be determined based on the provided configuration. "
                + "Either no clients are configured, or the current access strategy rules prohibit CAS from using authentication providers for this request.");
        }
    }
    /**
     * Get a valid CSS class for the given provider name.
     *
     * @param name Name of the provider
     * @return a css class string starting with {@code fa fa-lock}, followed by the provider name
     * with all non-word characters replaced by dashes when a name is given
     */
    private String getCssClass(final String name) {
        String computedCssClass = "fa fa-lock";
        if (name != null) {
            computedCssClass = computedCssClass.concat(" " + PAC4J_CLIENT_CSS_CLASS_SUBSTITUTION_PATTERN.matcher(name).replaceAll("-"));
        }
        LOGGER.debug("cssClass for [{}] is [{}]", name, computedCssClass);
        return computedCssClass;
    }
    /**
     * Restore an attribute in web session as an attribute in request.
     *
     * @param request The HTTP request
     * @param session The HTTP session
     * @param name    The name of the parameter
     */
    private static void restoreRequestAttribute(final HttpServletRequest request, final HttpSession session, final String name) {
        final String value = (String) session.getAttribute(name);
        request.setAttribute(name, value);
    }
    /**
     * Save a request parameter in the web session.
     *
     * @param request The HTTP request
     * @param session The HTTP session
     * @param name    The name of the parameter
     */
    private static void saveRequestParameter(final HttpServletRequest request, final HttpSession session, final String name) {
        final String value = request.getParameter(name);
        if (value != null) {
            session.setAttribute(name, value);
        }
    }
    /**
     * Build the event that halts the webflow.
     *
     * @return an event with id {@link #STOP}
     */
    private Event stopWebflow() {
        return new Event(this, STOP);
    }
    /**
     * Determine if request has errors.
     *
     * @param request the request
     * @param status  the status
     * @return the optional model and view, if request is an error.
     */
    public static Optional<ModelAndView> hasDelegationRequestFailed(final HttpServletRequest request, final int status) {
        final Map<String, String[]> params = request.getParameterMap();
        if (params.containsKey("error") || params.containsKey("error_code") || params.containsKey("error_description") || params.containsKey("error_message")) {
            final Map<String, Object> model = new HashMap<>();
            if (params.containsKey("error_code")) {
                model.put("code", StringEscapeUtils.escapeHtml4(request.getParameter("error_code")));
            } else {
                model.put("code", status);
            }
            model.put("error", StringEscapeUtils.escapeHtml4(request.getParameter("error")));
            model.put("reason", StringEscapeUtils.escapeHtml4(request.getParameter("error_reason")));
            if (params.containsKey("error_description")) {
                model.put("description", StringEscapeUtils.escapeHtml4(request.getParameter("error_description")));
            } else if (params.containsKey("error_message")) {
                model.put("description", StringEscapeUtils.escapeHtml4(request.getParameter("error_message")));
            }
            model.put(CasProtocolConstants.PARAMETER_SERVICE, request.getAttribute(CasProtocolConstants.PARAMETER_SERVICE));
            model.put("client", StringEscapeUtils.escapeHtml4(request.getParameter("client_name")));
            LOGGER.debug("Delegation request has failed. Details are [{}]", model);
            // Use the declared view-id constant instead of repeating the literal.
            return Optional.of(new ModelAndView(VIEW_ID_STOP_WEBFLOW, model));
        }
        return Optional.empty();
    }
    /**
     * Verify that the given client is authorized to be used with the given service, according to
     * the registered service's access strategy and its delegated authentication policy.
     *
     * @param client  the delegated client
     * @param service the service the user is trying to access, possibly {@code null}
     * @return true when no service is given, when no policy is defined, or when the policy allows the client
     */
    private boolean isDelegatedClientAuthorizedForService(final Client client, final Service service) {
        if (service != null) {
            final RegisteredService registeredService = this.servicesManager.findServiceBy(service);
            if (registeredService == null || !registeredService.getAccessStrategy().isServiceAccessAllowed()) {
                LOGGER.warn("Service access for [{}] is denied", registeredService);
                return false;
            }
            LOGGER.debug("Located registered service definition [{}] matching [{}]", registeredService, service);
            final RegisteredServiceDelegatedAuthenticationPolicy policy = registeredService.getAccessStrategy().getDelegatedAuthenticationPolicy();
            if (policy != null) {
                LOGGER.debug("Evaluating delegated authentication policy [{}] for client [{}] and service [{}]", policy, client, registeredService);
                if (policy.isProviderAllowed(client.getName(), registeredService)) {
                    LOGGER.debug("Delegated authentication policy for [{}] allows for using client [{}]", registeredService, client);
                    return true;
                }
                LOGGER.warn("Delegated authentication policy for [{}] refuses access to client [{}]", registeredService.getServiceId(), client);
                return false;
            }
        }
        return true;
    }
    /**
     * The Provider login page configuration: everything the login view needs to render one provider button.
     */
    @AllArgsConstructor
    @Getter
    @Setter
    @ToString
    public static class ProviderLoginPageConfiguration implements Serializable {
        private static final long serialVersionUID = 6216882278086699364L;
        private final String name;
        private final String redirectUrl;
        private final String type;
        private final String cssClass;
    }
}
| |
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.auraframework.util.date;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import org.auraframework.util.test.annotation.UnAdaptableTest;
import org.auraframework.util.test.util.UnitTestCase;
/**
* @since: 224
*/
public class DateServiceTest extends UnitTestCase {
private StringBuilder sb;
@SuppressWarnings("serial")
private static class DebugDate extends Date {
private final String debugInfo;
DebugDate(long time, String debugInfo) {
super(time);
this.debugInfo = debugInfo;
}
/*
* }
*/
DebugDate(long time) {
this(time, "Date(" + time + ")");
}
@Override
public String toString() {
return debugInfo;
}
}
@Override
public void setUp() throws Exception{
super.setUp();
sb = new StringBuilder();
}
/**
* Test Data
*/
final Date[] DATE_TIME = { new DebugDate(1), // now
new DebugDate(1000L), // 12:00:01 AM GMT
new DebugDate(1333322872649L), // 4:27:52.649 PM PDT (GMT-7)
new DebugDate(0) // 00:00:00.000 GMT
};
public static final int[] DATE_TIME_STYLES = { DateFormat.SHORT, DateFormat.MEDIUM, DateFormat.LONG,
DateFormat.FULL, -1 };
public static final String[] SIMPLE_DATE_FORMAT_PATTERNS = { "yyyy.MM.dd G 'at' HH:mm:ss z", "EEE, MMM d, ''yy",
"h:mm a", "hh 'o''clock' a, zzzz", "K:mm a, z", "yyyyy.MMMMM.dd GGG hh:mm aaa",
"EEE, d MMM yyyy HH:mm:ss Z", "yyMMddHHmmssZ", "yyyy-MM-dd'T'HH:mm:ss.SSSZ" };
public List<LocaleConfig> getConfigs() {
List<LocaleConfig> configs = new ArrayList<>();
configs.add(new LocaleConfig(Locale.TRADITIONAL_CHINESE, TimeZone.getTimeZone("GMT+8")));
configs.add(new LocaleConfig(Locale.US, TimeZone.getTimeZone("EST")));
configs.add(new LocaleConfig(new Locale("en", "US"), TimeZone.getTimeZone("PDT")));
return configs;
}
public static class LocaleConfig {
private Locale locale = null;
private TimeZone timeZone = null;
public LocaleConfig(Locale locale) {
setLocale(locale);
setTimeZone(TimeZone.getDefault());
}
public LocaleConfig(Locale locale, TimeZone timeZone) {
setLocale(locale);
setTimeZone(timeZone);
}
public Locale getLocale() {
return this.locale;
}
public void setLocale(Locale locale) {
this.locale = locale;
}
public TimeZone getTimeZone() {
return this.timeZone;
}
public void setTimeZone(TimeZone timeZone) {
this.timeZone = timeZone;
}
}
/**
* NOTE: api's converter.format(date) and converter.parse(date) will rely on
* the TimeZone.getDefault() value to do their job. Thus, tests for these
* will return different results based on default TimeZone that is set.
*/
/**
* Tests for different converters including parsing and formatting
*/
public void testGetDateTimeISO8601Converter() throws Exception {
DateConverter converter = null;
converter = DateServiceImpl.get().getDateTimeISO8601Converter();
for (LocaleConfig c : getConfigs()) {
TimeZone tz = c.getTimeZone();
for (Date d : DATE_TIME) {
// formatting
String formattedDate = converter.format(d, tz);
// parsing
Date parsedDate = converter.parse(formattedDate, tz);
String text = "Input date:" + d.toString() + "\t\tFormatted date:" + formattedDate + "\t\tParsed date:"
+ parsedDate.getTime() + "\t\tTimezone:" + tz.getID() + "\n";
sb.append(text);
}
}
goldFileText("Test:testGetDateTimeISO8601Converter\n" + sb.toString());
}
public void testGetDateISO8601Converter() throws Exception {
DateConverter converter = null;
converter = DateServiceImpl.get().getDateISO8601Converter();
for (LocaleConfig c : getConfigs()) {
TimeZone tz = c.getTimeZone();
for (Date d : DATE_TIME) {
// formatting
String formattedDate = converter.format(d, tz);
// parsing
Date parsedDate = converter.parse(formattedDate, tz);
String text = "Input date:" + d.toString() + "\t\tFormatted date:" + formattedDate + "\t\tParsed date:"
+ parsedDate.getTime() + "\t\tTimezone:" + tz.getID() + "\n";
sb.append(text);
}
}
goldFileText("Test:testGetDateISO8601Converter\n" + sb.toString());
}
public void testGetGenericISO8601Converter() throws Exception {
String text = null;
DateConverter converter = DateServiceImpl.get().getGenericISO8601Converter();
;
// date time
for (LocaleConfig c : getConfigs()) {
TimeZone tz = c.getTimeZone();
for (Date d : DATE_TIME) {
// formatting
String formattedDate = converter.format(d, tz);
// parsing
Date parsedDate = converter.parse(formattedDate, tz);
text = "Input date:" + d.toString() + "\t\tFormatted date:" + formattedDate + "\t\tParsed date:"
+ parsedDate.getTime() + "\t\tTimezone:" + tz.getID() + "\n";
sb.append(text);
}
}
// datetime no seconds
String DATETIME_NOSECONDS[] = { "2012-06-05T13:12Z" };
for (LocaleConfig c : getConfigs()) {
TimeZone tz = c.getTimeZone();
for (String d : DATETIME_NOSECONDS) {
try {
// parsing
Date parsedDate = converter.parse(d, tz);
// formatting
String formattedDate = converter.format(parsedDate, tz);
text = "Input date:" + d.toString() + "\t\tFormatted date:" + formattedDate + "\t\tParsed date:"
+ parsedDate.getTime() + "\n";
sb.append(text);
} catch (IllegalArgumentException e) {
sb.append(e.getMessage() + "\n");
}
}
}
goldFileText("Test:testGetGenericISO8601Converter\n" + sb.toString());
}
@UnAdaptableTest("Date format on SFDC handles differently than standalone Aura, need to investigate")
public void testGetDateStyleConverter_locale_dateStyle() throws Exception {
DateConverter converter = null;
String text = null;
for (LocaleConfig c : getConfigs()) {
Locale l = c.getLocale();
TimeZone tz = c.getTimeZone();
for (Date d : DATE_TIME) {
for (int ds : DATE_TIME_STYLES) {
if (ds > -1) {
converter = DateServiceImpl.get().getDateStyleConverter(l, ds);
// formatting
String formattedDate = converter.format(d, tz);
// parsing
try {
Date parsedDate = converter.parse(formattedDate, tz);
text = "Input date:" + d.toString() + "\t\tFormatted date:" + formattedDate
+ "\t\tParsed date:" + parsedDate.getTime() + "\t\tLocale:" + l.getDisplayName()
+ "\t\tTimeZone: " + tz.getID() + "\t\tDate style: " + ds + "\n";
sb.append(text);
} catch (IllegalArgumentException e) {
sb.append(e.getMessage() + "\n");
}
}
}
}
}
goldFileText("Test:testGetDateStyleConverter_locale_dateStyle\n" + sb.toString());
}
@UnAdaptableTest("Date format on SFDC handles differently than standalone Aura, need to investigate")
public void testGetTimeStyleConverter_locale_timeStyle() throws Exception {
DateConverter converter = null;
String text = null;
for (LocaleConfig c : getConfigs()) {
Locale l = c.getLocale();
TimeZone tz = c.getTimeZone();
for (Date d : DATE_TIME) {
for (int ts : DATE_TIME_STYLES) {
if (ts > -1) {
converter = DateServiceImpl.get().getTimeStyleConverter(l, ts);
// formatting
String formattedDate = converter.format(d, tz);
// parsing
try {
Date parsedDate = converter.parse(formattedDate, tz);
text = "Input date:" + d.toString() + "\t\tFormatted date:" + formattedDate
+ "\t\tParsed date:" + parsedDate.getTime() + "\t\tLocale:" + l.getDisplayName()
+ "\t\tTimeZone: " + tz.getID() + "\t\tTime style: " + ts + "\n";
sb.append(text);
} catch (IllegalArgumentException e) {
sb.append(e.getMessage() + "\n");
}
}
}
}
}
goldFileText("Test:testGetTimeStyleConverter_locale_timeStyle\n" + sb.toString());
}
@UnAdaptableTest("Date format on SFDC handles differently than standalone Aura, need to investigate")
public void testGetDateTimeStyleConverter_locale_dateStyle_timeStyle() throws Exception {
DateConverter converter = null;
String text = null;
for (LocaleConfig c : getConfigs()) {
Locale l = c.getLocale();
TimeZone tz = c.getTimeZone();
for (Date d : DATE_TIME) {
for (int ds : DATE_TIME_STYLES) {
for (int ts : DATE_TIME_STYLES) {
if ((ds + ts) > 0) {
converter = DateServiceImpl.get().getDateTimeStyleConverter(l, ds, ts);
// formatting
String formattedDate = converter.format(d, tz);
// parsing
try {
Date parsedDate = converter.parse(formattedDate, tz);
text = "Input date:" + d.toString() + "\t\tFormatted date:" + formattedDate
+ "\t\tParsed date:" + parsedDate.getTime() + "\t\tLocale:"
+ l.getDisplayName() + "\t\tTimeZone: " + tz.getID() + "\t\tDate style: " + ds
+ "\t\tTime style: " + ts + "\n";
sb.append(text);
} catch (IllegalArgumentException e) {
sb.append(e.getMessage() + "\n");
}
}
}
}
}
}
goldFileText("Test:testGetDateTimeStyleConverter_locale_dateStyle_timeStyle\n" + sb.toString());
}
@UnAdaptableTest("Date format on SFDC handles differently than standalone Aura, need to investigate")
public void testGetPatternConverter_locale_pattern() throws Exception {
DateConverter converter = null;
String text = null;
// format/parse(date, timezone)
// SimpleDateFormat style
for (LocaleConfig c : getConfigs()) {
Locale l = c.getLocale();
TimeZone tz = c.getTimeZone();
for (Date d : DATE_TIME) {
for (String pattern : SIMPLE_DATE_FORMAT_PATTERNS) {
converter = DateServiceImpl.get().getPatternConverter(l, pattern);
// formatting
String formattedDate = converter.format(d, tz);
// parsing
try {
Date parsedDate = converter.parse(formattedDate, tz);
text = "Input date:" + d.toString() + "\t\tFormatted date:" + formattedDate
+ "\t\tParsed date:" + parsedDate.getTime() + "\t\tLocale:" + l.getDisplayName()
+ "\t\tTimeZone:" + tz.getID() + "\t\tSimpleDateFormat pattern:" + pattern + "\n";
sb.append(text);
} catch (IllegalArgumentException e) {
sb.append(e.getMessage() + "\n");
}
}
}
}
goldFileText("Test:testGetPatternConverter_locale_pattern\n" + sb.toString());
}
public void testGetStyle() {
String[] styles = { "full", "long", "medium", "short" };
int i = -1;
for (String s : styles) {
i++;
int dateFormatStyleInteger = DateServiceImpl.get().getStyle(s);
assertEquals("# date format style integer does not match for style " + s, dateFormatStyleInteger, i);
}
}
public void testDateTimeNoneConverter() {
try {
DateServiceImpl.get().getDateTimeStyleConverter(Locale.US, -1, -1);
} catch (IllegalArgumentException e) {
assertEquals("# Incorrect exception message for api getDateTimeStyleConverter(Locale.US, -1, -1)",
"Style '--' is invalid", e.getMessage());
}
try {
DateServiceImpl.get().getDateStyleConverter(Locale.US, -1);
} catch (IllegalArgumentException e) {
assertEquals("# Incorrect exception message for api getDateStyleConverter(Locale.US, -1)",
"Style '--' is invalid", e.getMessage());
}
try {
DateServiceImpl.get().getTimeStyleConverter(Locale.US, -1);
} catch (IllegalArgumentException e) {
assertEquals("# Incorrect exception message for api getTimeStyleConverter(Locale.US, -1)",
"Style '--' is invalid", e.getMessage());
}
}
public void testNullDataForConverters() {
try {
DateServiceImpl.get().getDateTimeStyleConverter(null, -0, -0);
} catch (IllegalArgumentException e) {
assertEquals("# Incorrect exception message for api getDateTimeStyleConverter(null, -0, -0)",
"Both dateStyle and timeStyle are invalid", e.getMessage());
}
try {
DateServiceImpl.get().getDateStyleConverter(null, -0);
} catch (IllegalArgumentException e) {
assertEquals("# Incorrect exception message for api getDateStyleConverter(null, -0)",
"Style '--' is invalid", e.getMessage());
}
try {
DateServiceImpl.get().getTimeStyleConverter(null, -0);
} catch (IllegalArgumentException e) {
assertEquals("# Incorrect exception message for api getTimeStyleConverter(null, -0)",
"Style '--' is invalid", e.getMessage());
}
try {
DateServiceImpl.get().getPatternConverter(null, null);
} catch (IllegalArgumentException e) {
assertEquals("# Incorrect exception message for api getPatternConverter(null, null)",
"Invalid pattern specification", e.getMessage());
}
try {
DateServiceImpl.get().getStyle(null);
} catch (IllegalArgumentException e) {
assertEquals("# Incorrect exception message for api getStyle(null)", "Style is null", e.getMessage());
}
}
public void testNullDataForFormatAndParse() {
try {
DateServiceImpl.get().getDateTimeISO8601Converter().format(null);
} catch (IllegalArgumentException e) {
assertEquals("# Incorrect exception message for api getDateTimeISO8601Converter().format(null)",
"Date can not be null", e.getMessage());
}
try {
DateServiceImpl.get().getDateTimeISO8601Converter().format(null, null);
} catch (IllegalArgumentException e) {
assertEquals("# Incorrect exception message for api getDateTimeISO8601Converter().format(null, null)",
"Date can not be null", e.getMessage());
}
try {
DateServiceImpl.get().getDateTimeISO8601Converter().parse(null);
} catch (IllegalArgumentException e) {
assertEquals("# Incorrect exception message for api getDateTimeISO8601Converter().parse(null)",
"Date can not be null", e.getMessage());
}
try {
DateServiceImpl.get().getDateTimeISO8601Converter().parse(null, null);
} catch (IllegalArgumentException e) {
assertEquals("# Incorrect exception message for api getDateTimeISO8601Converter().parse(null, null)",
"Date can not be null", e.getMessage());
}
}
public void testFormatWithTimeZone() {
// this is equivalent to 1970-01-01 5pm EST
long offsetEST = 5 * 60 * 60 * 1000;
Date testDate = new Date(offsetEST);
DateConverter converter = DateServiceImpl.get().getDateISO8601Converter();
// gmt-8 here is the equivalent of using the default JDK timezone
// hardcoded so the test works wherever it is run
String result = converter.format(testDate, TimeZone.getTimeZone("GMT-8"));
// 1970-01-01 midnight EST is 1969-12-31 9pm PST
assertEquals("1969-12-31", result);
// 1970-01-01 midnight EST should match EST, right?
result = converter.format(testDate, TimeZone.getTimeZone("GMT-5"));
assertEquals("1970-01-01", result);
// switch to datetime converter
converter = DateServiceImpl.get().getDateTimeISO8601Converter();
// gmt-8 here is the equivalent of using the default JDK timezone
// hardcoded so the test works wherever it is run
result = converter.format(testDate, TimeZone.getTimeZone("GMT-8"));
// 9PM PST = midnight EST, right? note the 21:00 and -08:00
assertEquals("1969-12-31T21:00:00.000-08:00", result);
// and a quick reverse check to verify
assertEquals(offsetEST, converter.parse(result).getTime());
// 1970-01-01 midnight EST should match EST
result = converter.format(testDate, TimeZone.getTimeZone("GMT-5"));
assertEquals("1970-01-01T00:00:00.000-05:00", result);
}
public void testParseWithTimeZone() {
// if someone types in 1970-01-01, and they're in GMT - that's date=0L
String testDate = "1970-01-01";
long offsetEST = 5 * 60 * 60 * 1000;
long offsetGMT8 = -(8 * 60 * 60 * 1000);
DateConverter converter = DateServiceImpl.get().getDateISO8601Converter();
// 1970-01-01, and they're in EST, that's 5 hours behind.
// but when they hit 1970-01-01, they're 5 hours later than when GMT
// folks hit it
// date=0 PLUS (5x60x60x1000)
Date resultDate = converter.parse(testDate, TimeZone.getTimeZone("EST"));
assertEquals(offsetEST, resultDate.getTime());
// 1970-01-01, and they're in China, that's 8 hours ahead of GMT.
// but when they hit 1970-01-01, they're 8 hours earlier than when GMT
// folks hit it
// date=0 MINUS (8x60x60x1000)
resultDate = converter.parse(testDate, TimeZone.getTimeZone("GMT+8"));
assertEquals(offsetGMT8, resultDate.getTime());
// If parse, without timezone is called, the ISO8601's default
// timezone should be used ("GMT").
resultDate = converter.parse(testDate);
assertEquals(-TimeZone.getTimeZone("GMT").getRawOffset(), resultDate.getTime());
// switch to dateTime converter
converter = DateServiceImpl.get().getDateTimeISO8601Converter();
// timezone is in this date - EST again - so we should get the same as
// above
testDate = "1970-01-01T00:00:00.000-05:00";
resultDate = converter.parse(testDate);
assertEquals(offsetEST, resultDate.getTime());
// specifying a timezone shouldn't change a thing
resultDate = converter.parse(testDate, TimeZone.getTimeZone("GMT+8"));
assertEquals(offsetEST, resultDate.getTime());
}
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Joiner;
/**
* Tests OptimizeReturns
* @author johnlenz@google.com (John Lenz)
*/
public class OptimizeReturnsTest extends CompilerTestCase {
@Override
protected CompilerPass getProcessor(Compiler compiler) {
return new OptimizeReturns(compiler);
}
private static final String EXTERNAL_SYMBOLS =
"var extern;extern.externalMethod";
public OptimizeReturnsTest() {
super(EXTERNAL_SYMBOLS);
}
@Override
protected int getNumRepetitions() {
// run pass once.
return 1;
}
@Override
protected void setUp() throws Exception {
super.setUp();
super.enableLineNumberCheck(true);
disableTypeCheck();
}
/**
* Combine source strings using '\n' as the separator.
*/
private static String newlineJoin(String ... parts) {
return Joiner.on("\n").join(parts);
}
public void testNoRewriteUsedResult1() throws Exception {
String source = newlineJoin(
"function a(){return 1}",
"var x = a()");
testSame(source);
}
public void testNoRewriteUsedResult2() throws Exception {
String source = newlineJoin(
"var a = function(){return 1}",
"a(); var b = a()");
testSame(source);
}
public void testRewriteUnusedResult1() throws Exception {
String source = newlineJoin(
"function a(){return 1}",
"a()");
String expected = newlineJoin(
"function a(){return}",
"a()");
test(source, expected);
}
public void testRewriteUnusedResult2() throws Exception {
String source = newlineJoin(
"var a; a = function(){return 1}",
"a()");
String expected = newlineJoin(
"var a; a = function(){return}",
"a()");
test(source, expected);
}
public void testRewriteUnusedResult3() throws Exception {
String source = newlineJoin(
"var a = function(){return 1}",
"a()");
String expected = newlineJoin(
"var a = function(){return}",
"a()");
test(source, expected);
}
public void testRewriteUnusedResult4a() throws Exception {
String source = newlineJoin(
"var a = function(){return a()}",
"a()");
testSame(source);
}
public void testRewriteUnusedResult4b() throws Exception {
String source = newlineJoin(
"var a = function b(){return b()}",
"a()");
testSame(source);
}
public void testRewriteUnusedResult4c() throws Exception {
String source = newlineJoin(
"function a(){return a()}",
"a()");
testSame(source);
}
public void testRewriteUnusedResult5() throws Exception {
String source = newlineJoin(
"function a(){}",
"a.prototype.foo = function(args) {return args};",
"var o = new a;",
"o.foo()");
String expected = newlineJoin(
"function a(){}",
"a.prototype.foo = function(args) {return};",
"var o = new a;",
"o.foo()");
test(source, expected);
}
public void testRewriteUnusedResult6() throws Exception {
String source = newlineJoin(
"function a(){return (g = 1)}",
"a()");
String expected = newlineJoin(
"function a(){g = 1;return}",
"a()");
test(source, expected);
}
public void testRewriteUnusedResult7a() throws Exception {
String source = newlineJoin(
"function a() { return 1 }",
"function b() { return a() }",
"function c() { return b() }",
"c();");
String expected = newlineJoin(
"function a() { return 1 }",
"function b() { return a() }",
"function c() { b(); return }",
"c();");
test(source, expected);
}
public void testRewriteUnusedResult7b() throws Exception {
String source = newlineJoin(
"c();",
"function c() { return b() }",
"function b() { return a() }",
"function a() { return 1 }");
// Iteration 1.
String expected = newlineJoin(
"c();",
"function c() { b(); return }",
"function b() { return a() }",
"function a() { return 1 }");
test(source, expected);
// Iteration 2.
source = expected;
expected = newlineJoin(
"c();",
"function c() { b(); return }",
"function b() { a(); return }",
"function a() { return 1 }");
test(source, expected);
// Iteration 3.
source = expected;
expected = newlineJoin(
"c();",
"function c() { b(); return }",
"function b() { a(); return }",
"function a() { return }");
test(source, expected);
}
public void testRewriteUnusedResult8() throws Exception {
String source = newlineJoin(
"function a() { return c() }",
"function b() { return a() }",
"function c() { return b() }",
"c();");
testSame(source);
}
public void testNoRewriteObjLit1() throws Exception {
String source = newlineJoin(
"var a = {b:function(){return 1;}}",
"for(c in a) (a[c])();",
"a.b()");
testSame(source);
}
public void testNoRewriteObjLit2() throws Exception {
String source = newlineJoin(
"var a = {b:function fn(){return 1;}}",
"for(c in a) (a[c])();",
"a.b()");
testSame(source);
}
public void testNoRewriteArrLit() throws Exception {
String source = newlineJoin(
"var a = [function(){return 1;}]",
"(a[0])();");
testSame(source);
}
public void testPrototypeMethod1() throws Exception {
String source = newlineJoin(
"function c(){}",
"c.prototype.a = function(){return 1}",
"var x = new c;",
"x.a()");
String result = newlineJoin(
"function c(){}",
"c.prototype.a = function(){return}",
"var x = new c;",
"x.a()");
test(source, result);
}
public void testPrototypeMethod2() throws Exception {
String source = newlineJoin(
"function c(){}",
"c.prototype.a = function(){return 1}",
"goog.reflect.object({a: 'v'})",
"var x = new c;",
"x.a()");
testSame(source);
}
public void testPrototypeMethod3() throws Exception {
String source = newlineJoin(
"function c(){}",
"c.prototype.a = function(){return 1}",
"var x = new c;",
"for(var key in goog.reflect.object({a: 'v'})){ x[key](); }",
"x.a()");
testSame(source);
}
public void testPrototypeMethod4() throws Exception {
String source = newlineJoin(
"function c(){}",
"c.prototype.a = function(){return 1}",
"var x = new c;",
"for(var key in goog.reflect.object({a: 'v'})){ x[key](); }");
testSame(source);
}
  public void testCallOrApply() throws Exception {
    // Results reached through .call/.apply are not yet tracked, so these
    // must be left untouched for now.
    // TODO(johnlenz): Add support for .call and .apply
    testSame("function a() {return 1}; a.call(new foo);");
    testSame("function a() {return 1}; a.apply(new foo);");
  }
public void testRewriteUseSiteRemoval() throws Exception {
String source = newlineJoin(
"function a() { return {\"_id\" : 1} }",
"a();");
String expected = newlineJoin(
"function a() { return }",
"a();");
test(source, expected);
}
}
| |
import java.awt.Color;
import java.awt.GradientPaint;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JComponent;
import javax.swing.JEditorPane;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
import org.jdesktop.animation.timing.Animator;
import org.jdesktop.animation.timing.interpolation.PropertySetter;
import org.jdesktop.animation.transitions.Effect;
import org.jdesktop.animation.transitions.EffectsManager;
import org.jdesktop.animation.transitions.ScreenTransition;
import org.jdesktop.animation.transitions.TransitionTarget;
import org.jdesktop.animation.transitions.effects.CompositeEffect;
import org.jdesktop.animation.transitions.effects.FadeIn;
/*
* SearchTransition.java
*
* Created on May 3, 2007, 3:05 PM
*
* Copyright (c) 2007, Sun Microsystems, Inc
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of the TimingFramework project nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
*
* @author Chet
*/
public class SearchTransition extends JComponent implements TransitionTarget,
        ActionListener {

    //
    // GUI components used in the application screens
    //
    JLabel instructions = new JLabel("Search and ye shall find...");
    JLabel searchLabel = new JLabel("Search:");
    JTextField searchField = new JTextField("");
    JEditorPane results = new JEditorPane("text/html",
            "<html><body><b>Dung Beetles</b>: An Ode<br/>" +
            "My Life with <b>Dung Beetles</b><br/>" +
            "<b>Beetle</b> Bailey Gets Latrine Duty<br/>" +
            "Evolution's Oddities<br/>" +
            "Society's Parasites<br/>" +
            "You <b>Dung</b> Me Wrong: A Country Music History<br/>" +
            "Ding, <b>Dung</b>, The Witch is Dead<br/>" +
            "'To be or not to <b>beetle</b>'<br/>" +
            "Gross Insects of the World<br/>" +
            "Nature's Sanitation Engineers<br/>" +
            "Why are they here?<br/>" +
            "</body></html>");
    JScrollPane scroller = new JScrollPane(results);

    // Layout constants (pixels).
    private static final int LABEL_W = 50;
    private static final int LABEL_H = 20;
    private static final int FIELD_W = 100;
    private static final int FIELD_H = 20;
    private static final int INSTRUCTIONS_W = 170;
    private static final int INSTRUCTIONS_H = 20;
    private static final int RESULTS_X = 30;

    //
    // Animation variables
    //
    Animator animator = new Animator(500); // Animate for half-second
    // Setup transition with:
    //   "this" as the transition container
    //   "this" as the TransitionTarget callback object
    //   animator as the animator that drives the transition
    ScreenTransition transition = new ScreenTransition(this,
            this, animator);
    private CompositeEffect moverFader = null;

    //
    // Misc other instance variables
    //
    private int currentScreen = 0; // Which screen are we on? 0=search, 1=results
    private int prevHeight = -1;   // Height the gradient/effect were built for
    Paint bgGradient = null;       // Cached background gradient
    int prevW, prevH;              // Last size seen by setBounds()

    /** Creates a new instance of SearchTransition */
    public SearchTransition() {
        results.setEditable(false);
        // Setup the animation parameters
        animator.setAcceleration(.2f); // Accelerate for first 20%
        animator.setDeceleration(.4f); // Decelerate for last 40%
        // Set this as the listener for entries in the search field
        searchField.addActionListener(this);
        instructions.setFont(instructions.getFont().deriveFont(15f));
    }

    /**
     * Re-lays-out the GUI for the current screen whenever this component
     * is given a new size.
     */
    @Override
    public void setBounds(int x, int y, int w, int h) {
        super.setBounds(x, y, w, h);
        if (w != prevW || h != prevH) {
            // Setup GUI for current screen given new size of our container
            setupNextScreen();
            prevW = w;
            prevH = h;
        }
    }

    /**
     * Arrange the GUI for the initial search screen: centered instructions
     * with the search label/field below them.
     */
    private void setupSearchScreen() {
        int instructionsX = (getWidth() - INSTRUCTIONS_W) / 2;
        int instructionsY = getHeight() / 4;
        int searchX = (getWidth() - LABEL_W - FIELD_W - 10) / 2;
        int searchY = instructionsY + INSTRUCTIONS_H + 20;
        int fieldX = searchX + LABEL_W + 10;
        int fieldY = searchY;
        add(instructions);
        add(searchLabel);
        add(searchField);
        instructions.setBounds(instructionsX, instructionsY,
                INSTRUCTIONS_W, INSTRUCTIONS_H);
        searchLabel.setBounds(searchX, searchY, LABEL_W, LABEL_H);
        searchField.setBounds(fieldX, fieldY, FIELD_W, FIELD_H);
    }

    /**
     * Arrange the GUI for the results screen: search label/field in the
     * upper-right corner, the scrolling results pane filling the rest.
     */
    public void setupResultsScreen() {
        int searchX = getWidth() - LABEL_W - FIELD_W - RESULTS_X - 10;
        int searchY = 10;
        int fieldX = searchX + LABEL_W + 10;
        int fieldY = searchY;
        int resultsX = RESULTS_X;
        int resultsY = searchY + LABEL_H + 20;
        add(searchLabel);
        add(searchField);
        add(scroller);
        // Use searchY (not a literal) so the label stays aligned with the
        // field if the top margin ever changes; matches setupSearchScreen().
        searchLabel.setBounds(searchX, searchY, LABEL_W, LABEL_H);
        searchField.setBounds(fieldX, fieldY, FIELD_W, FIELD_H);
        scroller.setBounds(resultsX, resultsY,
                getWidth() - (2 * resultsX), getHeight() - resultsY - 20);
    }

    /**
     * Change the gradient and the appearance effect according to the new
     * window size; caches the height they were built for in prevHeight.
     */
    private void setupBackgroundAndEffect() {
        // init the background gradient according to current height
        bgGradient = new GradientPaint(0, 0, Color.LIGHT_GRAY.brighter(),
                0, getHeight(), Color.DARK_GRAY.brighter());
        // Init resultsEffect with current component size info: the scroller
        // slides up from the bottom edge while fading in.
        MoveIn mover = new MoveIn(RESULTS_X, getHeight());
        FadeIn fader = new FadeIn();
        moverFader = new CompositeEffect(mover);
        moverFader.addEffect(fader);
        EffectsManager.setEffect(scroller, moverFader, EffectsManager.TransitionType.APPEARING);
        prevHeight = getHeight();
    }

    /**
     * Override of paintComponent() to draw the gradient background.
     * The gradient fills the full component bounds.
     */
    @Override
    protected void paintComponent(Graphics g) {
        // Rebuild the gradient lazily when first painted or after a resize.
        if (bgGradient == null || getHeight() != prevHeight) {
            setupBackgroundAndEffect();
        }
        ((Graphics2D)g).setPaint(bgGradient);
        g.fillRect(0, 0, getWidth(), getHeight());
    }

    /**
     * TransitionTarget callback; clear current state and set up
     * state for next screen
     */
    public void setupNextScreen() {
        // Clear out current GUI state
        removeAll();
        switch (currentScreen) {
            case 0:
                setupSearchScreen();
                break;
            case 1:
                setupResultsScreen();
                break;
            default:
                break;
        }
    }

    /**
     * Handle user hitting Enter in the search field: flip the target screen
     * and start the animated transition toward it.
     */
    public void actionPerformed(ActionEvent ae) {
        if (moverFader == null || prevHeight != getHeight()) {
            setupBackgroundAndEffect();
        }
        // Change currentScreen, used later in setupNextScreen() callback
        currentScreen = (currentScreen == 0) ? 1 : 0;
        transition.start();
    }

    /** Builds and shows the demo frame; must run on the EDT. */
    private static void createAndShowGUI() {
        JFrame f = new JFrame();
        f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        f.setSize(400, 300);
        SearchTransition component = new SearchTransition();
        f.add(component);
        f.setVisible(true);
    }

    /**
     * @param args the command line arguments
     */
    public static void main(String[] args) {
        // Best effort: fall back to the default look-and-feel on any failure.
        try {
            UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
        } catch (ClassNotFoundException ex) {
            ex.printStackTrace();
        } catch (InstantiationException ex) {
            ex.printStackTrace();
        } catch (IllegalAccessException ex) {
            ex.printStackTrace();
        } catch (UnsupportedLookAndFeelException ex) {
            ex.printStackTrace();
        }
        Runnable doCreateAndShowGUI = new Runnable() {
            public void run() {
                createAndShowGUI();
            }
        };
        SwingUtilities.invokeLater(doCreateAndShowGUI);
    }
}
/**
* Custom effect: moves a component in to its end location
* from a specified starting point
*/
class MoveIn extends Effect {

    /** Location the component appears to slide in from. */
    private final Point startLocation;

    public MoveIn(int x, int y) {
        startLocation = new Point(x, y);
    }

    /**
     * Handles setup of the animation that varies the component's location
     * during the transition, from the start point to its final position.
     */
    @Override
    public void init(Animator animator, Effect parentEffect) {
        // Drive the composite parent when present so chained effects share state.
        Effect targetEffect = (parentEffect != null) ? parentEffect : this;
        PropertySetter ps = new PropertySetter(targetEffect, "location",
                startLocation, new Point(getEnd().getX(), getEnd().getY()));
        animator.addTarget(ps);
        super.init(animator, parentEffect);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.rya.api.domain;
import java.util.Date;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;
import org.openrdf.model.URI;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.model.vocabulary.XMLSchema;
import com.google.common.collect.ImmutableMap;
/**
* Utility methods for using {@link RyaType}.
*/
public final class RyaTypeUtils {
    /**
     * Dispatch table from a value's Java class to the factory method that
     * converts a value of that class into a {@link RyaType}.
     */
    private static final ImmutableMap<Class<?>, RyaTypeMethod> METHOD_MAP =
        ImmutableMap.<Class<?>, RyaTypeMethod>builder()
            .put(Boolean.class, (v) -> booleanRyaType((Boolean) v))
            .put(Byte.class, (v) -> byteRyaType((Byte) v))
            .put(Date.class, (v) -> dateRyaType((Date) v))
            .put(DateTime.class, (v) -> dateRyaType((DateTime) v))
            .put(Double.class, (v) -> doubleRyaType((Double) v))
            .put(Float.class, (v) -> floatRyaType((Float) v))
            .put(Integer.class, (v) -> intRyaType((Integer) v))
            .put(Long.class, (v) -> longRyaType((Long) v))
            .put(Short.class, (v) -> shortRyaType((Short) v))
            .put(String.class, (v) -> stringRyaType((String) v))
            .put(URI.class, (v) -> uriRyaType((URI) v))
            .put(URIImpl.class, (v) -> uriRyaType((URIImpl) v))
            .build();

    /**
     * Represents a method inside the {@link RyaTypeUtils} class that can be
     * called.
     */
    private interface RyaTypeMethod {
        /**
         * Calls the method within {@link RyaTypeUtils} with the supplied value.
         * @param value the object value.
         * @return the {@link RyaType}.
         */
        RyaType callRyaTypeMethod(final Object value);
    }

    /**
     * Private constructor to prevent instantiation.
     */
    private RyaTypeUtils() {
    }

    /**
     * Creates a boolean {@link RyaType} object.
     * @param value the {@link Boolean} object.
     * @return the {@link RyaType} with the data type set to
     * {@link XMLSchema#BOOLEAN} and the data set to the specified
     * {@code value}.
     */
    public static RyaType booleanRyaType(final Boolean value) {
        return new RyaType(XMLSchema.BOOLEAN, Boolean.toString(value));
    }

    /**
     * Creates a byte {@link RyaType} object.
     * @param value the {@link Byte} object.
     * @return the {@link RyaType} with the data type set to
     * {@link XMLSchema#BYTE} and the data set to the specified {@code value}.
     */
    public static RyaType byteRyaType(final Byte value) {
        return new RyaType(XMLSchema.BYTE, Byte.toString(value));
    }

    /**
     * Creates a date {@link RyaType} object.
     * @param value the {@link Date} object.
     * @return the {@link RyaType} with the data type set to
     * {@link XMLSchema#DATETIME} and the data set to the specified
     * {@code value}.
     */
    public static RyaType dateRyaType(final Date value) {
        final DateTime dateTime = new DateTime(value.getTime());
        final StringBuffer sb = new StringBuffer();
        ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC).printTo(sb, dateTime.getMillis());
        final String formattedDate = sb.toString();
        // The value is formatted with the full ISO dateTime formatter, so the
        // datatype must be DATETIME (matching the DateTime overload and the
        // javadoc above), not DATE.
        return new RyaType(XMLSchema.DATETIME, formattedDate);
    }

    /**
     * Creates a date/time {@link RyaType} object.
     * @param value the {@link DateTime} object.
     * @return the {@link RyaType} with the data type set to
     * {@link XMLSchema#DATETIME} and the data set to the specified
     * {@code value}.
     */
    public static RyaType dateRyaType(final DateTime value) {
        final StringBuffer sb = new StringBuffer();
        ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC).printTo(sb, value.getMillis());
        final String formattedDate = sb.toString();
        return new RyaType(XMLSchema.DATETIME, formattedDate);
    }

    /**
     * Creates a double {@link RyaType} object.
     * @param value the {@link Double} object.
     * @return the {@link RyaType} with the data type set to
     * {@link XMLSchema#DOUBLE} and the data set to the specified {@code value}.
     */
    public static RyaType doubleRyaType(final Double value) {
        return new RyaType(XMLSchema.DOUBLE, Double.toString(value));
    }

    /**
     * Creates a float {@link RyaType} object.
     * @param value the {@link Float} object.
     * @return the {@link RyaType} with the data type set to
     * {@link XMLSchema#FLOAT} and the data set to the specified {@code value}.
     */
    public static RyaType floatRyaType(final Float value) {
        return new RyaType(XMLSchema.FLOAT, Float.toString(value));
    }

    /**
     * Creates an integer {@link RyaType} object.
     * @param value the {@link Integer} object.
     * @return the {@link RyaType} with the data type set to
     * {@link XMLSchema#INTEGER} and the data set to the specified
     * {@code value}.
     */
    public static RyaType intRyaType(final Integer value) {
        return new RyaType(XMLSchema.INTEGER, Integer.toString(value));
    }

    /**
     * Creates a long {@link RyaType} object.
     * @param value the {@link Long} object.
     * @return the {@link RyaType} with the data type set to
     * {@link XMLSchema#LONG} and the data set to the specified {@code value}.
     */
    public static RyaType longRyaType(final Long value) {
        return new RyaType(XMLSchema.LONG, Long.toString(value));
    }

    /**
     * Creates a short {@link RyaType} object.
     * @param value the {@link Short} object.
     * @return the {@link RyaType} with the data type set to
     * {@link XMLSchema#SHORT} and the data set to the specified {@code value}.
     */
    public static RyaType shortRyaType(final Short value) {
        return new RyaType(XMLSchema.SHORT, Short.toString(value));
    }

    /**
     * Creates a string {@link RyaType} object.
     * @param value the {@link String} object.
     * @return the {@link RyaType} with the data type set to
     * {@link XMLSchema#STRING} and the data set to the specified {@code value}.
     */
    public static RyaType stringRyaType(final String value) {
        return new RyaType(XMLSchema.STRING, value);
    }

    /**
     * Creates a URI {@link RyaType} object.
     * @param value the {@link URI} object.
     * @return the {@link RyaType} with the data type set to
     * {@link XMLSchema#ANYURI} and the data set to the specified {@code value}.
     */
    public static RyaType uriRyaType(final URI value) {
        return new RyaType(XMLSchema.ANYURI, value.stringValue());
    }

    /**
     * Calls the appropriate {@link RyaTypeUtils} method based on the class
     * specified and initializes it with the supplied value.
     * @param classType the {@link Class} of {@link RyaType} to find.
     * @param value the value to initialize the {@link RyaType} with.
     * @return the {@link RyaType} or {@code null} if none could be found for
     * the specified {@code classType}.
     */
    public static RyaType getRyaTypeForClass(final Class<?> classType, final Object value) {
        final RyaTypeMethod method = METHOD_MAP.get(classType);
        RyaType ryaType = null;
        if (method != null) {
            ryaType = method.callRyaTypeMethod(value);
        }
        return ryaType;
    }
}
| |
/*
* Copyright 2001-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.java.generate.psi;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PropertyUtilBase;
import com.intellij.util.ArrayUtilRt;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static com.intellij.psi.CommonClassNames.*;
/**
* Basic PSI Adapter with common function that works in all supported versions of IDEA.
*/
public final class PsiAdapter {
private PsiAdapter() {}
/**
* Returns true if a field is constant.
* <p/>
* This is identified as the name of the field is only in uppercase and it has
* a {@code static} modifier.
*
* @param field field to check if it's a constant
* @return true if constant.
*/
public static boolean isConstantField(PsiField field) {
PsiModifierList list = field.getModifierList();
if (list == null) {
return false;
}
// modifier must be static
if (!list.hasModifierProperty(PsiModifier.STATIC)) {
return false;
}
// name must NOT have any lowercase character
return !StringUtil.hasLowerCaseChar(field.getName());
}
/**
* Finds an existing method with the given name.
* If there isn't a method with the name, null is returned.
*
* @param clazz the class
* @param name name of method to find
* @return the found method, null if none exist
*/
@Nullable
public static PsiMethod findMethodByName(PsiClass clazz, String name) {
PsiMethod[] methods = clazz.getMethods();
// use reverse to find from bottom as the duplicate conflict resolution policy requires this
for (int i = methods.length - 1; i >= 0; i--) {
PsiMethod method = methods[i];
if (name.equals(method.getName())) {
return method;
}
}
return null;
}
/**
* Returns true if the given field a primitive array type (e.g., int[], long[], float[]).
*
* @param type type.
* @return true if field is a primitive array type.
*/
public static boolean isPrimitiveArrayType(PsiType type) {
return type instanceof PsiArrayType && isPrimitiveType(((PsiArrayType) type).getComponentType());
}
/**
* Is the type an Object array type (etc. String[], Object[])?
*
* @param type type.
* @return true if it's an Object array type.
*/
public static boolean isObjectArrayType(PsiType type) {
return type instanceof PsiArrayType && !isPrimitiveType(((PsiArrayType) type).getComponentType());
}
/**
* Is the type a String array type (etc. String[])?
*
* @param type type.
* @return true if it's a String array type.
*/
public static boolean isStringArrayType(PsiType type) {
if (isPrimitiveType(type))
return false;
return type.getCanonicalText().indexOf("String[]") > 0;
}
/**
* Is the given field a {@link java.util.Collection} type?
*
* @param factory element factory.
* @param type type.
* @return true if it's a Collection type.
*/
public static boolean isCollectionType(PsiElementFactory factory, PsiType type) {
return isTypeOf(factory, type, "java.util.Collection");
}
  /**
   * Is the given type assignable to {@link java.util.Map}?
   *
   * @param factory element factory used for the assignability check.
   * @param type    type to test.
   * @return true if it's a Map type.
   */
  public static boolean isMapType(PsiElementFactory factory, PsiType type) {
    return isTypeOf(factory, type, JAVA_UTIL_MAP);
  }
  /**
   * Is the given type assignable to {@link java.util.Set}?
   *
   * @param factory element factory used for the assignability check.
   * @param type    type to test.
   * @return true if it's a Set type.
   */
  public static boolean isSetType(PsiElementFactory factory, PsiType type) {
    return isTypeOf(factory, type, JAVA_UTIL_SET);
  }
  /**
   * Is the given type assignable to {@link java.util.List}?
   *
   * @param factory element factory used for the assignability check.
   * @param type    type to test.
   * @return true if it's a List type.
   */
  public static boolean isListType(PsiElementFactory factory, PsiType type) {
    return isTypeOf(factory, type, JAVA_UTIL_LIST);
  }
  /**
   * Is the given type assignable to {@link String}?
   *
   * @param factory element factory used for the assignability check.
   * @param type    type to test.
   * @return true if it's a String type.
   */
  public static boolean isStringType(PsiElementFactory factory, PsiType type) {
    return isTypeOf(factory, type, JAVA_LANG_STRING);
  }
  /**
   * Is the given type assignable to {@link Object}?
   *
   * @param factory element factory used for the assignability check.
   * @param type    type to test.
   * @return true if it's an Object type.
   */
  public static boolean isObjectType(PsiElementFactory factory, PsiType type) {
    return isTypeOf(factory, type, JAVA_LANG_OBJECT);
  }
/**
* Is the given field a {@link java.util.Date} type?
*
* @param factory element factory.
* @param type type.
* @return true if it's a Date type.
*/
public static boolean isDateType(PsiElementFactory factory, PsiType type) {
return isTypeOf(factory, type, "java.util.Date");
}
  /**
   * Is the given type assignable to {@link java.util.Calendar}?
   *
   * @param factory element factory used for the assignability check.
   * @param type    type to test.
   * @return true if it's a Calendar type.
   */
  public static boolean isCalendarType(PsiElementFactory factory, PsiType type) {
    return isTypeOf(factory, type, "java.util.Calendar");
  }
/**
* Is the given field a {@link Boolean} type or a primitive boolean type?
*
* @param factory element factory.
* @param type type.
* @return true if it's a Boolean or boolean type.
*/
public static boolean isBooleanType(PsiElementFactory factory, PsiType type) {
if (isPrimitiveType(type)) {
// test for simple type of boolean
String s = type.getCanonicalText();
return "boolean".equals(s);
} else {
// test for Object type of Boolean
return isTypeOf(factory, type, JAVA_LANG_BOOLEAN);
}
}
/**
* Is the given field a numeric type (assignable from java.lang.Numeric or a primitive type of byte, short, int, long, float, double type)?
*
* @param factory element factory.
* @param type type.
* @return true if it's a numeric type.
*/
public static boolean isNumericType(PsiElementFactory factory, PsiType type) {
if (isPrimitiveType(type)) {
// test for simple type of numeric
String s = type.getCanonicalText();
return "byte".equals(s) || "double".equals(s) || "float".equals(s) || "int".equals(s) || "long".equals(s) || "short".equals(s);
} else {
// test for Object type of numeric
return isTypeOf(factory, type, "java.lang.Number");
}
}
/**
* Does the javafile have the import statement?
*
* @param javaFile javafile.
* @param importStatement import statement to test existing for.
* @return true if the javafile has the import statement.
*/
public static boolean hasImportStatement(PsiJavaFile javaFile, String importStatement) {
PsiImportList importList = javaFile.getImportList();
if (importList == null) {
return false;
}
if (importStatement.endsWith(".*")) {
return importList.findOnDemandImportStatement(fixImportStatement(importStatement)) != null;
} else {
return importList.findSingleClassImportStatement(importStatement) != null;
}
}
/**
* Adds an import statement to the javafile and optimizes the imports afterwards.
*
*
* @param javaFile javafile.
* @param importStatementOnDemand name of import statement, must be with a wildcard (etc. java.util.*).
* @throws IncorrectOperationException
* is thrown if there is an error creating the import statement.
*/
public static void addImportStatement(PsiJavaFile javaFile, String importStatementOnDemand) {
PsiElementFactory factory = JavaPsiFacade.getElementFactory(javaFile.getProject());
PsiImportStatement is = factory.createImportStatementOnDemand(fixImportStatement(importStatementOnDemand));
// add the import to the file, and optimize the imports
PsiImportList importList = javaFile.getImportList();
if (importList != null) {
importList.add(is);
}
JavaCodeStyleManager.getInstance(javaFile.getProject()).optimizeImports(javaFile);
}
/**
* Fixes the import statement to be returned as packagename only (without .* or any Classname).
* <p/>
* <br/>Example: java.util will be returned as java.util
* <br/>Example: java.util.* will be returned as java.util
* <br/>Example: java.text.SimpleDateFormat will be returned as java.text
*
* @param importStatementOnDemand import statement
* @return import statement only with packagename
*/
private static String fixImportStatement(String importStatementOnDemand) {
if (importStatementOnDemand.endsWith(".*")) {
return importStatementOnDemand.substring(0, importStatementOnDemand.length() - 2);
} else {
boolean hasClassname = StringUtil.hasUpperCaseChar(importStatementOnDemand);
if (hasClassname) {
// extract packagename part
int pos = importStatementOnDemand.lastIndexOf(".");
return importStatementOnDemand.substring(0, pos);
} else {
// it is a pure packagename
return importStatementOnDemand;
}
}
}
/**
* Gets the fields fully qualified classname (etc java.lang.String, java.util.ArrayList)
*
* @param type the type.
* @return the fully qualified classname, null if the field is a primitive.
* @see #getTypeClassName(PsiType) for the non qualified version.
*/
@Nullable
public static String getTypeQualifiedClassName(PsiType type) {
if (isPrimitiveType(type)) {
return null;
}
// avoid [] if the type is an array
String name = type.getCanonicalText();
if (name.endsWith("[]")) {
return name.substring(0, name.length() - 2);
}
return name;
}
/**
* Gets the fields classname (etc. String, ArrayList)
*
* @param type the type.
* @return the classname, null if the field is a primitive.
* @see #getTypeQualifiedClassName(PsiType) for the qualified version.
*/
@Nullable
public static String getTypeClassName(PsiType type) {
String name = getTypeQualifiedClassName(type);
// return null if it was a primitive type
if (name == null) {
return null;
}
return StringUtil.getShortName(name);
}
/**
* Finds the public static void main(String[] args) method.
*
* @param clazz the class.
* @return the method if it exists, null if not.
*/
@Nullable
public static PsiMethod findPublicStaticVoidMainMethod(PsiClass clazz) {
PsiMethod[] methods = clazz.findMethodsByName("main", false);
// is it public static void main(String[] args)
for (PsiMethod method : methods) {
// must be public
if (!method.hasModifierProperty(PsiModifier.PUBLIC)) {
continue;
}
// must be static
if (!method.hasModifierProperty(PsiModifier.STATIC)) {
continue;
}
// must have void as return type
PsiType returnType = method.getReturnType();
if (!PsiType.VOID.equals(returnType)) {
continue;
}
// must have one parameter
PsiParameter[] parameters = method.getParameterList().getParameters();
if (parameters.length != 1) {
continue;
}
// parameter must be string array
if (!isStringArrayType(parameters[0].getType())) {
continue;
}
// public static void main(String[] args) method found
return method;
}
// main not found
return null;
}
/**
* Add or replaces the javadoc comment to the given method.
*
* @param method the method the javadoc should be added/set to.
* @param javadoc the javadoc comment.
* @param replace true if any existing javadoc should be replaced. false will not replace any existing javadoc and thus leave the javadoc untouched.
* @return the added/replace javadoc comment, null if the was an existing javadoc and it should <b>not</b> be replaced.
* @throws IncorrectOperationException is thrown if error adding/replacing the javadoc comment.
*/
@Nullable
public static PsiComment addOrReplaceJavadoc(PsiMethod method, String javadoc, boolean replace) {
final Project project = method.getProject();
PsiElementFactory factory = JavaPsiFacade.getElementFactory(project);
PsiComment comment = factory.createCommentFromText(javadoc, null);
// does a method already exists?
PsiDocComment doc = method.getDocComment();
if (doc != null) {
if (replace) {
// javadoc already exists, so replace
doc.replace(comment);
final CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(project);
codeStyleManager.reformat(method); // to reformat javadoc
return comment;
} else {
// do not replace existing javadoc
return null;
}
} else {
// add new javadoc
method.addBefore(comment, method.getFirstChild());
final CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(project);
codeStyleManager.reformat(method); // to reformat javadoc
return comment;
}
}
/**
* Is the given type a "void" type.
*
* @param type the type.
* @return true if a void type, false if not.
*/
public static boolean isTypeOfVoid(PsiType type) {
return type != null && type.equalsToText("void");
}
/**
* Is the method a getter method?
* <p/>
* The name of the method must start with {@code get} or {@code is}.
* And if the method is a {@code isXXX} then the method must return a java.lang.Boolean or boolean.
*
*
* @param method the method
* @return true if a getter method, false if not.
*/
public static boolean isGetterMethod(PsiMethod method) {
// must not be a void method
if (isTypeOfVoid(method.getReturnType())) {
return false;
}
final PsiParameterList parameterList = method.getParameterList();
if (!parameterList.isEmpty()) {
return false;
}
return true;
}
/**
* Gets the field name of the getter method.
* <p/>
* The method must be a getter method for a field.
* Returns null if this method is not a getter.
* <p/>
* The fieldname is the part of the name that is after the {@code get} or {@code is} part
* of the name.
* <p/>
* Example: methodName=getName will return fieldname=name
*
*
* @param method the method
* @return the fieldname if this is a getter method.
* @see #isGetterMethod(PsiMethod) for the getter check
*/
@Nullable
public static String getGetterFieldName(PsiMethod method) {
// must be a getter
if (!isGetterMethod(method)) {
return null;
}
return PropertyUtilBase.getPropertyNameByGetter(method);
}
/**
* Returns true if the field is enum (JDK1.5).
*
* @param field field to check if it's a enum
* @return true if enum.
*/
public static boolean isEnumField(PsiField field) {
PsiType type = field.getType();
if (!(type instanceof PsiClassType)) {
return false;
}
final PsiClassType classType = (PsiClassType)type;
final PsiClass aClass = classType.resolve();
return (aClass != null) && aClass.isEnum();
}
  /**
   * Is the class an exception — i.e., does it extend Throwable anywhere in
   * its super hierarchy?
   *
   * @param clazz class to check.
   * @return true if class is an exception.
   */
  public static boolean isExceptionClass(PsiClass clazz) {
    return InheritanceUtil.isInheritor(clazz, JAVA_LANG_THROWABLE);
  }
/**
 * Finds the public boolean equals(Object o) method.
 *
 * @param clazz the class (only methods declared directly on it are searched).
 * @return the method if it exists, null if not.
 */
@Nullable
public static PsiMethod findEqualsMethod(PsiClass clazz) {
    for (PsiMethod candidate : clazz.findMethodsByName("equals", false)) {
        // required shape: public, non-static, boolean return, single Object parameter
        if (!candidate.hasModifierProperty(PsiModifier.PUBLIC)
                || candidate.hasModifierProperty(PsiModifier.STATIC)) {
            continue;
        }
        if (!PsiType.BOOLEAN.equals(candidate.getReturnType())) {
            continue;
        }
        PsiParameter[] params = candidate.getParameterList().getParameters();
        if (params.length == 1 && params[0].getType().getCanonicalText().equals(JAVA_LANG_OBJECT)) {
            return candidate;
        }
    }
    // no matching equals declared on this class
    return null;
}
/**
 * Finds the public int hashCode() method.
 *
 * @param clazz the class (only methods declared directly on it are searched).
 * @return the method if it exists, null if not.
 */
@Nullable
public static PsiMethod findHashCodeMethod(PsiClass clazz) {
    for (PsiMethod candidate : clazz.findMethodsByName("hashCode", false)) {
        // required shape: public, non-static, int return, no parameters
        if (!candidate.hasModifierProperty(PsiModifier.PUBLIC)
                || candidate.hasModifierProperty(PsiModifier.STATIC)) {
            continue;
        }
        if (!PsiType.INT.equals(candidate.getReturnType())
                || !candidate.getParameterList().isEmpty()) {
            continue;
        }
        return candidate;
    }
    // no matching hashCode declared on this class
    return null;
}
/**
 * Checks the given type against a FQ classname (assignable).
 *
 * @param factory IDEA factory
 * @param type the type
 * @param typeFQClassName the FQ classname to test against.
 * @return true if the given type is assignable to the FQ classname.
 */
private static boolean isTypeOf(PsiElementFactory factory, PsiType type, String typeFQClassName) {
    // 'void' fields (seen in IDEA, would NPE below) and primitives can never match a class type
    if (isTypeOfVoid(type) || isPrimitiveType(type)) {
        return false;
    }
    final GlobalSearchScope resolveScope = type.getResolveScope();
    if (resolveScope == null) {
        return false;
    }
    return factory.createTypeByFQClassName(typeFQClassName, resolveScope).isAssignableFrom(type);
}
/**
 * Gets the names the given class implements (not FQ names).
 *
 * @param clazz the class
 * @return the names.
 */
public static String @NotNull [] getImplementsClassnames(PsiClass clazz) {
    final PsiClass[] implemented = clazz.getInterfaces();
    if (implemented.length == 0) {
        // reuse the shared empty array instead of allocating
        return ArrayUtilRt.EMPTY_STRING_ARRAY;
    }
    final String[] result = new String[implemented.length];
    for (int idx = 0; idx < implemented.length; idx++) {
        result[idx] = implemented[idx].getName();
    }
    return result;
}
/**
 * Is the given type a primitive?
 *
 * @param type the type.
 * @return true if primitive, false if not.
 */
public static boolean isPrimitiveType(PsiType type) {
    // a plain instanceof test against IDEA's primitive-type node
    return type instanceof PsiPrimitiveType;
}
/**
 * Returns true when the type is a multi-dimensional array (e.g. int[][]).
 *
 * @param aType the type to inspect
 * @return true if the type is an array whose component type is itself an array.
 */
public static boolean isNestedArray(PsiType aType) {
    return aType instanceof PsiArrayType
            && ((PsiArrayType) aType).getComponentType() instanceof PsiArrayType;
}
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.workspaces.model;
import java.io.Serializable;
/**
 * <p>
 * Contains the results of the <a>DescribeWorkspaceBundles</a> operation.
 * </p>
 */
public class DescribeWorkspaceBundlesResult implements Serializable, Cloneable {

    /** Bundle descriptions returned by the call; lazily created, never exposed as null via the getter. */
    private com.amazonaws.internal.SdkInternalList<WorkspaceBundle> bundles;

    /**
     * Pagination token. Non-null means more results are available; pass it as
     * <code>NextToken</code> in a subsequent call. The token is valid for one day.
     */
    private String nextToken;

    /**
     * Returns the bundle descriptions.
     *
     * @return the bundle list; an empty list is created on first access, so this
     *         never returns null.
     */
    public java.util.List<WorkspaceBundle> getBundles() {
        if (bundles == null) {
            bundles = new com.amazonaws.internal.SdkInternalList<WorkspaceBundle>();
        }
        return bundles;
    }

    /**
     * Replaces the bundle list with a copy of the given collection.
     *
     * @param bundles bundle descriptions, or null to clear the list
     */
    public void setBundles(java.util.Collection<WorkspaceBundle> bundles) {
        this.bundles = (bundles == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<WorkspaceBundle>(bundles);
    }

    /**
     * Appends the given bundles to the existing list (if any). Use
     * {@link #setBundles(java.util.Collection)} or
     * {@link #withBundles(java.util.Collection)} to replace the existing values.
     *
     * @param bundles bundle descriptions to append
     * @return this object, so calls can be chained
     */
    public DescribeWorkspaceBundlesResult withBundles(WorkspaceBundle... bundles) {
        if (this.bundles == null) {
            // presize to the incoming count; elements are added below
            this.bundles = new com.amazonaws.internal.SdkInternalList<WorkspaceBundle>(bundles.length);
        }
        for (WorkspaceBundle bundle : bundles) {
            this.bundles.add(bundle);
        }
        return this;
    }

    /**
     * Replaces the bundle list with a copy of the given collection.
     *
     * @param bundles bundle descriptions
     * @return this object, so calls can be chained
     */
    public DescribeWorkspaceBundlesResult withBundles(java.util.Collection<WorkspaceBundle> bundles) {
        setBundles(bundles);
        return this;
    }

    /**
     * Sets the pagination token for fetching the next set of items.
     *
     * @param nextToken token returned by a previous call; valid for one day
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token.
     *
     * @return the token to pass as <code>NextToken</code> in a subsequent call,
     *         or null when no further results are available
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the pagination token for fetching the next set of items.
     *
     * @param nextToken token returned by a previous call; valid for one day
     * @return this object, so calls can be chained
     */
    public DescribeWorkspaceBundlesResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getBundles() != null) {
            sb.append("Bundles: " + getBundles() + ",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: " + getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeWorkspaceBundlesResult)) {
            return false;
        }
        DescribeWorkspaceBundlesResult other = (DescribeWorkspaceBundlesResult) obj;
        // field-by-field comparison via the getters (bundle list is lazily created)
        return java.util.Objects.equals(other.getBundles(), this.getBundles())
                && java.util.Objects.equals(other.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        // same 31-based combination as the hand-rolled prime loop
        return java.util.Objects.hash(getBundles(), getNextToken());
    }

    @Override
    public DescribeWorkspaceBundlesResult clone() {
        try {
            return (DescribeWorkspaceBundlesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
/**
* Copyright (c) 2011-2013 Evolutionary Design and Optimization Group
*
* Licensed under the MIT License.
*
* See the "LICENSE" file for a copy of the license.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package evogpj.gp;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashMap;
import evogpj.evaluation.FitnessComparisonStandardizer;
import evogpj.evaluation.FitnessFunction;
/**
* Class representing collection of all individuals in the run. Just a wrapper
* around the ArrayList class.
*
* @author Owen Derby
*/
public class Population extends ArrayList<Individual> implements Serializable {

    private static final long serialVersionUID = 6111020814262385165L;

    /**
     * Default constructor: creates an empty population.
     */
    public Population() {
        super();
    }

    /**
     * Creates a population holding the concatenation of the given populations.
     *
     * @param populations populations whose individuals are copied into this one
     */
    public Population(Population... populations) {
        super();
        for (Population population : populations) {
            this.addAll(population);
        }
    }

    /**
     * Order-sensitive, element-wise equality.
     * <p>
     * Fix: the previous implementation dereferenced {@code other} before any null
     * check, so {@code equals(null)} threw a NullPointerException instead of
     * returning false as the {@link Object#equals} contract requires.
     *
     * @param other object to compare with
     * @return true when both have the same runtime class, the same size, and
     *         pairwise-equal individuals in the same order
     */
    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || !this.getClass().equals(other.getClass())) {
            return false;
        }
        // safe: getClass() equality above guarantees 'other' is (at least) a Population
        Population otherPopulation = (Population) other;
        if (otherPopulation.size() != this.size()) {
            return false;
        }
        for (int i = 0; i < this.size(); i++) {
            if (!this.get(i).equals(otherPopulation.get(i))) {
                return false;
            }
        }
        return true;
    }

    /**
     * Performs a deep copy of the population via a serialization round-trip.
     *
     * @return the copy, or null when (de)serialization fails
     */
    public Population deepCopy() {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try {
            ObjectOutputStream oos = new ObjectOutputStream(bos);
            oos.writeObject(this);
            ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()));
            return (Population) ois.readObject();
        } catch (IOException e) {
            System.err.println("IO Exception in deepCopy");
        } catch (ClassNotFoundException e) {
            System.err.println("Class not found Exception in deepCopy");
        }
        return null;
    }

    /**
     * Calculates the euclidean distance of all individuals in the first front.
     * Others will keep a distance of Double.MAX_VALUE.
     *
     * @param fitnessFunctions fitness functions keyed by name
     */
    public void calculateEuclideanDistances(LinkedHashMap<String, FitnessFunction> fitnessFunctions) {
        // first get the mins and maxes for the first front only
        LinkedHashMap<String, Double>[] minMax = getMinMax(this, fitnessFunctions, true);
        LinkedHashMap<String, Double> mins = minMax[0];
        LinkedHashMap<String, Double> maxes = minMax[1];
        // convert the mins and maxes to standardized (minimization) form; for a
        // maximizing function the roles of min and max swap once standardized,
        // so the standardized min is computed from the raw maxes and vice versa
        for (String key : mins.keySet()) {
            boolean maximizing = fitnessFunctions.get(key).isMaximizingFunction();
            LinkedHashMap<String, Double> minSource = maximizing ? maxes : mins;
            LinkedHashMap<String, Double> maxSource = maximizing ? mins : maxes;
            Double standardizedMin = FitnessComparisonStandardizer.getFitnessForMinimization(minSource, key, fitnessFunctions);
            Double standardizedMax = FitnessComparisonStandardizer.getFitnessForMinimization(maxSource, key, fitnessFunctions);
            mins.put(key, standardizedMin);
            maxes.put(key, standardizedMax);
        }
        // create the ranges needed for scaling
        LinkedHashMap<String, Double> ranges = new LinkedHashMap<String, Double>();
        for (String key : mins.keySet()) {
            ranges.put(key, Math.abs(maxes.get(key) - mins.get(key)));
        }
        // compute euclidean distances for the first front only
        for (Individual individual : this) {
            if (individual.getDominationCount() > 0) continue;
            individual.calculateEuclideanDistance(fitnessFunctions, mins, ranges);
        }
    }

    /**
     * Finds the min and max value for each fitness function.
     *
     * @param pop population to scan; must be non-empty (the first individual seeds the extrema)
     * @param fitnessFunctions fitness functions keyed by name
     * @param onlyFirstFront if true, mins/maxes calculated only for individuals in the first front
     * @return a two-element array: [0] = mins, [1] = maxes, keyed by fitness-function name
     */
    @SuppressWarnings("unchecked") // generic array creation is unavoidable for LinkedHashMap<String, Double>[]
    public static LinkedHashMap<String, Double>[] getMinMax(Population pop, LinkedHashMap<String, FitnessFunction> fitnessFunctions, Boolean onlyFirstFront) {
        LinkedHashMap<String, Double>[] minMax = new LinkedHashMap[2];
        LinkedHashMap<String, Double> mins = new LinkedHashMap<String, Double>();
        LinkedHashMap<String, Double> maxes = new LinkedHashMap<String, Double>();
        // establish order of fitness functions in mins and maxes, seeded from the first individual
        for (String id : fitnessFunctions.keySet()) {
            mins.put(id, pop.get(0).getFitness(id));
            maxes.put(id, pop.get(0).getFitness(id));
        }
        // find mins and maxes
        for (Individual i : pop) {
            if (onlyFirstFront && i.getDominationCount() > 0) continue;
            for (String funcName : i.getFitnessNames()) {
                Double iFitness = i.getFitness(funcName);
                if (iFitness < mins.get(funcName)) // lower min
                    mins.put(funcName, iFitness);
                if (iFitness > maxes.get(funcName)) // higher max
                    maxes.put(funcName, iFitness);
            }
        }
        minMax[0] = mins;
        minMax[1] = maxes;
        return minMax;
    }

    /**
     * Non-dominating sort of the population (crowding distance not used).
     */
    public void sort() {
        Collections.sort(this, new DominationCrowdingSortComparator(false));
    }

    /**
     * Constant hash: legal (equal populations share it) but degenerate for hash
     * containers. Kept as-is for behavioral compatibility; populations are
     * mutable lists and make poor hash keys regardless.
     *
     * @return a fixed hash value
     */
    @Override
    public int hashCode() {
        int hash = 7;
        return hash;
    }

    /**
     * Comparator ordering individuals by domination count (ascending), optionally
     * breaking ties by crowding distance (descending).
     */
    public class DominationCrowdingSortComparator implements Comparator<Individual> {

        // can't use crowding distance for comparison if it's not being computed a level above
        boolean crowdingDistanceEnabled;

        /**
         * Default constructor: crowding-distance tie-breaking disabled.
         */
        public DominationCrowdingSortComparator() {
            this(false);
        }

        /**
         * @param _crowdingDistanceEnabled whether to use crowding distance to break ties
         */
        public DominationCrowdingSortComparator(boolean _crowdingDistanceEnabled) {
            crowdingDistanceEnabled = _crowdingDistanceEnabled;
        }

        /**
         * Compares two individuals: lower domination count sorts first; when equal
         * and crowding is enabled, higher crowding distance sorts first.
         *
         * @param a first individual
         * @param b second individual
         * @return negative/zero/positive per the Comparator contract
         */
        @Override
        public int compare(Individual a, Individual b) {
            int aDomination = a.getDominationCount();
            int bDomination = b.getDominationCount();
            // sort by domination count: lower is better
            if (aDomination != bDomination) {
                return aDomination > bDomination ? 1 : -1;
            }
            // don't proceed to crowding distance sorting if we're not computing crowding distance
            if (!crowdingDistanceEnabled) {
                return 0;
            }
            // now sort by crowding distance: higher is better
            double aCrowding = a.getCrowdingDistance();
            double bCrowding = b.getCrowdingDistance();
            if (aCrowding < bCrowding) return 1;
            else if (aCrowding > bCrowding) return -1;
            else return 0;
        }
    }

    /**
     * Comparator ordering individuals by their fitness on validation data (ascending).
     */
    public class CrossValSortComparator implements Comparator<Individual> {

        /**
         * Default constructor.
         */
        public CrossValSortComparator() {
        }

        /**
         * Compares two individuals by cross-validation fitness: lower sorts first.
         *
         * @param a first individual
         * @param b second individual
         * @return negative/zero/positive per the Comparator contract
         */
        @Override
        public int compare(Individual a, Individual b) {
            double aCVF = a.getCrossValFitness();
            double bCVF = b.getCrossValFitness();
            if (aCVF == bCVF) {
                return 0;
            } else if (aCVF > bCVF) {
                return 1;
            } else {
                return -1;
            }
        }
    }

    /**
     * Sorts individuals according to non-domination.
     *
     * @param crowdingSortEnabled whether crowding distance breaks ties
     */
    public void sort(boolean crowdingSortEnabled) {
        Collections.sort(this, new DominationCrowdingSortComparator(crowdingSortEnabled));
    }

    /**
     * Sorts individuals according to their fitness on validation data.
     */
    public void sortCrossVal() {
        Collections.sort(this, new CrossValSortComparator());
    }
}
| |
/*
* Copyright (C) 2014-2022 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.ubl20.codelist;
import com.helger.commons.annotation.CodingStyleguideUnaware;
import com.helger.commons.annotation.Nonempty;
import com.helger.commons.id.IHasID;
import com.helger.commons.lang.EnumHelper;
import com.helger.commons.name.IHasDisplayName;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import un.unece.uncefact.codelist.specification._5639._1988.LanguageCodeContentType;
/**
 * This file was automatically generated from Genericode file LanguageCode-2.0.gc. Do NOT edit!
 * It contains a total of 276 entries!
 * <p>
 * FIX: the display name for VO/vo was the mojibake "Volap\u00c3\u00bck"
 * (UTF-8 bytes of "\u00fc" misread as Latin-1); corrected to "Volap\u00fck".
 * NOTE(review): other display-name oddities ("Esotonian", "Mangolian",
 * "Sawahili", "hosa") are left untouched — presumably inherited from the source
 * codelist; verify against LanguageCode-2.0.gc before changing.
 * @author com.helger.ubl20.main.MainCreateEnumsGenericode20
 */
@CodingStyleguideUnaware
public enum ELanguageCode20
    implements IHasID<String> , IHasDisplayName
{
    AA("AA", "Afar"),
    AB("AB", "Abkhazian"),
    AF("AF", "Afrikaans"),
    AM("AM", "Amharic"),
    AR("AR", "Arabic"),
    AS("AS", "Assamese"),
    AY("AY", "Aymara"),
    AZ("AZ", "Azerbaijani"),
    BA("BA", "Bashkir"),
    BE("BE", "Byelorussian"),
    BG("BG", "Bulgarian"),
    BH("BH", "Bihari"),
    BI("BI", "Bislama"),
    BN("BN", "Bengali; Bangla"),
    BO("BO", "Tibetan"),
    BR("BR", "Breton"),
    CA("CA", "Catalan"),
    CO("CO", "Corsican"),
    CS("CS", "Czech"),
    CY("CY", "Welsh"),
    DA("DA", "Danish"),
    DE("DE", "German"),
    DZ("DZ", "Bhutani"),
    EL("EL", "Greek"),
    EN("EN", "English"),
    EO("EO", "Esperanto"),
    ES("ES", "Spanish"),
    ET("ET", "Esotonian"),
    EU("EU", "Basque"),
    FA("FA", "Persian"),
    FI("FI", "Finnish"),
    FJ("FJ", "Fiji"),
    FO("FO", "Faroese"),
    FR("FR", "French"),
    FY("FY", "Frisian"),
    GA("GA", "Irish"),
    GD("GD", "Scots Gaelic"),
    GL("GL", "Galician"),
    GN("GN", "Guarani"),
    GU("GU", "Gujarati"),
    HA("HA", "Hausa"),
    HE("HE", "Hebrew"),
    HI("HI", "Hindi"),
    HR("HR", "Croatian"),
    HU("HU", "Hungarian"),
    HY("HY", "Armenian"),
    IA("IA", "Interlingua"),
    ID("ID", "Indonesian"),
    IE("IE", "Interlingue"),
    IK("IK", "Inupiak"),
    IS("IS", "Icelandic"),
    IT("IT", "Italian"),
    IU("IU", "Eskimo, Inuktiut CA"),
    JA("JA", "Japanese"),
    JW("JW", "Javanese"),
    KA("KA", "Georgian"),
    KK("KK", "Kazakh"),
    KL("KL", "Greenlandic"),
    KM("KM", "Cambodian"),
    KN("KN", "Kannada"),
    KO("KO", "Korean"),
    KS("KS", "Kashmiri"),
    KU("KU", "Kurdish"),
    KY("KY", "Kirghiz"),
    LA("LA", "Latin"),
    LN("LN", "Lingala"),
    LO("LO", "Laothian"),
    LT("LT", "Lithuanian"),
    LV("LV", "Latvian, Lettish"),
    MG("MG", "Malagasy"),
    MI("MI", "Maori"),
    MK("MK", "Macedonian"),
    ML("ML", "Malayalam"),
    MN("MN", "Mangolian"),
    MO("MO", "Moldavian"),
    MR("MR", "Marathi"),
    MS("MS", "Malay"),
    MT("MT", "Maltese"),
    NA("NA", "Nauru"),
    NE("NE", "Nepali"),
    NL("NL", "Dutch"),
    NO("NO", "Norwegian"),
    OC("OC", "Occitan"),
    OM("OM", "(Afan) Oromo"),
    OR("OR", "Oriya"),
    PA("PA", "Punjabi"),
    PL("PL", "Polish"),
    PS("PS", "Pashto, Pushto"),
    PT("PT", "Portuguese"),
    QU("QU", "Quechua"),
    RM("RM", "Rhaeto-Romance"),
    RN("RN", "Kirundi"),
    RO("RO", "Romanian"),
    RU("RU", "Russian"),
    RW("RW", "Kinyarwanda"),
    SA("SA", "Sanskrit"),
    SD("SD", "Sindhi"),
    SG("SG", "Sangho"),
    SH("SH", "Serbo-Croatian"),
    SI("SI", "Singhalese"),
    SK("SK", "Slovak"),
    SL("SL", "Slovenian"),
    SM("SM", "Samoan"),
    SN("SN", "Shona"),
    SO("SO", "Somali"),
    SQ("SQ", "Albanian"),
    SR("SR", "Serbian"),
    SS("SS", "Siswati"),
    ST("ST", "Sesotho"),
    SU("SU", "Sundanese"),
    SV("SV", "Swedish"),
    SW("SW", "Sawahili"),
    TA("TA", "Tamil"),
    TE("TE", "Telugu"),
    TG("TG", "Tajik"),
    TH("TH", "Thai"),
    TI("TI", "Tigrinya"),
    TK("TK", "Turkmen"),
    TL("TL", "Tagalog"),
    TN("TN", "Setswana"),
    TO("TO", "Tonga"),
    TR("TR", "Turkish"),
    TS("TS", "Tsonga"),
    TT("TT", "Tatar"),
    TW("TW", "Twi"),
    UG("UG", "Uigur"),
    UK("UK", "Ukrainian"),
    UR("UR", "Urdu"),
    UZ("UZ", "Uzbek"),
    VI("VI", "Vietnamese"),
    VO("VO", "Volap\u00fck"),
    WO("WO", "Wolof"),
    XH("XH", "hosa"),
    YI("YI", "Yiddish"),
    YO("YO", "Yoruba"),
    ZA("ZA", "Zhuang"),
    ZH("ZH", "Chinese"),
    ZU("ZU", "Zulu"),
    aa("aa", "Afar"),
    ab("ab", "Abkhazian"),
    af("af", "Afrikaans"),
    am("am", "Amharic"),
    ar("ar", "Arabic"),
    as("as", "Assamese"),
    ay("ay", "Aymara"),
    az("az", "Azerbaijani"),
    ba("ba", "Bashkir"),
    be("be", "Byelorussian"),
    bg("bg", "Bulgarian"),
    bh("bh", "Bihari"),
    bi("bi", "Bislama"),
    bn("bn", "Bengali; Bangla"),
    bo("bo", "Tibetan"),
    br("br", "Breton"),
    ca("ca", "Catalan"),
    co("co", "Corsican"),
    cs("cs", "Czech"),
    cy("cy", "Welsh"),
    da("da", "Danish"),
    de("de", "German"),
    dz("dz", "Bhutani"),
    el("el", "Greek"),
    en("en", "English"),
    eo("eo", "Esperanto"),
    es("es", "Spanish"),
    et("et", "Esotonian"),
    eu("eu", "Basque"),
    fa("fa", "Persian"),
    fi("fi", "Finnish"),
    fj("fj", "Fiji"),
    fo("fo", "Faroese"),
    fr("fr", "French"),
    fy("fy", "Frisian"),
    ga("ga", "Irish"),
    gd("gd", "Scots Gaelic"),
    gl("gl", "Galician"),
    gn("gn", "Guarani"),
    gu("gu", "Gujarati"),
    ha("ha", "Hausa"),
    he("he", "Hebrew"),
    hi("hi", "Hindi"),
    hr("hr", "Croatian"),
    hu("hu", "Hungarian"),
    hy("hy", "Armenian"),
    ia("ia", "Interlingua"),
    id("id", "Indonesian"),
    ie("ie", "Interlingue"),
    ik("ik", "Inupiak"),
    is("is", "Icelandic"),
    it("it", "Italian"),
    iu("iu", "Eskimo, Inuktiut CA"),
    ja("ja", "Japanese"),
    jw("jw", "Javanese"),
    ka("ka", "Georgian"),
    kk("kk", "Kazakh"),
    kl("kl", "Greenlandic"),
    km("km", "Cambodian"),
    kn("kn", "Kannada"),
    ko("ko", "Korean"),
    ks("ks", "Kashmiri"),
    ku("ku", "Kurdish"),
    ky("ky", "Kirghiz"),
    la("la", "Latin"),
    ln("ln", "Lingala"),
    lo("lo", "Laothian"),
    lt("lt", "Lithuanian"),
    lv("lv", "Latvian, Lettish"),
    mg("mg", "Malagasy"),
    mi("mi", "Maori"),
    mk("mk", "Macedonian"),
    ml("ml", "Malayalam"),
    mn("mn", "Mangolian"),
    mo("mo", "Moldavian"),
    mr("mr", "Marathi"),
    ms("ms", "Malay"),
    mt("mt", "Maltese"),
    na("na", "Nauru"),
    ne("ne", "Nepali"),
    nl("nl", "Dutch"),
    no("no", "Norwegian"),
    oc("oc", "Occitan"),
    om("om", "(Afan) Oromo"),
    or("or", "Oriya"),
    pa("pa", "Punjabi"),
    pl("pl", "Polish"),
    ps("ps", "Pashto, Pushto"),
    pt("pt", "Portuguese"),
    qu("qu", "Quechua"),
    rm("rm", "Rhaeto-Romance"),
    rn("rn", "Kirundi"),
    ro("ro", "Romanian"),
    ru("ru", "Russian"),
    rw("rw", "Kinyarwanda"),
    sa("sa", "Sanskrit"),
    sd("sd", "Sindhi"),
    sg("sg", "Sangho"),
    sh("sh", "Serbo-Croatian"),
    si("si", "Singhalese"),
    sk("sk", "Slovak"),
    sl("sl", "Slovenian"),
    sm("sm", "Samoan"),
    sn("sn", "Shona"),
    so("so", "Somali"),
    sq("sq", "Albanian"),
    sr("sr", "Serbian"),
    ss("ss", "Siswati"),
    st("st", "Sesotho"),
    su("su", "Sundanese"),
    sv("sv", "Swedish"),
    sw("sw", "Sawahili"),
    ta("ta", "Tamil"),
    te("te", "Telugu"),
    tg("tg", "Tajik"),
    th("th", "Thai"),
    ti("ti", "Tigrinya"),
    tk("tk", "Turkmen"),
    tl("tl", "Tagalog"),
    tn("tn", "Setswana"),
    to("to", "Tonga"),
    tr("tr", "Turkish"),
    ts("ts", "Tsonga"),
    tt("tt", "Tatar"),
    tw("tw", "Twi"),
    ug("ug", "Uigur"),
    uk("uk", "Ukrainian"),
    ur("ur", "Urdu"),
    uz("uz", "Uzbek"),
    vi("vi", "Vietnamese"),
    vo("vo", "Volap\u00fck"),
    wo("wo", "Wolof"),
    xh("xh", "hosa"),
    yi("yi", "Yiddish"),
    yo("yo", "Yoruba"),
    za("za", "Zhuang"),
    zh("zh", "Chinese"),
    zu("zu", "Zulu");

    /** Code-list agency identifier (UN/ECE). */
    public static final String AGENCY_ID = "6";
    public static final String AGENCY_LONG_NAME = "United Nations Economic Commission for Europe";
    public static final String LIST_VERSION = "UN D.05A";

    private final String m_sID;
    private final String m_sDisplayName;

    private ELanguageCode20(@Nonnull @Nonempty final String sID, @Nonnull final String sDisplayName) {
        m_sID = sID;
        m_sDisplayName = sDisplayName;
    }

    @Nonnull
    @Nonempty
    public String getID() {
        return m_sID;
    }

    @Nonnull
    public String getDisplayName() {
        return m_sDisplayName;
    }

    /**
     * @param sID the case-sensitive code to look up, may be null
     * @return the matching enum constant, or null when not found
     */
    @Nullable
    public static ELanguageCode20 getFromIDOrNull(@Nullable final String sID) {
        return EnumHelper.getFromIDOrNull(ELanguageCode20.class, sID);
    }

    /**
     * @param sID the case-sensitive code to look up, may be null
     * @return the display name for the code, or null when not found
     */
    @Nullable
    public static String getDisplayNameFromIDOrNull(@Nullable final String sID) {
        final ELanguageCode20 eValue = ELanguageCode20.getFromIDOrNull(sID);
        return ((eValue == null) ? null : eValue.getDisplayName());
    }

    /**
     * @param aID the JAXB code value, may be null
     * @return the matching enum constant, or null when not found
     */
    @Nullable
    public static ELanguageCode20 getFromJAXBOrNull(@Nullable final LanguageCodeContentType aID) {
        return ((aID == null) ? null : ELanguageCode20.getFromIDOrNull(aID.value()));
    }

    /**
     * @param aID the JAXB code value, may be null
     * @return the display name for the code, or null when not found
     */
    @Nullable
    public static String getDisplayNameFromJAXBOrNull(@Nullable final LanguageCodeContentType aID) {
        return ((aID == null) ? null : ELanguageCode20.getDisplayNameFromIDOrNull(aID.value()));
    }
}
| |
/*
* Copyright (c) 2015, Alachisoft. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alachisoft.tayzgrid.caching.topologies.local;
import com.alachisoft.tayzgrid.caching.CacheEntry;
import com.alachisoft.tayzgrid.caching.CacheRuntimeContext;
import com.alachisoft.tayzgrid.caching.ItemRemoveReason;
import com.alachisoft.tayzgrid.caching.OperationContext;
import com.alachisoft.tayzgrid.caching.OperationContextFieldName;
import com.alachisoft.tayzgrid.caching.autoexpiration.AggregateExpirationHint;
import com.alachisoft.tayzgrid.caching.autoexpiration.ExpirationHint;
import com.alachisoft.tayzgrid.caching.autoexpiration.FixedExpiration;
import com.alachisoft.tayzgrid.caching.autoexpiration.IdleExpiration;
import com.alachisoft.tayzgrid.caching.evictionpolicies.EvictionHint;
import com.alachisoft.tayzgrid.caching.evictionpolicies.EvictionPolicyFactory;
import com.alachisoft.tayzgrid.caching.evictionpolicies.IEvictionPolicy;
import com.alachisoft.tayzgrid.caching.evictionpolicies.PriorityEvictionHint;
import com.alachisoft.tayzgrid.caching.topologies.CacheAddResult;
import com.alachisoft.tayzgrid.caching.topologies.CacheBase;
import com.alachisoft.tayzgrid.caching.topologies.CacheInsResult;
import com.alachisoft.tayzgrid.caching.topologies.ICacheEventsListener;
import com.alachisoft.tayzgrid.common.DeleteParams;
import com.alachisoft.tayzgrid.common.InsertParams;
import com.alachisoft.tayzgrid.common.ResetableIterator;
import com.alachisoft.tayzgrid.common.ServicePropValues;
import com.alachisoft.tayzgrid.common.monitoring.ServerMonitor;
import com.alachisoft.tayzgrid.common.net.Address;
import com.alachisoft.tayzgrid.runtime.exceptions.CacheException;
import com.alachisoft.tayzgrid.runtime.exceptions.ConfigurationException;
import com.alachisoft.tayzgrid.runtime.exceptions.GeneralFailureException;
import com.alachisoft.tayzgrid.runtime.exceptions.LockingException;
import com.alachisoft.tayzgrid.runtime.exceptions.OperationFailedException;
import com.alachisoft.tayzgrid.storage.CacheStorageFactory;
import com.alachisoft.tayzgrid.storage.ICacheStorage;
import com.alachisoft.tayzgrid.storage.StoreAddResult;
import com.alachisoft.tayzgrid.storage.StoreInsResult;
import com.alachisoft.tayzgrid.caching.exceptions.StateTransferException;
import java.net.*;
import java.util.Iterator;
public class LocalCache extends LocalCacheBase implements Runnable {
// Backing store holding the cache entries; set in Initialize(), nulled in dispose().
protected ICacheStorage _cacheStore;
// Eviction policy in effect, or null when eviction is disabled in configuration.
protected IEvictionPolicy _evictionPolicy;
// NOTE(review): presumably runs EvictAysnc via run(); usage not visible in this chunk.
private Thread _evictionThread;
// NOTE(review): presumably guards eviction scheduling; usage not visible in this chunk.
private Object _eviction_sync_mutex = new Object();
// Overridden from ServicePropValues.Cache_EnableGCCollection in Initialize().
private boolean _allowExplicitGCCollection = true;
// NOTE(review): usage not visible in this chunk — confirm against the full class.
private boolean _notifyCacheFull = false;
/**
 * Creates a local (single-node) cache.
 *
 * @param cacheClasses cache class configurations, passed through to Initialize
 * @param parentCache  parent cache in the topology, if any
 * @param properties   configuration; Initialize requires a 'storage' section
 * @param listener     listener notified of cache events
 * @param context      shared runtime context
 * @throws ConfigurationException when mandatory configuration is missing or invalid
 */
public LocalCache(java.util.Map cacheClasses, CacheBase parentCache, java.util.Map properties, ICacheEventsListener listener, CacheRuntimeContext context) throws ConfigurationException {
    super(properties, parentCache, listener, context);
    _stats.setClassName("local-cache");
    Initialize(cacheClasses, properties);
}
/**
 * Releases the backing store (idempotent) and then disposes the base class.
 */
@Override
public void dispose() {
    ICacheStorage store = _cacheStore;
    if (store != null) {
        store.dispose();
        _cacheStore = null;
    }
    super.dispose();
}
/**
 * Thread entry point: delegates to the asynchronous eviction routine.
 */
@Override
public void run() {
    EvictAysnc();
}
/**
 * Returns the number of entries in the backing store.
 *
 * @return entry count from the store
 * @throws UnsupportedOperationException when the store is unavailable (e.g. disposed)
 */
@Override
public long getCount() {
    if (ServerMonitor.getMonitorActivity()) {
        ServerMonitor.LogClientActivity("LocalCache.Count", "");
    }
    ICacheStorage store = _cacheStore;
    if (store == null) {
        throw new UnsupportedOperationException();
    }
    return store.getCount();
}
/**
 * @return the current session count as tracked by the statistics object.
 */
@Override
public long getSessionCount() {
    return _stats.getSessionCount();
}
/**
 * A standalone local cache has no cluster membership.
 *
 * @return always null for a local cache.
 */
@Override
public InetAddress getServerJustLeft() {
    return null;
}
/**
 * No-op: cluster membership does not apply to a local cache.
 */
@Override
public void setServerJustLeft(InetAddress value) {
    ; // intentionally empty
}
/**
 * @return always 1; a local cache is a single-node topology.
 */
@Override
public int getServersCount() {
    return 1;
}
/**
 * @param clientAddress address to test
 * @return always false; a local cache has no server nodes to match against.
 */
@Override
public boolean IsServerNodeIp(Address clientAddress) {
    return false;
}
/**
 * Returns the occupied size of the backing store.
 *
 * @return store size, or 0 when no store is available.
 */
@Override
public long getSize() {
    ICacheStorage store = _cacheStore;
    return store != null ? store.getSize() : 0;
}
/**
 * Returns the configured eviction ratio.
 *
 * @return the policy's evict ratio, or 0 when eviction is disabled.
 */
@Override
public float getEvictRatio() {
    IEvictionPolicy policy = _evictionPolicy;
    return policy != null ? policy.getEvictRatio() : 0;
}
/**
 * Sets the eviction ratio on the active policy; ignored when eviction is disabled.
 *
 * @param value new evict ratio
 */
@Override
public void setEvictRatio(float value) {
    IEvictionPolicy policy = _evictionPolicy;
    if (policy != null) {
        policy.setEvictRatio(value);
    }
}
/**
 * Returns the maximum size configured on the backing store.
 *
 * @return the store's max size, or 0 when no store is available.
 */
@Override
public long getMaxSize() {
    ICacheStorage store = _cacheStore;
    return store != null ? store.getMaxSize() : 0;
}
/**
 * Applies a new maximum size to the backing store and the statistics.
 * The new limit is rejected when the cache already holds more data than it.
 *
 * @param value new maximum size
 * @throws CacheException when current data volume exceeds the requested size
 */
@Override
public void setMaxSize(long value) throws CacheException {
    if (_cacheStore == null) {
        return;
    }
    // the new size cannot be applied if the cache already holds more data
    if (_cacheStore.getSize() > value) {
        throw new CacheException("You need to remove some data from cache before applying the new size");
    }
    _cacheStore.setMaxSize(value);
    _stats.setMaxSize(value);
    _context.PerfStatsColl.setCacheMaxSizeStats(value);
}
/**
 * Whether the store is running with virtually unlimited space.
 * <p>
 * Fix: null-guards the store reference (it is nulled in dispose()), matching the
 * null-safety of the sibling accessors getSize()/getMaxSize(); previously this
 * threw a NullPointerException when called after dispose().
 *
 * @return the store's flag, or false when no store is available.
 */
@Override
public boolean getVirtualUnlimitedSpace()
{
    ICacheStorage store = _cacheStore;
    return store != null && store.getVirtualUnlimitedSpace();
}
/**
 * Sets whether the store should act as if it had unlimited space.
 * <p>
 * Fix: null-guards the store reference (it is nulled in dispose()), matching the
 * null-safety of the sibling mutators setEvictRatio()/setMaxSize(); previously
 * this threw a NullPointerException when called after dispose().
 *
 * @param isVirtualUnlimitedSpace new value of the flag
 */
@Override
public void setVirtualUnlimitedSpace(boolean isVirtualUnlimitedSpace)
{
    ICacheStorage store = _cacheStore;
    if (store != null) {
        store.setVirtualUnlimitedSpace(isVirtualUnlimitedSpace);
    }
}
/**
 * Checks whether the cache could be resized to the given size.
 * <p>
 * Fix: null-guards the store reference (it is nulled in dispose()), consistent
 * with the other store accessors; a disposed cache cannot be resized, so false
 * is returned instead of a NullPointerException.
 *
 * @param size proposed new size
 * @return true when the current data volume fits within the proposed size.
 */
@Override
public boolean CanChangeCacheSize(long size) {
    ICacheStorage store = _cacheStore;
    return store != null && store.getSize() <= size;
}
@Override
// Configures this local cache from the given property map: optional explicit-GC
// flag, optional "scavenging-policy" (eviction) section, and the mandatory
// "storage" section from which the storage provider is created.
// Throws ConfigurationException when 'storage' is missing or any step fails;
// the cache is disposed before rethrowing so no half-initialized state leaks.
protected void Initialize(java.util.Map cacheClasses, java.util.Map properties) throws ConfigurationException {
    if (properties == null) {
        throw new IllegalArgumentException("properties");
    }
    try {
        super.Initialize(cacheClasses, properties);
        if (ServicePropValues.Cache_EnableGCCollection != null) {
            _allowExplicitGCCollection = Boolean.parseBoolean(ServicePropValues.Cache_EnableGCCollection);
        }
        if (!properties.containsKey("storage")) {
            throw new ConfigurationException("Missing configuration option 'storage'");
        }
        if (properties.containsKey("scavenging-policy")) {
            java.util.Map evictionProps = (java.util.Map) ((properties.get("scavenging-policy") instanceof java.util.Map) ? properties.get("scavenging-policy") : null);
            // Guard against a non-Map "scavenging-policy" value (evictionProps == null)
            // that previously caused an unchecked NullPointerException.
            if (evictionProps != null && evictionProps.containsKey("eviction-enabled")) {
                Object enabledProp = evictionProps.get("eviction-enabled");
                // BUGFIX: "evict-ratio" was read without a presence check; a missing
                // key caused an NPE that was then masked as a generic ConfigurationException.
                Object ratioProp = evictionProps.get("evict-ratio");
                // Use a primitive boolean instead of a boxed Boolean with '&='.
                boolean createEvictionPolicy = enabledProp != null && Boolean.parseBoolean(enabledProp.toString());
                String evictRatio = ratioProp == null ? null : ratioProp.toString();
                if (evictRatio != null && !evictRatio.toLowerCase().equals("null")) {
                    createEvictionPolicy &= Double.parseDouble(evictRatio) > 0;
                } else {
                    // No usable ratio configured: eviction stays disabled.
                    createEvictionPolicy = false;
                }
                if (createEvictionPolicy) {
                    _evictionPolicy = EvictionPolicyFactory.CreateEvictionPolicy(evictionProps);
                }
            }
        } else {
            // No scavenging section at all: fall back to the default policy.
            _evictionPolicy = EvictionPolicyFactory.CreateDefaultEvictionPolicy();
        }
        java.util.Map storageProps = (java.util.Map) ((properties.get("storage") instanceof java.util.Map) ? properties.get("storage") : null);
        _cacheStore = CacheStorageFactory.CreateStorageProvider(storageProps, this._context.getSerializationContext(), _evictionPolicy != null, _context.getCacheLog(), _context.getEmailAlertNotifier());
        _stats.setMaxCount(_cacheStore.getMaxCount());
        _stats.setMaxSize(_cacheStore.getMaxSize());
    } catch (ConfigurationException e) {
        if (_context != null) {
            _context.getCacheLog().Error("LocalCache.Initialize()", e.toString());
        }
        dispose();
        throw e;
    } catch (Exception e) {
        if (_context != null) {
            _context.getCacheLog().Error("LocalCache.Initialize()", e.toString());
        }
        dispose();
        throw new ConfigurationException("Configuration Error: " + e.toString(), e);
    }
}
@Override
// Empties the backing store, resets the cache-size perf counter, halts any
// in-flight background eviction, and clears the eviction policy's index.
public void ClearInternal() {
if (_cacheStore == null) {
throw new UnsupportedOperationException();
}
_cacheStore.Clear();
_context.PerfStatsColl.setCacheSizeStats(0); // on clear cache, cachesize set to zero
// Stop a running eviction pass: clearing the store makes it pointless.
// NOTE(review): Thread.stop() is deprecated and unsafe (can leave locks/state
// inconsistent); consider a cooperative interrupt — confirm the eviction loop
// can observe interrupts before changing this.
if (_evictionThread != null) {
getCacheLog().Flush();
_evictionThread.stop();
}
if (_evictionPolicy != null) {
_evictionPolicy.Clear();
// Keep the eviction-index-size perf counter in sync with the emptied index.
if(_context.PerfStatsColl != null)
{
_context.PerfStatsColl.SetEvictionIndexSize(_evictionPolicy.getIndexInMemorySize());
}
}
}
@Override
public boolean ContainsInternal(Object key) throws StateTransferException {
    // Optional client-activity trace for the server monitor.
    if (ServerMonitor.getMonitorActivity()) {
        ServerMonitor.LogClientActivity("LocalCache.Cont", "");
    }
    // Membership test against the backing store.
    if (_cacheStore != null) {
        return _cacheStore.Contains(key);
    }
    throw new UnsupportedOperationException();
}
@Override
public CacheEntry GetInternal(Object key, boolean isUserOperation, OperationContext operationContext) throws StateTransferException {
    // Optional client-activity trace for the server monitor.
    if (ServerMonitor.getMonitorActivity()) {
        ServerMonitor.LogClientActivity("LocalCache.Get", "");
    }
    if (_cacheStore == null) {
        throw new UnsupportedOperationException();
    }
    CacheEntry entry = (CacheEntry) _cacheStore.Get(key);
    if (entry == null) {
        return null;
    }
    // On user-initiated reads, let a variant eviction hint (e.g. recency-based)
    // record the access so the policy's ordering stays current.
    EvictionHint hint = entry.getEvictionHint();
    if (isUserOperation && _evictionPolicy != null && hint != null && hint.getIsVariant()) {
        _evictionPolicy.Notify(key, hint, null);
    }
    return entry;
}
@Override
public CacheEntry GetEntryInternal(Object key, boolean isUserOperation) {
    // Optional client-activity trace for the server monitor.
    if (ServerMonitor.getMonitorActivity()) {
        ServerMonitor.LogClientActivity("LocalCache.GetInternal", "");
    }
    if (_cacheStore == null) {
        throw new UnsupportedOperationException();
    }
    CacheEntry entry = (CacheEntry) _cacheStore.Get(key);
    if (entry != null) {
        // User-initiated reads update a variant eviction hint so the policy's
        // ordering reflects the access.
        EvictionHint hint = entry.getEvictionHint();
        if (isUserOperation && _evictionPolicy != null && hint != null && hint.getIsVariant()) {
            _evictionPolicy.Notify(key, hint, null);
        }
    }
    return entry;
}
@Override
public int GetItemSize(Object key) {
    // Size of a single stored item; a missing store contributes nothing.
    return _cacheStore == null ? 0 : _cacheStore.GetItemSize(key);
}
@Override
// Adds a new entry to the backing store. Normalizes the entry's eviction hint
// against the configured policy (or strips it when eviction is disabled),
// registers the key with the policy on success, flags (once) when the store is
// full, refreshes index-size perf counters, and maps the storage-layer result
// to a CacheAddResult.
public CacheAddResult AddInternal(Object key, CacheEntry cacheEntry, boolean isUserOperation) throws StateTransferException, CacheException {
if (ServerMonitor.getMonitorActivity()) {
ServerMonitor.LogClientActivity("LocalCache.Add_1", "");
}
if (_cacheStore == null) {
throw new UnsupportedOperationException();
}
// Make the entry's hint compatible with the active eviction policy; a
// priority hint additionally stamps its priority onto the entry itself.
if (_evictionPolicy != null) {
if (cacheEntry.getEvictionHint() instanceof PriorityEvictionHint) {
cacheEntry.setPriority(((PriorityEvictionHint) cacheEntry.getEvictionHint()).getPriority());
}
cacheEntry.setEvictionHint(_evictionPolicy.CompatibleHint(cacheEntry.getEvictionHint()));
}
// No eviction configured: hints are meaningless, drop them.
if(_evictionPolicy == null)
cacheEntry.setEvictionHint(null);
StoreAddResult result = _cacheStore.Add(key, cacheEntry,!isUserOperation);
// Operation completed!
// Register the key with the eviction policy only when the store accepted it.
if (result == StoreAddResult.Success || result == StoreAddResult.SuccessNearEviction) {
if (_evictionPolicy != null) {
_evictionPolicy.Notify(key, null, cacheEntry.getEvictionHint());
}
}
// Log the cache-full condition once; the flag is reset when space is freed.
if (result == StoreAddResult.NotEnoughSpace && !_notifyCacheFull) {
_notifyCacheFull = true;
_context.getCacheLog().Error("LocalCache.AddInternal", "The cache is full and not enough items could be evicted.");
}
// Keep eviction/expiration index-size perf counters current.
if(_context.PerfStatsColl != null)
{
if(_evictionPolicy != null)
{
_context.PerfStatsColl.SetEvictionIndexSize(_evictionPolicy.getIndexInMemorySize());
}
if(_context.ExpiryMgr != null)
{
_context.PerfStatsColl.SetExpirationIndexSize(_context.ExpiryMgr.getIndexInMemorySize());
}
}
// Map the storage result onto the cache-level result enum.
switch (result) {
case Success:
return CacheAddResult.Success;
case KeyExists:
return CacheAddResult.KeyExists;
case NotEnoughSpace:
return CacheAddResult.NeedsEviction;
case SuccessNearEviction:
return CacheAddResult.SuccessNearEviction;
}
return CacheAddResult.Failure;
}
@Override
// Attaches an expiration hint to an existing entry. Returns false when the key
// is absent or when mixing idle and fixed expiration (mutually exclusive).
// Existing hints are merged into an AggregateExpirationHint; the updated entry
// is written back to the store and perf counters are refreshed.
public boolean AddInternal(Object key, ExpirationHint eh, OperationContext operationContext) throws StateTransferException {
if (ServerMonitor.getMonitorActivity()) {
ServerMonitor.LogClientActivity("LocalCache.Add_2", "");
}
if (_cacheStore == null) {
throw new UnsupportedOperationException();
}
CacheEntry e = (CacheEntry) _cacheStore.Get(key);
if (e == null) {
return false;
}
//We only allow either idle expiration or Fixed expiration both cannot be set at the same time
if ((e.getExpirationHint() instanceof IdleExpiration && eh instanceof FixedExpiration) || (e.getExpirationHint() instanceof FixedExpiration && eh instanceof IdleExpiration)) {
return false;
}
if (e.getExpirationHint() == null) {
e.setExpirationHint(eh);
} else {
// Already aggregated: just append. Otherwise wrap old + new into an aggregate.
if (e.getExpirationHint() instanceof AggregateExpirationHint) {
((AggregateExpirationHint) e.getExpirationHint()).Add(eh);
} else {
AggregateExpirationHint aeh = new AggregateExpirationHint();
aeh.Add(e.getExpirationHint());
aeh.Add(eh);
e.setExpirationHint(aeh);
}
}
// Persist the modified entry (internal insert: 'true' marks a non-user operation).
_cacheStore.Insert(key, e,true);
e.setLastModifiedTime(new java.util.Date());
// Keep eviction/expiration index-size perf counters current.
if(_context.PerfStatsColl != null)
{
if(_evictionPolicy != null)
{
_context.PerfStatsColl.SetEvictionIndexSize(_evictionPolicy.getIndexInMemorySize());
}
if(_context.ExpiryMgr != null)
{
_context.PerfStatsColl.SetExpirationIndexSize(_context.ExpiryMgr.getIndexInMemorySize());
}
}
return true;
}
@Override
// Detaches a specific expiration hint from an existing entry. Returns false
// when the key is absent or carries no expiration. For an aggregate hint, the
// matching component is filtered out; a directly-matching single hint is
// cleared. The updated entry is written back and perf counters refreshed.
public boolean RemoveInternal(Object key, ExpirationHint eh) throws StateTransferException, CacheException {
if (ServerMonitor.getMonitorActivity()) {
ServerMonitor.LogClientActivity("LocalCache.Remove", "");
}
if (_cacheStore == null) {
throw new UnsupportedOperationException();
}
CacheEntry e = (CacheEntry) _cacheStore.Get(key);
if (e == null || e.getExpirationHint() == null) {
return false;
} else {
if (e.getExpirationHint() instanceof AggregateExpirationHint) {
// Rebuild the aggregate without the hint being removed.
AggregateExpirationHint AggHint = new AggregateExpirationHint();
AggregateExpirationHint entryExpHint = (AggregateExpirationHint) e.getExpirationHint();
for (Iterator it = entryExpHint.iterator(); it.hasNext();) {
ExpirationHint exp = (ExpirationHint) it.next();
if (!exp.equals(eh)) {
AggHint.Add(exp);
}
}
e.setExpirationHint(AggHint);
} else if (e.getExpirationHint().equals(eh)) {
e.setExpirationHint(null);
}
}
// Removing data-bearing hints may have freed space: clear the cache-full flag.
if (_notifyCacheFull) {
_notifyCacheFull = false;
}
// Persist the modified entry (internal insert: 'true' marks a non-user operation).
_cacheStore.Insert(key, e,true);
e.setLastModifiedTime(new java.util.Date());
// Keep eviction/expiration index-size perf counters current.
if(_context.PerfStatsColl != null)
{
if(_evictionPolicy != null)
_context.PerfStatsColl.SetEvictionIndexSize((long)_evictionPolicy.getIndexInMemorySize());
if(_context.ExpiryMgr != null)
_context.PerfStatsColl.SetExpirationIndexSize((long)_context.ExpiryMgr.getIndexInMemorySize());
}
return true;
}
@Override
// Inserts or overwrites an entry in the backing store, honouring optional
// InsertParams carried in the operation context (replace-only semantics and/or
// compare-old-value checks). On success the eviction policy is notified; on
// overwrite the entry's version and last-modified time are carried forward.
// Maps the storage-layer result to a CacheInsResult.
public CacheInsResult InsertInternal(Object key, CacheEntry cacheEntry, boolean isUserOperation, CacheEntry oldEntry, OperationContext operationContext) throws StateTransferException, CacheException {
if (ServerMonitor.getMonitorActivity()) {
ServerMonitor.LogClientActivity("LocalCache.Insert", "");
}
if (_cacheStore == null) {
throw new UnsupportedOperationException();
}
// A priority hint stamps its priority onto the entry itself.
if (cacheEntry.getEvictionHint() instanceof PriorityEvictionHint) {
cacheEntry.setPriority(((PriorityEvictionHint) cacheEntry.getEvictionHint()).getPriority());
}
// Normalize the hint for the active policy; strip it when eviction is disabled.
if (_evictionPolicy != null) {
cacheEntry.setEvictionHint(_evictionPolicy.CompatibleHint(cacheEntry.getEvictionHint()));
}
// Remember the previous entry's hint so the policy can be re-pointed on overwrite.
EvictionHint peEvh = oldEntry == null ? null : oldEntry.getEvictionHint();
if(_evictionPolicy == null)
cacheEntry.setEvictionHint(null);
StoreInsResult result = StoreInsResult.Failure;
boolean doInsert = false;
InsertParams options = null;
Object obj = operationContext.GetValueByField(OperationContextFieldName.InsertParams);
if(obj != null)
{
options = (InsertParams) obj;
}
// No options: unconditional insert/overwrite.
if(options == null)
doInsert = true;
else
{
if(options.IsReplaceOperation)
{
// Replace: the key must already exist; optionally only replace when the
// current value matches the caller-supplied old value.
if(options.CompareOldValue)
{
//--TODO-- Compare Old value, doInsert if match
if(oldEntry == null && options.OldValue == null)
doInsert = true;
if(oldEntry!= null && oldEntry.valueEquals(options.OldValue))
doInsert = true;
// Mismatch: skip the write but signal that expiry should be extended.
if(oldEntry!=null && !oldEntry.valueEquals(options.OldValue))
operationContext.Add(OperationContextFieldName.ExtendExpiry, true);
}
else if(oldEntry == null)
result = StoreInsResult.Failure;
else
doInsert = true;
}
else
{
// Plain insert/overwrite; optionally gated on the old value matching.
if(options.CompareOldValue)
{
//--TODO-- Compare Old value, doInsert if match
if(oldEntry == null && options.OldValue == null)
doInsert = true;
if(oldEntry!= null && oldEntry.valueEquals(options.OldValue))
doInsert = true;
}
else
doInsert = true;
}
}
if(doInsert)
result = _cacheStore.Insert(key, cacheEntry,!isUserOperation);
// Operation completed!
if (result == StoreInsResult.Success || result == StoreInsResult.SuccessNearEviction) {
// Fresh insert: register the key with the eviction policy.
if (_evictionPolicy != null) {
_evictionPolicy.Notify(key, null, cacheEntry.getEvictionHint());
}
} else if (result == StoreInsResult.SuccessOverwrite || result == StoreInsResult.SuccessOverwriteNearEviction) {
//update the cache item version...
if (isUserOperation) {
cacheEntry.UpdateVersion(oldEntry);
}
//update the cache item last modifeid time...
cacheEntry.UpdateLastModifiedTime(oldEntry);
// Overwrite: swap the policy's reference from the old hint to the new one.
if (_evictionPolicy != null) {
_evictionPolicy.Notify(key, peEvh, cacheEntry.getEvictionHint());
}
}
// Log the cache-full condition once; the flag is reset when space is freed.
if (result == StoreInsResult.NotEnoughSpace && !_notifyCacheFull) {
_notifyCacheFull = true;
_context.getCacheLog().Error("LocalCache.InsertInternal", "The cache is full and not enough items could be evicted.");
}
// Keep eviction/expiration index-size perf counters current.
if(_context.PerfStatsColl != null)
{
if(_evictionPolicy != null)
{
_context.PerfStatsColl.SetEvictionIndexSize(_evictionPolicy.getIndexInMemorySize());
}
if(_context.ExpiryMgr != null)
{
_context.PerfStatsColl.SetExpirationIndexSize(_context.ExpiryMgr.getIndexInMemorySize());
}
}
// Map the storage result onto the cache-level result enum.
switch (result) {
case Success:
return CacheInsResult.Success;
case SuccessOverwrite:
return CacheInsResult.SuccessOverwrite;
case NotEnoughSpace:
return CacheInsResult.NeedsEviction;
case SuccessNearEviction:
return CacheInsResult.SuccessNearEvicition;
case SuccessOverwriteNearEviction:
return CacheInsResult.SuccessOverwriteNearEviction;
}
return CacheInsResult.Failure;
}
@Override
// Removes an entry from the backing store. When DeleteParams with a
// compare-old-value request are present in the context, the removal is skipped
// (returning null) if the current value does not match; the eviction policy is
// de-registered for a removed key and perf counters are refreshed.
public CacheEntry RemoveInternal(Object key, ItemRemoveReason removalReason, boolean isUserOperation, OperationContext operationContext) throws StateTransferException, CacheException {
if (ServerMonitor.getMonitorActivity()) {
ServerMonitor.LogClientActivity("LocalCache.Remove", "");
}
if (_cacheStore == null) {
throw new UnsupportedOperationException();
}
if(operationContext.Contains(OperationContextFieldName.DeleteParams))
{
DeleteParams deleteParams = (DeleteParams)operationContext.GetValueByField(OperationContextFieldName.DeleteParams);
if(deleteParams.CompareOldValue)
{
//Compare current value with params value
//if not match return null
CacheEntry pe = GetInternal(key, false, operationContext);
// Mismatch: abort the remove and signal expiry extension via the context.
if(pe!= null && !pe.valueEquals(deleteParams.OldValue)) {
operationContext.Add(OperationContextFieldName.ExtendExpiry, pe);
return null;
}
}
}
CacheEntry e = (CacheEntry) _cacheStore.Remove(key);
if (e != null) {
// De-register the key from the eviction policy's index.
if (_evictionPolicy != null && e.getEvictionHint() != null) {
_evictionPolicy.Remove(key, e.getEvictionHint());
}
// Removal freed space: clear the one-shot cache-full flag.
if (_notifyCacheFull) {
_notifyCacheFull = false;
}
}
// Keep eviction/expiration index-size perf counters current.
if(_context.PerfStatsColl != null)
{
if(_evictionPolicy != null)
_context.PerfStatsColl.SetEvictionIndexSize((long)_evictionPolicy.getIndexInMemorySize());
if(_context.ExpiryMgr != null)
_context.PerfStatsColl.SetExpirationIndexSize((long)_context.ExpiryMgr.getIndexInMemorySize());
}
return e;
}
@Override
public ResetableIterator GetEnumerator() throws OperationFailedException, LockingException, GeneralFailureException {
    // Iterate the backing store via a resettable wrapper; unusable without a store.
    if (_cacheStore != null) {
        return new ResetableCacheStoreIterator(_cacheStore);
    }
    throw new UnsupportedOperationException();
}
// Resettable iterator over a cache store: reset() simply re-acquires a fresh
// enumerator from the store. remove() delegates to the underlying iterator —
// whether it mutates the store depends on the storage provider's enumerator.
class ResetableCacheStoreIterator implements ResetableIterator {
public ICacheStorage cacheStore;
Iterator iter;
public ResetableCacheStoreIterator(ICacheStorage cacheStore) {
this.cacheStore = cacheStore;
iter = cacheStore.GetEnumerator();
}
@Override
public void reset() {
// Restart iteration from the beginning by fetching a new enumerator.
iter = cacheStore.GetEnumerator();
}
@Override
public boolean hasNext() {
return iter.hasNext();
}
@Override
public Object next() {
return iter.next();
}
@Override
public void remove() {
iter.remove();
}
}
@Override
// Returns all keys currently present in the backing store as an Object array.
public Object[] getKeys() {
    if (_cacheStore != null) {
        return _cacheStore.getKeys();
    }
    throw new UnsupportedOperationException();
}
@Override
// Triggers an eviction pass. No-op without a policy or when the parent cache
// forbids eviction. In async mode a single daemon thread runs the pass (at
// most one at a time — guarded by _eviction_sync_mutex and the null check);
// otherwise the pass runs synchronously on the caller's thread.
public void Evict() {
if (_evictionPolicy == null) {
return;
}
synchronized (_eviction_sync_mutex) {
if (_parentCache.getIsEvictionAllowed()) {
if (_allowAsyncEviction) {
// Only spawn a new worker when no eviction thread is already running;
// EvictAysnc() resets _evictionThread to null when it finishes.
if (_evictionThread == null) {
_evictionThread = new Thread(this);
_evictionThread.setDaemon(true);
_evictionThread.start();
}
} else {
DoEvict(this);
}
}
}
}
// Runs one eviction pass over the given cache facade: the configured policy
// decides what to remove based on the current data size. No-op without a policy.
private void DoEvict(CacheBase cache) {
    if (_evictionPolicy != null) {
        _evictionPolicy.Execute(cache, _context, getSize());
        // NOTE(review): the original carried an empty block guarded by
        // _allowExplicitGCCollection (presumably a stripped System.gc() call)
        // inside an empty try/finally; both were dead code and are removed here.
    }
}
// Body of the background eviction thread (invoked from run()). Picks the
// correct cache facade, runs one eviction pass, logs (never propagates) any
// failure, and always clears _evictionThread so Evict() can start a new pass.
// NOTE(review): method name is a long-standing typo for "EvictAsync"; renaming
// would require touching run() as well.
private void EvictAysnc() {
try {
if (!getIsSelfInternal()) {
DoEvict(_context.getCacheImpl());
} else {
DoEvict(_context.getCacheInternal());
}
}
catch (Exception e) {
if (_context != null) {
_context.getCacheLog().Error("LocalCache._evictionRun", e.toString());
}
} finally {
// Allow the next Evict() call to spawn a fresh worker thread.
synchronized (_eviction_sync_mutex) {
_evictionThread = null;
}
}
}
}
| |
package org.hbird.transport.payloadcodec.codecparameters.number;
import static org.junit.Assert.assertEquals;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.BitSet;
import org.hbird.core.commons.util.BitSetUtility;
import org.hbird.core.spacesystemmodel.encoding.Encoding;
import org.hbird.core.spacesystemmodel.encoding.Encoding.BinaryRepresentation;
import org.hbird.core.spacesystemmodel.tmtc.Parameter;
import org.hbird.core.spacesystemmodel.tmtc.provided.TelemeteredParameter;
import org.hbird.transport.payloadcodec.codecparameters.CodecParameter;
import org.hbird.transport.payloadcodec.exceptions.IncorrectJavaTypeParameter;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class UnsignedIntegerCodecParameterTest {
private final static Logger LOG = LoggerFactory.getLogger(UnsignedIntegerCodecParameterTest.class);
private static int tEnc = 0;
private static int tDec = 0;
private final static int MIN_UNSIGNED_ALL = 0;
private final static int MAX_UNSIGNED_BYTE = 255;
private final static int MAX_UNSIGNED_SHORT = 65535;
private UnsignedIntegerCodecParameter codec;
private final static String TEST_STR_VALUE_BE_555 = "1000101011";
private final static int TEST_VALUE_LENGTH_BE_555 = 10;
private static BitSet TEST_BITSET_VALUE_BE_555;
private final static String TEST_STR_VALUE_1 = "1";
private final static int TEST_VALUE_LENGTH_1 = 1;
private static BitSet TEST_BITSET_VALUE_1;
private final static String TEST_STR_VALUE_LE_555 = "0010101110";
private final static int TEST_VALUE_LENGTH_LE_555 = 10;
private static BitSet TEST_BITSET_VALUE_LE_555;
private final static String TEST_STR_VALUE_BE_1024 = "10000000000";
private final static int TEST_VALUE_LENGTH_1024 = 11;
private static BitSet TEST_BITSET_VALUE_BE_1024;
private static final String TEST_STR_VALUE_BE_123_32bit = "00000000000000000000000001111011";
private static final int TEST_VALUE_LENGTH_BE_123_32bit = 32;
private static BitSet TEST_BITSET_VALUE_123_32bit;
private static final String TEST_STR_VALUE_LE_123_32bit = "01111011000000000000000000000000";
private static final int TEST_VALUE_LENGTH_LE_123_32bit = 32;
private static BitSet TEST_BITSET_VALUE_123_LE_32bit;
/**
* The BE 999 is consists of a leading 0. Testing with this value will confirm whether we deal with this as it could
* cause issues with the BitSet
**/
private final static String TEST_STR_VALUE_BE_999 = "01111100111";
private final static int TEST_VALUE_LENGTH_BE_999 = 11;
private static BitSet TEST_BITSET_VALUE_BE_999;
@BeforeClass
public static void setUpBeforeClass() throws Exception {
LOG.debug("Setting up test bitsets....");
LOG.debug("Creating BE 555 Bitset");
TEST_BITSET_VALUE_BE_555 = new BitSet();
TEST_BITSET_VALUE_BE_555.set(0);
TEST_BITSET_VALUE_BE_555.set(4);
TEST_BITSET_VALUE_BE_555.set(6);
TEST_BITSET_VALUE_BE_555.set(8);
TEST_BITSET_VALUE_BE_555.set(9);
assertEquals(TEST_STR_VALUE_BE_555, BitSetUtility.bitSetToBinaryString(TEST_BITSET_VALUE_BE_555, true));
LOG.debug("Creating LE 555 Bitset");
// 0010101110
TEST_BITSET_VALUE_LE_555 = new BitSet();
TEST_BITSET_VALUE_LE_555.set(2);
TEST_BITSET_VALUE_LE_555.set(4);
TEST_BITSET_VALUE_LE_555.set(6);
TEST_BITSET_VALUE_LE_555.set(7);
TEST_BITSET_VALUE_LE_555.set(8);
assertEquals(TEST_STR_VALUE_LE_555, BitSetUtility.bitSetToBinaryString(TEST_BITSET_VALUE_LE_555, TEST_VALUE_LENGTH_LE_555));
TEST_BITSET_VALUE_BE_999 = BitSetUtility.stringToBitSet(TEST_STR_VALUE_BE_999, true, true);
assertEquals(TEST_STR_VALUE_BE_999, BitSetUtility.bitSetToBinaryString(TEST_BITSET_VALUE_BE_999, true));
LOG.debug("Creating BE 123 32bit Bitset");
TEST_BITSET_VALUE_123_32bit = BitSetUtility.stringToBitSet(TEST_STR_VALUE_BE_123_32bit, true, true);
assertEquals(TEST_STR_VALUE_BE_123_32bit, BitSetUtility.bitSetToBinaryString(TEST_BITSET_VALUE_123_32bit, true));
LOG.debug("Creating BE 1024 Bitset");
TEST_BITSET_VALUE_BE_1024 = new BitSet(TEST_VALUE_LENGTH_1024);
TEST_BITSET_VALUE_BE_1024.set(0);
assertEquals(TEST_STR_VALUE_BE_1024, BitSetUtility.bitSetToBinaryString(TEST_BITSET_VALUE_BE_1024, false).substring(0, TEST_VALUE_LENGTH_1024));
TEST_BITSET_VALUE_123_LE_32bit = new BitSet(TEST_VALUE_LENGTH_LE_123_32bit);
TEST_BITSET_VALUE_123_LE_32bit.set(1);
TEST_BITSET_VALUE_123_LE_32bit.set(2);
TEST_BITSET_VALUE_123_LE_32bit.set(3);
TEST_BITSET_VALUE_123_LE_32bit.set(4);
TEST_BITSET_VALUE_123_LE_32bit.set(6);
TEST_BITSET_VALUE_123_LE_32bit.set(7);
assertEquals(TEST_STR_VALUE_LE_123_32bit,
BitSetUtility.bitSetToBinaryString(TEST_BITSET_VALUE_123_LE_32bit, false).substring(0, TEST_VALUE_LENGTH_LE_123_32bit));
TEST_BITSET_VALUE_1 = new BitSet(TEST_VALUE_LENGTH_1);
TEST_BITSET_VALUE_1.set(0);
assertEquals(TEST_STR_VALUE_1, BitSetUtility.bitSetToBinaryString(TEST_BITSET_VALUE_1, false).substring(0, TEST_VALUE_LENGTH_1));
LOG.debug("Test bitsets set-up completed successfully");
}
@AfterClass
public static void testsFinished() {
LOG.info("Test summary");
LOG.info("------------------------");
LOG.info("Total encodes: " + tEnc);
LOG.info("Total decodes: " + tDec);
}
@Test
public void testBigEndianValueFromBitSet() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("test.unit", "uint", "", "");
Encoding enc = new Encoding(TEST_VALUE_LENGTH_BE_555, BinaryRepresentation.unsigned);
UnsignedIntegerCodecParameter codec = new UnsignedIntegerCodecParameter(p, enc);
codec.decode(TEST_BITSET_VALUE_BE_555, 0);
tDec++;
assertEquals(555, codec.getValue().intValue());
}
@Test
public void testOneValueFromBitSet() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("test.unit", "uint", "", "");
Encoding enc = new Encoding(TEST_VALUE_LENGTH_1, BinaryRepresentation.unsigned);
UnsignedIntegerCodecParameter codec = new UnsignedIntegerCodecParameter(p, enc);
codec.decode(TEST_BITSET_VALUE_1, 0);
tDec++;
assertEquals(1, codec.getValue().intValue());
}
@Test
public void testLittleEndianValueFromBitSet() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("test.unit", "uint", "", "");
Encoding enc = new Encoding(TEST_VALUE_LENGTH_LE_555, BinaryRepresentation.unsigned, ByteOrder.LITTLE_ENDIAN);
UnsignedIntegerCodecParameter codec = new UnsignedIntegerCodecParameter(p, enc);
codec.decode(TEST_BITSET_VALUE_LE_555, 0);
tDec++;
assertEquals(555, codec.getValue().intValue());
}
@Test
public void testBigEndianBoundaryValueFromBitSet() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("", "", "", "");
Encoding enc = new Encoding(TEST_VALUE_LENGTH_1024, BinaryRepresentation.unsigned);
UnsignedIntegerCodecParameter codec = new UnsignedIntegerCodecParameter(p, enc);
codec.decode(TEST_BITSET_VALUE_BE_1024, 0);
tDec++;
assertEquals(1024, codec.getValue().intValue());
}
@Test
public void testLitteEndianFromBitSet() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("", "", "", "");
Encoding enc = new Encoding(TEST_VALUE_LENGTH_LE_123_32bit, BinaryRepresentation.unsigned, ByteOrder.LITTLE_ENDIAN);
UnsignedIntegerCodecParameter codec = new UnsignedIntegerCodecParameter(p, enc);
codec.decode(TEST_BITSET_VALUE_123_LE_32bit, 0);
tDec++;
assertEquals(123, codec.getValue().intValue());
}
@Test
public void testZeroByteContainingValueFromBitSet() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("", "", "", "");
Encoding enc = new Encoding(TEST_VALUE_LENGTH_BE_123_32bit, BinaryRepresentation.unsigned);
UnsignedIntegerCodecParameter codecParam = new UnsignedIntegerCodecParameter(p, enc);
codecParam.decode(TEST_BITSET_VALUE_123_32bit, 0);
tDec++;
assertEquals(123, codecParam.getValue().intValue());
}
@SuppressWarnings("static-method")
@Test
public final void testLeadingZeroBE999ValueFromBitSet() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("", "", "", "");
Encoding enc = new Encoding(TEST_VALUE_LENGTH_BE_999, BinaryRepresentation.unsigned);
UnsignedIntegerCodecParameter behaviour = new UnsignedIntegerCodecParameter(p, enc);
behaviour.decode(TEST_BITSET_VALUE_BE_999, 0);
tDec++;
assertEquals(999, behaviour.getValue().intValue());
}
@SuppressWarnings("static-method")
@Test
public final void testDecodeByteFullRange() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("", "", "", "");
Encoding enc = new Encoding(Byte.SIZE, BinaryRepresentation.unsigned);
UnsignedIntegerCodecParameter behaviour = new UnsignedIntegerCodecParameter(p, enc);
for (int i = MIN_UNSIGNED_ALL; i < MAX_UNSIGNED_BYTE; i++) {
BitSet input = createTestBitSet(i, enc.getSizeInBits(), enc.getByteOrder());
behaviour.decode(input, 0);
tDec++;
assertEquals(i, behaviour.getValue().intValue());
}
}
@SuppressWarnings("static-method")
@Test
public final void testDecodeLittleEndianByteFullRange() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("", "", "", "");
Encoding enc = new Encoding(Byte.SIZE, BinaryRepresentation.unsigned, ByteOrder.LITTLE_ENDIAN);
UnsignedIntegerCodecParameter behaviour = new UnsignedIntegerCodecParameter(p, enc);
for (int i = MIN_UNSIGNED_ALL; i < MAX_UNSIGNED_BYTE; i++) {
BitSet input = createTestBitSet(i, enc.getSizeInBits(), enc.getByteOrder());
behaviour.decode(input, 0);
tDec++;
assertEquals(i, behaviour.getValue().intValue());
}
}
@SuppressWarnings("static-method")
@Test
public final void testDecodeShortFullRange() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("", "", "", "");
Encoding enc = new Encoding(Short.SIZE, BinaryRepresentation.unsigned);
UnsignedIntegerCodecParameter behaviour = new UnsignedIntegerCodecParameter(p, enc);
for (int i = MIN_UNSIGNED_ALL; i < MAX_UNSIGNED_SHORT; i++) {
BitSet input = createTestBitSet(i, enc.getSizeInBits(), enc.getByteOrder());
behaviour.decode(input, 0);
tDec++;
assertEquals(i, behaviour.getValue().intValue());
}
}
@SuppressWarnings("static-method")
@Test
public final void testDecodeLittleEndianShortFullRange() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("", "", "", "");
Encoding enc = new Encoding(Short.SIZE, BinaryRepresentation.unsigned, ByteOrder.LITTLE_ENDIAN);
UnsignedIntegerCodecParameter behaviour = new UnsignedIntegerCodecParameter(p, enc);
for (int i = MIN_UNSIGNED_ALL; i < MAX_UNSIGNED_SHORT; i++) {
BitSet input = createTestBitSet(i, enc.getSizeInBits(), enc.getByteOrder());
behaviour.decode(input, 0);
tDec++;
assertEquals(i, behaviour.getValue().intValue());
}
}
@SuppressWarnings("static-method")
@Test
public final void testDecodeIntLargeRange() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("", "", "", "");
Encoding enc = new Encoding(Integer.SIZE, BinaryRepresentation.unsigned);
UnsignedIntegerCodecParameter behaviour = new UnsignedIntegerCodecParameter(p, enc);
for (int i = MIN_UNSIGNED_ALL; i < 520509; i++) {
BitSet input = createTestBitSet(i, enc.getSizeInBits(), enc.getByteOrder());
behaviour.decode(input, 0);
tDec++;
assertEquals(i, behaviour.getValue().intValue());
}
}
@SuppressWarnings("static-method")
@Test(expected = IncorrectJavaTypeParameter.class)
public final void testIncorrectIntJavaTypeDecode() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("", "", "", "");
Encoding enc = new Encoding(56, BinaryRepresentation.unsigned);
UnsignedIntegerCodecParameter behaviour = new UnsignedIntegerCodecParameter(p, enc);
BitSet input = createTestBitSet(87, enc.getSizeInBits(), enc.getByteOrder());
behaviour.decode(input, 0);
tDec++;
assertEquals(87, behaviour.getValue().intValue());
}
@SuppressWarnings("static-method")
@Test
public final void testDecodeLittleEndianIntLargeRange() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("", "", "", "");
Encoding enc = new Encoding(Integer.SIZE, BinaryRepresentation.unsigned, ByteOrder.LITTLE_ENDIAN);
UnsignedIntegerCodecParameter behaviour = new UnsignedIntegerCodecParameter(p, enc);
for (int i = MIN_UNSIGNED_ALL; i < 520509; i++) {
BitSet input = createTestBitSet(i, enc.getSizeInBits(), enc.getByteOrder());
behaviour.decode(input, 0);
tDec++;
assertEquals(i, behaviour.getValue().intValue());
}
}
private static BitSet createTestBitSet(int i, int sizeInBits, ByteOrder byteOrder) {
int numberOfBytes = sizeInBits / Byte.SIZE;
// Any remaining bits require an extra Byte
if (sizeInBits % Byte.SIZE != 0) {
numberOfBytes++;
}
ByteBuffer buf = ByteBuffer.allocate(numberOfBytes);
ByteBuffer littleEndianBuffer = ByteBuffer.allocate(numberOfBytes);
if (sizeInBits <= 8) {
buf.put((byte) i).flip();
if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
buf.order(ByteOrder.LITTLE_ENDIAN);
byte leVersion = buf.get();
buf.flip();
littleEndianBuffer.put(leVersion);
buf = littleEndianBuffer;
}
}
else if (sizeInBits <= 16) {
buf.putShort((short) i).flip();
if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
buf.order(ByteOrder.LITTLE_ENDIAN);
short leVersion = buf.getShort();
buf.flip();
littleEndianBuffer.putShort(leVersion);
buf = littleEndianBuffer;
}
}
else if (sizeInBits <= 32) {
buf.putInt(i).flip();
if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
buf.order(ByteOrder.LITTLE_ENDIAN);
int leVersion = buf.getInt();
buf.flip();
littleEndianBuffer.putInt(leVersion);
buf = littleEndianBuffer;
}
}
return BitSetUtility.fromByteArray(buf.array());
}
@SuppressWarnings("static-method")
@Test
public final void testEncode10BitIntegerBE555() {
BitSet actual = new BitSet();
Parameter<Integer> parameter = new TelemeteredParameter<Integer>("", "", "", "");
parameter.setValue(555);
Encoding enc = new Encoding(TEST_VALUE_LENGTH_BE_555, BinaryRepresentation.unsigned);
UnsignedIntegerCodecParameter codecParameter = new UnsignedIntegerCodecParameter(parameter, enc);
actual = codecParameter.encodeToBitSet(actual, 0);
tEnc++;
assertEquals(actual, TEST_BITSET_VALUE_BE_555);
}
@SuppressWarnings("static-method")
@Test
public final void testEncode1BitFlagIntegerOn() {
BitSet actual = new BitSet();
Parameter<Integer> parameter = new TelemeteredParameter<Integer>("", "", "", "");
parameter.setValue(1);
Encoding enc = new Encoding(TEST_VALUE_LENGTH_1, BinaryRepresentation.unsigned);
UnsignedIntegerCodecParameter codecParameter = new UnsignedIntegerCodecParameter(parameter, enc);
actual = codecParameter.encodeToBitSet(actual, 0);
tEnc++;
assertEquals(actual, TEST_BITSET_VALUE_1);
}
@Test
public final void testByteFullRangeEncode() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("test.p", "byteGaben", "", "");
Encoding enc = new Encoding(Byte.SIZE, BinaryRepresentation.unsigned);
codec = new UnsignedIntegerCodecParameter(p, enc);
for (int i = MIN_UNSIGNED_ALL; i <= MAX_UNSIGNED_BYTE; i++) {
p.setValue(i);
byte[] value = { (byte) i };
BitSet expected = BitSetUtility.fromByteArray(value);
encodeAndAssert(codec, expected);
}
}
@Test
public final void testShortFullRangeEncode() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("test.p", "byteGaben", "", "");
Encoding enc = new Encoding(Short.SIZE, BinaryRepresentation.unsigned);
codec = new UnsignedIntegerCodecParameter(p, enc);
for (int i = MIN_UNSIGNED_ALL; i <= MAX_UNSIGNED_SHORT; i++) {
p.setValue(i);
ByteBuffer buffer = ByteBuffer.allocate(2);
buffer.putShort((short) i);
BitSet expected = BitSetUtility.fromByteArray(buffer.array());
encodeAndAssert(codec, expected);
}
}
@Test
public final void testIntLargeRangeEncode() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("test.p", "byteGaben", "", "");
Encoding enc = new Encoding(Integer.SIZE, BinaryRepresentation.unsigned);
codec = new UnsignedIntegerCodecParameter(p, enc);
for (int i = MIN_UNSIGNED_ALL; i <= 120509; i++) {
p.setValue(i);
ByteBuffer buffer = ByteBuffer.allocate(4);
buffer.putInt(i);
BitSet expected = BitSetUtility.fromByteArray(buffer.array());
encodeAndAssert(codec, expected);
}
}
@Test
public final void testLittleEndianShortFullRangeEncode() {
Parameter<Integer> p = new TelemeteredParameter<Integer>("test.p", "byteGaben", "", "");
Encoding enc = new Encoding(Short.SIZE, BinaryRepresentation.unsigned, ByteOrder.LITTLE_ENDIAN);
codec = new UnsignedIntegerCodecParameter(p, enc);
for (int i = MIN_UNSIGNED_ALL; i <= MAX_UNSIGNED_SHORT; i++) {
p.setValue(i);
// Create little endian version of the short and a little endian version of the short
// as a BitSet for the assert.
ByteBuffer buffer = ByteBuffer.allocate(2);
buffer.putShort((short) i).flip();
buffer.order(ByteOrder.LITTLE_ENDIAN);
short littleEndianValue = buffer.getShort();
ByteBuffer littleEndianBuffer = ByteBuffer.allocate(2);
littleEndianBuffer.putShort(littleEndianValue);
BitSet expected = BitSetUtility.fromByteArray(littleEndianBuffer.array());
encodeAndAssert(codec, expected);
}
}
@Test
public final void testLittleEndianByteFullRangeEncode() {
    Parameter<Integer> parameter = new TelemeteredParameter<Integer>("test.p", "byteGaben", "", "");
    Encoding encoding = new Encoding(Byte.SIZE, BinaryRepresentation.unsigned, ByteOrder.LITTLE_ENDIAN);
    codec = new UnsignedIntegerCodecParameter(parameter, encoding);
    for (int value = MIN_UNSIGNED_ALL; value <= MAX_UNSIGNED_BYTE; value++) {
        parameter.setValue(value);
        // Byte order is irrelevant for a single byte: swapping a one-byte
        // quantity is the identity, so the expected bit pattern is simply the
        // value itself.
        byte[] raw = { (byte) value };
        encodeAndAssert(codec, BitSetUtility.fromByteArray(raw));
    }
}
// NOTE(review): the method name contains a typo ("Inccorect"); renaming a
// public test method is a visible interface change and is left for a
// dedicated rename.
@Test(expected = IncorrectJavaTypeParameter.class)
public final void testInccorectIntTypeEncode() {
    Parameter<Integer> p = new TelemeteredParameter<Integer>("test.p", "byteGaben", "", "");
    // 45 bits does not map to a supported unsigned integer width; the test
    // passes only if IncorrectJavaTypeParameter is thrown somewhere below
    // (see the expected= annotation above).
    Encoding enc = new Encoding(45, BinaryRepresentation.unsigned);
    codec = new UnsignedIntegerCodecParameter(p, enc);
    for (int i = MIN_UNSIGNED_ALL; i <= MAX_UNSIGNED_BYTE; i++) {
        p.setValue(i);
        // The loop body was copied from the little-endian byte test; it only
        // executes if construction above did not already throw.
        ByteBuffer buffer = ByteBuffer.allocate(1);
        buffer.put((byte) i).flip();
        buffer.order(ByteOrder.LITTLE_ENDIAN);
        byte littleEndianValue = buffer.get();
        ByteBuffer littleEndianBuffer = ByteBuffer.allocate(1);
        littleEndianBuffer.put(littleEndianValue);
        BitSet expected = BitSetUtility.fromByteArray(littleEndianBuffer.array());
        encodeAndAssert(codec, expected);
    }
}
// Runs the codec against a fresh BitSet and asserts it produced exactly the
// expected bit pattern, bumping the shared encode counter as a side effect.
private static void encodeAndAssert(CodecParameter<Integer> codec, BitSet expected) {
    BitSet encoded = new BitSet();
    codec.encodeToBitSet(encoded, 0);
    tEnc++;
    assertEquals("The codec encoding result should match this bitset: " + expected, expected, encoded);
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.util;
import com.intellij.openapi.util.io.PathExecLazyValue;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.lang.JavaVersion;
import org.jetbrains.annotations.NotNull;
import java.util.List;
import java.util.Locale;
import static com.intellij.openapi.util.text.StringUtil.containsIgnoreCase;
import static com.intellij.util.ObjectUtils.notNull;
/**
 * Provides information about operating system, system-wide settings, and Java Runtime.
 */
@SuppressWarnings({"HardCodedStringLiteral", "UnusedDeclaration"})
public class SystemInfo extends SystemInfoRt {
  public static final String OS_NAME = SystemInfoRt.OS_NAME;
  public static final String OS_VERSION = SystemInfoRt.OS_VERSION;
  public static final String OS_ARCH = System.getProperty("os.arch");
  public static final String JAVA_VERSION = System.getProperty("java.version");
  public static final String JAVA_RUNTIME_VERSION = getRtVersion(JAVA_VERSION);
  public static final String JAVA_VENDOR = System.getProperty("java.vm.vendor", "Unknown");

  /**
   * @deprecated use {@link #is32Bit} or {@link #is64Bit} instead
   */
  @Deprecated public static final String ARCH_DATA_MODEL = System.getProperty("sun.arch.data.model");

  public static final String SUN_DESKTOP = System.getProperty("sun.desktop", "");

  /**
   * Returns {@code java.runtime.version} when it looks like a version string, otherwise the fallback.
   * The property may be unset (null) or empty on stripped-down runtimes; guard before charAt(0) —
   * previously this threw NullPointerException / StringIndexOutOfBoundsException in that case.
   */
  private static String getRtVersion(@SuppressWarnings("SameParameterValue") String fallback) {
    String rtVersion = System.getProperty("java.runtime.version");
    return rtVersion != null && !rtVersion.isEmpty() && Character.isDigit(rtVersion.charAt(0)) ? rtVersion : fallback;
  }

  public static final boolean isWindows = SystemInfoRt.isWindows;
  public static final boolean isMac = SystemInfoRt.isMac;
  public static final boolean isLinux = SystemInfoRt.isLinux;
  public static final boolean isFreeBSD = SystemInfoRt.isFreeBSD;
  public static final boolean isSolaris = SystemInfoRt.isSolaris;
  public static final boolean isUnix = SystemInfoRt.isUnix;

  // JVM vendor detection is purely string-based on "java.vm.vendor".
  public static final boolean isAppleJvm = containsIgnoreCase(JAVA_VENDOR, "Apple");
  public static final boolean isOracleJvm = containsIgnoreCase(JAVA_VENDOR, "Oracle");
  public static final boolean isSunJvm = containsIgnoreCase(JAVA_VENDOR, "Sun") && containsIgnoreCase(JAVA_VENDOR, "Microsystems");
  public static final boolean isIbmJvm = containsIgnoreCase(JAVA_VENDOR, "IBM");
  public static final boolean isAzulJvm = containsIgnoreCase(JAVA_VENDOR, "Azul");
  public static final boolean isJetBrainsJvm = containsIgnoreCase(JAVA_VENDOR, "JetBrains");

  public static final boolean IS_AT_LEAST_JAVA9 = isModularJava();

  // Detects Java 9+ by probing for the module system via reflection, which
  // works without compiling against a 9+ class library.
  @SuppressWarnings("JavaReflectionMemberAccess")
  private static boolean isModularJava() {
    try {
      Class.class.getMethod("getModule");
      return true;
    }
    catch (Throwable t) {
      return false;
    }
  }

  public static boolean isOsVersionAtLeast(@NotNull String version) {
    return StringUtil.compareVersionNumbers(OS_VERSION, version) >= 0;
  }

  /* version numbers from http://msdn.microsoft.com/en-us/library/windows/desktop/ms724832.aspx */
  public static final boolean isWin2kOrNewer = isWindows && isOsVersionAtLeast("5.0");
  public static final boolean isWinXpOrNewer = isWindows && isOsVersionAtLeast("5.1");
  public static final boolean isWinVistaOrNewer = isWindows && isOsVersionAtLeast("6.0");
  public static final boolean isWin7OrNewer = isWindows && isOsVersionAtLeast("6.1");
  public static final boolean isWin8OrNewer = isWindows && isOsVersionAtLeast("6.2");
  public static final boolean isWin10OrNewer = isWindows && isOsVersionAtLeast("10.0");

  public static final boolean isXWindow = isUnix && !isMac;
  public static final boolean isWayland = isXWindow && !StringUtil.isEmpty(System.getenv("WAYLAND_DISPLAY"));
  /* http://askubuntu.com/questions/72549/how-to-determine-which-window-manager-is-running/227669#227669 */
  public static final boolean isGNOME = isXWindow &&
                                        (notNull(System.getenv("GDMSESSION"), "").startsWith("gnome") ||
                                         notNull(System.getenv("XDG_CURRENT_DESKTOP"), "").toLowerCase(Locale.ENGLISH).endsWith("gnome"));
  /* https://userbase.kde.org/KDE_System_Administration/Environment_Variables#KDE_FULL_SESSION */
  public static final boolean isKDE = isXWindow && !StringUtil.isEmpty(System.getenv("KDE_FULL_SESSION"));

  public static final boolean isMacSystemMenu = isMac && "true".equals(System.getProperty("apple.laf.useScreenMenuBar"));

  public static final boolean isFileSystemCaseSensitive = SystemInfoRt.isFileSystemCaseSensitive;
  public static final boolean areSymLinksSupported = isUnix || isWinVistaOrNewer;

  public static final boolean is32Bit = SystemInfoRt.is32Bit;
  public static final boolean is64Bit = SystemInfoRt.is64Bit;
  public static final boolean isMacIntel64 = isMac && "x86_64".equals(OS_ARCH);

  // Lazily probes PATH for the binary; evaluated at most once.
  private static final NotNullLazyValue<Boolean> ourHasXdgOpen = new PathExecLazyValue("xdg-open");
  public static boolean hasXdgOpen() {
    return isXWindow && ourHasXdgOpen.getValue();
  }

  private static final NotNullLazyValue<Boolean> ourHasXdgMime = new PathExecLazyValue("xdg-mime");
  public static boolean hasXdgMime() {
    return isXWindow && ourHasXdgMime.getValue();
  }

  public static final boolean isMacOSTiger = isMac && isOsVersionAtLeast("10.4");
  public static final boolean isMacOSLeopard = isMac && isOsVersionAtLeast("10.5");
  public static final boolean isMacOSSnowLeopard = isMac && isOsVersionAtLeast("10.6");
  public static final boolean isMacOSLion = isMac && isOsVersionAtLeast("10.7");
  public static final boolean isMacOSMountainLion = isMac && isOsVersionAtLeast("10.8");
  public static final boolean isMacOSMavericks = isMac && isOsVersionAtLeast("10.9");
  public static final boolean isMacOSYosemite = isMac && isOsVersionAtLeast("10.10");
  public static final boolean isMacOSElCapitan = isMac && isOsVersionAtLeast("10.11");
  public static final boolean isMacOSSierra = isMac && isOsVersionAtLeast("10.12");
  public static final boolean isMacOSHighSierra = isMac && isOsVersionAtLeast("10.13");
  public static final boolean isMacOSMojave = isMac && isOsVersionAtLeast("10.14");

  /** Returns "major.minor" of the current macOS version, e.g. "10.14". */
  @NotNull
  public static String getMacOSMajorVersion() {
    return getMacOSMajorVersion(OS_VERSION);
  }

  public static String getMacOSMajorVersion(String version) {
    int[] parts = getMacOSVersionParts(version);
    return String.format("%d.%d", parts[0], parts[1]);
  }

  @NotNull
  public static String getMacOSVersionCode() {
    return getMacOSVersionCode(OS_VERSION);
  }

  @NotNull
  public static String getMacOSMajorVersionCode() {
    return getMacOSMajorVersionCode(OS_VERSION);
  }

  @NotNull
  public static String getMacOSMinorVersionCode() {
    return getMacOSMinorVersionCode(OS_VERSION);
  }

  /** Encodes "a.b.c" as a 4-digit code "AAbc" with b and c clamped to 9 (e.g. "10.14.2" -> "1042"). */
  @NotNull
  public static String getMacOSVersionCode(@NotNull String version) {
    int[] parts = getMacOSVersionParts(version);
    return String.format("%02d%d%d", parts[0], normalize(parts[1]), normalize(parts[2]));
  }

  @NotNull
  public static String getMacOSMajorVersionCode(@NotNull String version) {
    int[] parts = getMacOSVersionParts(version);
    return String.format("%02d%d%d", parts[0], normalize(parts[1]), 0);
  }

  @NotNull
  public static String getMacOSMinorVersionCode(@NotNull String version) {
    int[] parts = getMacOSVersionParts(version);
    return String.format("%02d%02d", parts[1], parts[2]);
  }

  // Splits "a.b.c" into three ints, padding missing components with 0.
  // NOTE(review): assumes StringUtil.split returns a mutable list (parts.add below) — verify.
  private static int[] getMacOSVersionParts(@NotNull String version) {
    List<String> parts = StringUtil.split(version, ".");
    while (parts.size() < 3) {
      parts.add("0");
    }
    return new int[]{toInt(parts.get(0)), toInt(parts.get(1)), toInt(parts.get(2))};
  }

  public static String getOsNameAndVersion() {
    String osName = System.getProperty("os.name");
    if (isMacOSSierra) {
      osName = "macOS"; //JDK always returns Mac OS X
    }
    return osName + " " + System.getProperty("os.version");
  }

  // Clamps a version component to a single digit for the version-code formats.
  private static int normalize(int number) {
    return number > 9 ? 9 : number;
  }

  // Lenient parse: malformed components count as 0 instead of failing.
  private static int toInt(String string) {
    try {
      return Integer.parseInt(string);  // parseInt avoids the needless boxing of Integer.valueOf
    }
    catch (NumberFormatException e) {
      return 0;
    }
  }

  public static boolean isJavaVersionAtLeast(int major, int minor, int update) {
    return JavaVersion.current().compareTo(JavaVersion.compose(major, minor, update, 0, false)) >= 0;
  }

  //<editor-fold desc="Deprecated stuff.">
  /** @deprecated use {@link #isJavaVersionAtLeast(int, int, int)} (to be removed in IDEA 2020) */
  @Deprecated
  public static boolean isJavaVersionAtLeast(String v) {
    return StringUtil.compareVersionNumbers(JAVA_RUNTIME_VERSION, v) >= 0;
  }

  /** @deprecated use {@link #isWinXpOrNewer} (to be removed in IDEA 2018) */
  @Deprecated public static final boolean isWindowsXP = isWindows && (OS_VERSION.equals("5.1") || OS_VERSION.equals("5.2"));

  /** @deprecated use {@link #is32Bit} or {@link #is64Bit} (to be removed in IDEA 2018) */
  @Deprecated public static final boolean isAMD64 = "amd64".equals(OS_ARCH);
  //</editor-fold>
}
| |
/** This file is released under the Apache License 2.0. See the LICENSE file for details. **/
package com.github.kskelm.baringo;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import com.github.kskelm.baringo.model.Image;
import com.github.kskelm.baringo.model.ImgurResponseWrapper;
import com.github.kskelm.baringo.util.BaringoApiException;
import com.github.kskelm.baringo.util.BaringoAuthException;
import com.google.gson.GsonBuilder;
import com.squareup.okhttp.MediaType;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.RequestBody;
import retrofit.Call;
import retrofit.Response;
/**
*
* Services related to manipulating images in Imgur (why we're here, assumedly)
* @author Kevin Kelm (triggur@gmail.com)
*/
public class ImageService {

    /**
     * Given an image id, return info about the Image object for it.
     * <p>
     * <b>ACCESS: ANONYMOUS</b>
     * @param id the id of the image, for example "PgZtz0j".
     *    If a user is logged in and this image is theirs, the
     *    deleteHash property will be filled in.  It will otherwise
     *    be null
     * @return Image object
     * @throws BaringoApiException something went pear-shaped
     */
    public Image getImageInfo( String id ) throws BaringoApiException {
        Call<ImgurResponseWrapper<Image>> call =
                client.getApi().getImageInfo( id );

        try {
            Response<ImgurResponseWrapper<Image>> res = call.execute();
            ImgurResponseWrapper<Image> out = res.body();
            client.throwOnWrapperError( res );

            return out.getData();
        } catch (IOException e) {
            throw new BaringoApiException( e.getMessage() );
        } // try-catch
    }

    /**
     * Upload an image to Imgur by pointing at a Url on the internet.
     * Must be available openly without authentication.
     * <p>
     * <b>ACCESS: ANONYMOUS</b> or <b>AUTHENTICATED USER</b>
     * @param Url the full URL of the image.
     * @param fileName original name of the file being uploaded (pick something)
     * @param albumId the name of the album, the album's deleteHash if it's anonymous, or null if none
     * @param title title of image or null if none
     * @param description description of image or null if none
     * @return The new Image object. If this is anonymous, <i>hang on to the delete hash</i> or you won't be able to manipulate it in the future!
     * @throws BaringoApiException something terrible happened to Stuart!
     */
    public Image uploadUrlImage(
            String Url,
            String fileName,
            String albumId,
            String title,
            String description ) throws BaringoApiException {

        // Imgur expects the source URL as a plain-text request body.
        RequestBody body = RequestBody.create(
                MediaType.parse("text/plain"), Url );

        Call<ImgurResponseWrapper<Image>> call =
                client.getApi().uploadUrlImage(
                        albumId,
                        "URL",
                        title,
                        description,
                        body );

        try {
            Response<ImgurResponseWrapper<Image>> res = call.execute();
            ImgurResponseWrapper<Image> out = res.body();
            client.throwOnWrapperError( res );

            return out.getData();
        } catch (IOException e) {
            throw new BaringoApiException( e.getMessage() );
        } // try-catch
    }

    /**
     * Upload an image to Imgur as a stream from the local filesystem.
     * Use a buffered stream wherever possible!
     * <p>
     * <b>ACCESS: ANONYMOUS</b> or <b>AUTHENTICATED USER</b>
     * @param mimeType mime type like image/png.  If null, Baringo will try to infer this from the fileName.
     * @param fileName name of the file being uploaded
     * @param albumId the name of the album, the album's deleteHash if it's anonymous, or null if none
     * @param title title of image or null if none
     * @param description description of image or null if none
     * @return The new Image object. If this is anonymous, <i>hang on to the delete hash</i> or you won't be able to manipulate it in the future!
     * @throws IOException Something was wrong with the file or streaming didn't work
     * @throws BaringoApiException que sera sera
     */
    public Image uploadLocalImage(
            String mimeType,
            String fileName,
            String albumId,
            String title,
            String description ) throws IOException, BaringoApiException { // can be null

        File file = new File( fileName );
        if( !file.exists() ) {
            throw new FileNotFoundException( "File not found: " + fileName );
        } // if
        if( !file.canRead() ) {
            throw new IOException( "Cannot access file " + fileName );
        } // if

        if( mimeType == null ) { // infer from file extension
            int dotAt = fileName.lastIndexOf( '.' );
            if( dotAt == -1 ) {
                throw new BaringoApiException( "Could not infer mime type"
                        + " from file name; no extension" );
            } // if
            String ext = fileName.substring( dotAt + 1 ).toLowerCase();
            mimeType = extensionToMimeType.get( ext );
            if( mimeType == null ) {
                throw new BaringoApiException( "Could not infer mime type"
                        + " from extension '" + ext + "'" );
            } // if
        } // if

        // strip the directory hierarchy off the filename.
        Path path = Paths.get( fileName );
        fileName = path.getFileName().toString();

        RequestBody body = RequestBody.create( MediaType.parse(mimeType), file );

        Call<ImgurResponseWrapper<Image>> call =
                client.getApi().uploadLocalImage(
                        albumId,
                        "file",
                        title,
                        description,
                        fileName,
                        body );

        try {
            Response<ImgurResponseWrapper<Image>> res = call.execute();
            ImgurResponseWrapper<Image> out = res.body();
            client.throwOnWrapperError( res );

            return out.getData();
        } catch (IOException e) {
            throw new BaringoApiException( e.getMessage() );
        } // try-catch
    }

    /**
     * Given an image id and an output stream, download the image
     * and write it to the stream.  It is the caller's responsibility
     * to close the output stream.
     * <p>
     * NOTE: This is synchronous.
     * <p>
     * <b>ACCESS: ANONYMOUS</b>
     * @param imageLink the image link to download (could be a thumb too)
     * @param outStream an output stream to write the data to
     * @return the number of bytes written
     * @throws IOException could be anything really
     * @throws BaringoApiException Imgur didn't like something
     */
    public long downloadImage(
            String imageLink,
            OutputStream outStream ) throws IOException, BaringoApiException {
        Request request = new Request
                .Builder()
                .url( imageLink )
                .build();

        // Local OkHttpClient renamed from "client" so it no longer shadows the
        // BaringoClient field of the same name.
        OkHttpClient httpClient = new OkHttpClient();
        com.squareup.okhttp.Response resp = httpClient
                .newCall( request )
                .execute();

        if( resp.code() != 200 || !resp.isSuccessful() ) {
            throw new BaringoApiException( request.urlString()
                    + ": " + resp.message(), resp.code() );
        } // if
        if( resp.body() == null ) {
            throw new BaringoApiException( "No response body found" );
        } // if

        // try-with-resources closes the response body stream; previously the
        // stream was never closed, leaking the underlying connection.
        try( BufferedInputStream input =
                new BufferedInputStream( resp.body().byteStream() ) ) {
            byte[] data = new byte[8192]; // because powers of two are magic
            long total = 0;
            int count;
            while ((count = input.read(data)) != -1) {
                total += count;
                outStream.write(data, 0, count);
            } // while
            return total;
        } // try
    }

    /**
     * Given an image id and a file path to store it to, download
     * the image.  File must be writeable and the path must exist.
     * <p>
     * NOTE: This is synchronous.
     * <p>
     * <b>ACCESS: ANONYMOUS</b>
     * @param imageLink the image link to download (could be a thumb too)
     * @param fileName name of the file to write to
     * @return the number of bytes written
     * @throws IOException myriad
     * @throws BaringoApiException Imgur didn't like something
     */
    public long downloadImage(
            String imageLink,
            String fileName ) throws IOException, BaringoApiException {
        // try-with-resources: the old hand-rolled finally block threw a
        // NullPointerException when the FileOutputStream constructor failed
        // (output was still null when close() was called).
        try( OutputStream output = new BufferedOutputStream(
                new FileOutputStream( fileName) ) ) {
            return downloadImage( imageLink, output );
        } // try
    }

    /**
     * Updates an image with a new title and description
     * <p>
     * <b>ACCESS: ANONYMOUS</b> or AUTHENTICATED USER
     * @param idOrDeleteHash If the image is anonymous, this is the delete hash.  If not then it's an imageId and the currently-authenticated account must own it.
     * @param title title of the image or null if none
     * @param description description of the image or null if none
     * @return true if it worked
     * @throws BaringoApiException C'est la vie
     */
    public boolean updateImage(
            String idOrDeleteHash,
            String title,
            String description ) throws BaringoApiException {

        Call<ImgurResponseWrapper<Boolean>> call =
                client.getApi().updateImageInfo( idOrDeleteHash, title, description);

        try {
            Response<ImgurResponseWrapper<Boolean>> res = call.execute();
            ImgurResponseWrapper<Boolean> out = res.body();
            client.throwOnWrapperError( res );

            return out.getData();
        } catch (IOException e) {
            throw new BaringoApiException( e.getMessage() );
        } // try-catch
    }

    /**
     * Deletes an image
     * <p>
     * <b>ACCESS: ANONYMOUS</b> or <b>AUTHENTICATED USER</b>
     * @param idOrDeleteHash If the image is anonymous, this is the delete hash.  If not then it's an imageId and the currently-authenticated account must own it.
     * @return true if it worked
     * @throws BaringoApiException C'est la vie
     */
    public boolean deleteImage(
            String idOrDeleteHash ) throws BaringoApiException {
        Call<ImgurResponseWrapper<Boolean>> call =
                client.getApi().deleteImage( idOrDeleteHash );

        try {
            Response<ImgurResponseWrapper<Boolean>> res = call.execute();
            ImgurResponseWrapper<Boolean> out = res.body();
            client.throwOnWrapperError( res );

            return out.getData();
        } catch (IOException e) {
            throw new BaringoApiException( e.getMessage() );
        } // try-catch
    }

    /**
     * Marks the image as favorite for the currently-authenticated user.
     * Note that the Image object needs to be in sync with Imgur because
     * the site's API only acknowledges a toggle of the value.  The image
     * object is updated with the new status.  If the image is already
     * marked as a favorite, no action is taken.
     * <p>
     * NOTE: An account can't favorite its own images.
     * <p>
     * <b>ACCESS: AUTHENTICATED USER</b>
     * @param image the image to favorite.
     * @return the updated image object
     * @throws BaringoApiException argh
     */
    public Image favoriteImage( Image image ) throws BaringoApiException {
        if( !client.authService().isUserAuthenticated() ) {
            throw new BaringoAuthException( "No user logged in", 401 );
        } // if
        if( image.isFavorite() ) {
            return image; // already done
        } // if
        Call<ImgurResponseWrapper<Image>> call =
                client.getApi().toggleImageFavorite( image.getId() );

        try {
            Response<ImgurResponseWrapper<Image>> res = call.execute();
            ImgurResponseWrapper<Image> out = res.body();
            client.throwOnWrapperError( res );

            return out.getData();
        } catch (IOException e) {
            throw new BaringoApiException( e.getMessage() );
        }
    }

    /**
     * Unfavorites the image for the currently-authenticated user.
     * Note that the Image object needs to be in sync with Imgur because
     * the site's API only acknowledges a toggle of the value.  The image
     * object is updated with the new status.  If the image is already
     * not favorited, no action is taken.
     * <p>
     * NOTE: An account can't unfavorite its own images.
     * <p>
     * <b>ACCESS: AUTHENTICATED USER</b>
     * @param image the image to favorite.
     * @return the updated image object
     * @throws BaringoApiException argh
     */
    public Image unfavoriteImage( Image image ) throws BaringoApiException {
        if( !client.authService().isUserAuthenticated() ) {
            throw new BaringoAuthException( "No user logged in", 401 );
        } // if
        if( !image.isFavorite() ) {
            return image; // already done
        } // if
        Call<ImgurResponseWrapper<Image>> call =
                client.getApi().toggleImageFavorite( image.getId() );

        try {
            Response<ImgurResponseWrapper<Image>> res = call.execute();
            ImgurResponseWrapper<Image> out = res.body();
            client.throwOnWrapperError( res );

            return out.getData();
        } catch (IOException e) {
            throw new BaringoApiException( e.getMessage() );
        }
    }

    // ================================================

    // Package-private: constructed by BaringoClient only.
    protected ImageService( BaringoClient imgurClient, GsonBuilder gsonBuilder ) {
        this.client = imgurClient;

        // extension -> mime type table used by uploadLocalImage() inference
        extensionToMimeType.put( "apng", "image/png" );
        extensionToMimeType.put( "gif", "image/gif" );
        extensionToMimeType.put( "jpeg", "image/jpeg" );
        extensionToMimeType.put( "jpg", "image/jpeg" );
        extensionToMimeType.put( "pdf", "application/pdf" );
        extensionToMimeType.put( "png", "image/png" );
        extensionToMimeType.put( "tif", "image/tiff" );
        extensionToMimeType.put( "tiff", "image/tiff" );
        extensionToMimeType.put( "xcf", "image/xcf" );
    } // constructor

    // Only assigned in the constructor.
    private final BaringoClient client;
    private final HashMap<String,String> extensionToMimeType = new HashMap<>();

} // class ImageService
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.jstorm.daemon.supervisor;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import backtype.storm.utils.LocalState;
import com.alibaba.jstorm.client.ConfigExtension;
import com.alibaba.jstorm.cluster.Common;
import com.alibaba.jstorm.daemon.worker.LocalAssignment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.Config;
import com.alibaba.jstorm.callback.RunnableCallback;
import com.alibaba.jstorm.cluster.StormClusterState;
import com.alibaba.jstorm.cluster.StormConfig;
import com.alibaba.jstorm.utils.JStormServerUtils;
import com.alibaba.jstorm.utils.JStormUtils;
import com.alibaba.jstorm.utils.TimeUtils;
/**
* supervisor Heartbeat, just write SupervisorInfo to ZK
*
* @author Johnfang (xiaojian.fxj@alibaba-inc.com)
*/
class Heartbeat extends RunnableCallback {
    private static final Logger LOG = LoggerFactory.getLogger(Heartbeat.class);

    private static final int CPU_THREADHOLD = 4;
    private static final long MEM_THREADHOLD = 8 * JStormUtils.SIZE_1_G;

    private Map<Object, Object> conf;
    private StormClusterState stormClusterState;
    private String supervisorId;
    private String myHostName;
    private final int startTime;
    private final int frequence;
    private SupervisorInfo supervisorInfo;
    // set to true whenever a heartbeat should be (re)written on the next run()
    private AtomicBoolean hbUpdateTrigger;

    //protected HealthStatus oldHealthStatus;
    //protected volatile HealthStatus healthStatus;
    protected MachineCheckStatus oldCheckStatus;
    protected volatile MachineCheckStatus checkStatus;
    private LocalState localState;

    /**
     * @param conf              supervisor configuration map
     * @param stormClusterState cluster state accessor (backed by ZK)
     * @param supervisorId      id of this supervisor
     * @param localState        local persisted state holding worker assignments
     * @param status            shared machine health-check status
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    public Heartbeat(Map conf, StormClusterState stormClusterState, String supervisorId, LocalState localState,
                     MachineCheckStatus status) {
        String myHostName = JStormServerUtils.getHostName(conf);

        this.stormClusterState = stormClusterState;
        this.supervisorId = supervisorId;
        this.conf = conf;
        this.myHostName = myHostName;
        this.startTime = TimeUtils.current_time_secs();
        this.frequence = JStormUtils.parseInt(conf.get(Config.SUPERVISOR_HEARTBEAT_FREQUENCY_SECS));
        this.hbUpdateTrigger = new AtomicBoolean(true);
        this.localState = localState;
        this.checkStatus = status;
        // keep a snapshot of the check status to detect transitions in update()
        oldCheckStatus = new MachineCheckStatus();
        oldCheckStatus.SetType(this.checkStatus.getType());

        initSupervisorInfo(conf);

        LOG.info("Successfully init supervisor heartbeat thread, " + supervisorInfo);
    }

    // Builds the initial SupervisorInfo; refuses to run in distributed mode
    // when the resolved hostname is a loopback name, since other nodes could
    // never reach this supervisor through it.
    private void initSupervisorInfo(Map conf) {
        List<Integer> portList = JStormUtils.getSupervisorPortList(conf);

        if (!StormConfig.local_mode(conf)) {
            try {
                boolean isLocaliP = false;
                isLocaliP = myHostName.equals("127.0.0.1") || myHostName.equals("localhost");
                if (isLocaliP) {
                    throw new Exception("the hostname which supervisor get is localhost");
                }
            } catch (Exception e1) {
                LOG.error("get supervisor host error!", e1);
                throw new RuntimeException(e1);
            }
        }
        Set<Integer> ports = JStormUtils.listToSet(portList);
        supervisorInfo = new SupervisorInfo(myHostName, supervisorId, ports);
    }

    /**
     * Refreshes SupervisorInfo (timestamps and available worker ports) and
     * writes it to ZK. On a transition into warning/error/panic the port set
     * is emptied so the scheduler stops assigning work to this machine.
     */
    @SuppressWarnings("unchecked")
    public void update() {
        supervisorInfo.setTimeSecs(TimeUtils.current_time_secs());
        supervisorInfo.setUptimeSecs((int) (TimeUtils.current_time_secs() - startTime));

        if (!checkStatus.equals(oldCheckStatus)) {
            if (checkStatus.getType() == MachineCheckStatus.StatusType.warning
                    || checkStatus.getType() == MachineCheckStatus.StatusType.error
                    || checkStatus.getType() == MachineCheckStatus.StatusType.panic) {
                Set<Integer> ports = new HashSet<Integer>();
                supervisorInfo.setWorkerPorts(ports);
                LOG.warn("due to no enough resourse, limit supervisor's ports and block scheduling");
            } else {
                updateSupervisorInfo();
            }
            oldCheckStatus.SetType(checkStatus.getType());
        } else {
            updateSupervisorInfo();
        }
        try {
            stormClusterState.supervisor_heartbeat(supervisorId, supervisorInfo);
        } catch (Exception e) {
            LOG.error("Failed to update SupervisorInfo to ZK", e);
        }
    }

    private void updateSupervisorInfo() {
        List<Integer> portList = calculatorAvailablePorts();
        Set<Integer> ports = JStormUtils.listToSet(portList);
        supervisorInfo.setWorkerPorts(ports);
    }

    public MachineCheckStatus getCheckStatus() {
        return checkStatus;
    }

    @Override
    public Object getResult() {
        // RunnableCallback contract: result is the delay (secs) until next run
        return frequence;
    }

    @Override
    public void run() {
        boolean updateHb = hbUpdateTrigger.getAndSet(false);
        if (updateHb) {
            update();
        }
    }

    public int getStartTime() {
        return startTime;
    }

    public SupervisorInfo getSupervisorInfo() {
        return supervisorInfo;
    }

    public void updateHbTrigger(boolean update) {
        hbUpdateTrigger.set(update);
    }

    /**
     * Computes the worker ports this supervisor should advertise, shrinking
     * the list when free memory or CPU falls below the configured reserve.
     * Falls back to the full configured port list on any failure.
     */
    private List<Integer> calculatorAvailablePorts() {
        if (JStormUtils.getTotalCpuUsage() <= 0.0 || !ConfigExtension.isSupervisorEnableAutoAdjustSlots(conf)) {
            return JStormUtils.getSupervisorPortList(conf);
        }

        long freeMemory = JStormUtils.getFreePhysicalMem() * 1024L;
        long reserveMemory = ConfigExtension.getStormMachineReserveMem(conf);
        if (freeMemory < reserveMemory) {
            // memory exhausted: only advertise ports that already carry workers
            List<Integer> list = null;
            try {
                list = getLocalAssignmentPortList();
            } catch (IOException e) {
                return JStormUtils.getSupervisorPortList(conf);
            }
            if (list == null)
                return new ArrayList<Integer>();
            return list;
        }

        int reserveCpuUsage = ConfigExtension.getStormMachineReserveCpuPercent(conf);
        double cpuUsage = JStormUtils.getTotalCpuUsage();
        if (cpuUsage > (100D - reserveCpuUsage)) {
            // CPU exhausted: only advertise ports that already carry workers
            List<Integer> list = null;
            try {
                list = getLocalAssignmentPortList();
            } catch (IOException e) {
                return JStormUtils.getSupervisorPortList(conf);
            }
            if (list == null)
                return new ArrayList<Integer>();
            return list;
        }

        Long conversionAvailableCpuNum = Math.round((100 - cpuUsage) / 100 * JStormUtils.getNumProcessors());
        Long availablePhysicalMemorySize = freeMemory - reserveMemory;
        int portNum = JStormUtils.getSupervisorPortNum(
                conf, conversionAvailableCpuNum.intValue(), availablePhysicalMemorySize, true);

        List<Integer> portList = JStormUtils.getSupervisorPortList(conf);
        List<Integer> usedList = null;
        try {
            usedList = getLocalAssignmentPortList();
        } catch (Exception e) {
            return JStormUtils.getSupervisorPortList(conf);
        }
        // getLocalAssignmentPortList() returns null when there is no local
        // assignment; previously that null flowed into removeAll() and threw
        // an uncaught NullPointerException.
        if (usedList == null) {
            usedList = new ArrayList<Integer>();
        }
        portList.removeAll(usedList);
        Collections.sort(portList);
        //Collections.sort(usedList);
        if (portNum >= portList.size()) {
            return JStormUtils.getSupervisorPortList(conf);
        } else {
            List<Integer> reportPortList = new ArrayList<Integer>();
            reportPortList.addAll(usedList);
            // take the first portNum free ports; the old loop ran from 1 to
            // portNum inclusive, skipping index 0 (off-by-one)
            for (int i = 0; i < portNum; i++) {
                reportPortList.add(portList.get(i));
            }
            return reportPortList;
        }
    }

    // Reads the port->assignment map persisted in local state; returns null
    // when nothing has been assigned yet.
    @SuppressWarnings("unchecked")
    private List<Integer> getLocalAssignmentPortList() throws IOException {
        Map<Integer, LocalAssignment> localAssignment = null;
        try {
            localAssignment = (Map<Integer, LocalAssignment>) localState.get(Common.LS_LOCAL_ASSIGNMENTS);
        } catch (IOException e) {
            LOG.error("get LS_LOCAL_ASSIGNMENTS of localState failed .");
            throw e;
        }
        if (localAssignment == null) {
            return null;
        }
        return JStormUtils.mk_list(localAssignment.keySet());
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.spi.block;
import org.openjdk.jol.info.ClassLayout;
import javax.annotation.Nullable;
import java.util.Optional;
import java.util.function.BiConsumer;
import static com.facebook.presto.spi.block.BlockUtil.checkArrayRange;
import static com.facebook.presto.spi.block.BlockUtil.checkValidRegion;
import static com.facebook.presto.spi.block.BlockUtil.compactArray;
import static com.facebook.presto.spi.block.BlockUtil.countUsedPositions;
import static com.facebook.presto.spi.block.BlockUtil.internalPositionInRange;
import static io.airlift.slice.SizeOf.sizeOf;
/**
 * A fixed-size {@link Block} of single-byte values backed by a primitive
 * {@code byte[]} plus an optional parallel null-flag array.
 *
 * <p>Caller-visible positions are relative: {@code arrayOffset} translates
 * them into indexes of the shared backing arrays, which lets
 * {@link #getRegion(int, int)} return zero-copy views over the same arrays.
 */
public class ByteArrayBlock
        implements Block
{
    private static final int INSTANCE_SIZE = ClassLayout.parseClass(ByteArrayBlock.class).instanceSize();

    // Index of the first readable element in the backing arrays.
    private final int arrayOffset;
    private final int positionCount;
    // Parallel to "values"; true marks the position as null. May be absent.
    @Nullable
    private final boolean[] valueIsNull;
    private final byte[] values;

    private final long sizeInBytes;
    private final long retainedSizeInBytes;

    public ByteArrayBlock(int positionCount, Optional<boolean[]> valueIsNull, byte[] values)
    {
        this(0, positionCount, valueIsNull.orElse(null), values);
    }

    ByteArrayBlock(int arrayOffset, int positionCount, boolean[] valueIsNull, byte[] values)
    {
        if (arrayOffset < 0) {
            throw new IllegalArgumentException("arrayOffset is negative");
        }
        this.arrayOffset = arrayOffset;

        if (positionCount < 0) {
            throw new IllegalArgumentException("positionCount is negative");
        }
        this.positionCount = positionCount;

        if (values.length - arrayOffset < positionCount) {
            throw new IllegalArgumentException("values length is less than positionCount");
        }
        this.values = values;

        if (valueIsNull != null && valueIsNull.length - arrayOffset < positionCount) {
            throw new IllegalArgumentException("isNull length is less than positionCount");
        }
        this.valueIsNull = valueIsNull;

        // Each position is accounted as one data byte plus one null-flag byte.
        this.sizeInBytes = (Byte.BYTES + Byte.BYTES) * (long) positionCount;
        this.retainedSizeInBytes = INSTANCE_SIZE + sizeOf(valueIsNull) + sizeOf(values);
    }

    @Override
    public long getSizeInBytes()
    {
        return sizeInBytes;
    }

    @Override
    public long getRegionSizeInBytes(int position, int length)
    {
        // Same per-position accounting as sizeInBytes, scoped to the region.
        return (Byte.BYTES + Byte.BYTES) * (long) length;
    }

    @Override
    public long getPositionsSizeInBytes(boolean[] positions)
    {
        return (Byte.BYTES + Byte.BYTES) * (long) countUsedPositions(positions);
    }

    @Override
    public long getRetainedSizeInBytes()
    {
        return retainedSizeInBytes;
    }

    @Override
    public long getEstimatedDataSizeForStats(int position)
    {
        // Nulls contribute no data bytes to statistics.
        return isNull(position) ? 0 : Byte.BYTES;
    }

    @Override
    public void retainedBytesForEachPart(BiConsumer<Object, Long> consumer)
    {
        consumer.accept(values, sizeOf(values));
        if (valueIsNull != null) {
            consumer.accept(valueIsNull, sizeOf(valueIsNull));
        }
        consumer.accept(this, (long) INSTANCE_SIZE);
    }

    @Override
    public int getPositionCount()
    {
        return positionCount;
    }

    @Override
    public byte getByte(int position)
    {
        checkReadablePosition(position);
        return getByteUnchecked(position + arrayOffset);
    }

    @Override
    public boolean mayHaveNull()
    {
        return valueIsNull != null;
    }

    @Override
    public boolean isNull(int position)
    {
        checkReadablePosition(position);
        return valueIsNull != null && isNullUnchecked(position + arrayOffset);
    }

    @Override
    public void writePositionTo(int position, BlockBuilder blockBuilder)
    {
        checkReadablePosition(position);
        blockBuilder.writeByte(getByteUnchecked(position + arrayOffset));
        blockBuilder.closeEntry();
    }

    @Override
    public Block getSingleValueBlock(int position)
    {
        checkReadablePosition(position);
        boolean[] singleNull = isNull(position) ? new boolean[] {true} : null;
        byte[] singleValue = new byte[] {values[position + arrayOffset]};
        return new ByteArrayBlock(0, 1, singleNull, singleValue);
    }

    @Override
    public Block copyPositions(int[] positions, int offset, int length)
    {
        checkArrayRange(positions, offset, length);

        boolean[] copiedNulls = (valueIsNull == null) ? null : new boolean[length];
        byte[] copiedValues = new byte[length];
        for (int i = 0; i < length; i++) {
            int position = positions[offset + i];
            checkReadablePosition(position);
            copiedValues[i] = values[position + arrayOffset];
            if (copiedNulls != null) {
                copiedNulls[i] = valueIsNull[position + arrayOffset];
            }
        }
        return new ByteArrayBlock(0, length, copiedNulls, copiedValues);
    }

    @Override
    public Block getRegion(int positionOffset, int length)
    {
        checkValidRegion(getPositionCount(), positionOffset, length);
        // Zero-copy view: shift the offset instead of copying the arrays.
        return new ByteArrayBlock(positionOffset + arrayOffset, length, valueIsNull, values);
    }

    @Override
    public Block copyRegion(int positionOffset, int length)
    {
        checkValidRegion(getPositionCount(), positionOffset, length);

        int from = positionOffset + arrayOffset;
        boolean[] compactNulls = (valueIsNull == null) ? null : compactArray(valueIsNull, from, length);
        byte[] compactValues = compactArray(values, from, length);
        if (compactNulls == valueIsNull && compactValues == values) {
            // compactArray returned the originals unchanged; this block is
            // already compact, so no new instance is needed.
            return this;
        }
        return new ByteArrayBlock(0, length, compactNulls, compactValues);
    }

    @Override
    public String getEncodingName()
    {
        return ByteArrayBlockEncoding.NAME;
    }

    @Override
    public String toString()
    {
        return "ByteArrayBlock{positionCount=" + getPositionCount() + '}';
    }

    private void checkReadablePosition(int position)
    {
        if (position < 0 || position >= getPositionCount()) {
            throw new IllegalArgumentException("position is not valid");
        }
    }

    @Override
    public int getOffsetBase()
    {
        return arrayOffset;
    }

    @Override
    public boolean isNullUnchecked(int internalPosition)
    {
        assert mayHaveNull() : "no nulls present";
        assert internalPositionInRange(internalPosition, getOffsetBase(), getPositionCount());
        return valueIsNull[internalPosition];
    }

    @Override
    public byte getByteUnchecked(int internalPosition)
    {
        assert internalPositionInRange(internalPosition, getOffsetBase(), getPositionCount());
        return values[internalPosition];
    }
}
| |
/*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.provider;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.impl.builder.StAXOMBuilder;
import org.apache.axiom.om.impl.dom.factory.OMDOMFactory;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.rahas.RahasData;
import org.apache.ws.security.WSConstants;
import org.apache.xml.security.utils.Constants;
import org.w3c.dom.Element;
import org.wso2.carbon.identity.base.IdentityConstants;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.IdentityClaimManager;
import org.wso2.carbon.identity.core.util.IdentityTenantUtil;
import org.wso2.carbon.user.core.UserStoreManager;
import org.wso2.carbon.user.core.claim.Claim;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
 * Holds the identity-provider data extracted from a WS-Trust issue request:
 * the requested claims, their resolved values, the requested token type and
 * the authenticated user's identifier. Subclasses specialize the extraction
 * steps (e.g. CardSpace-specific processing) by overriding the protected
 * hook methods.
 */
public class GenericIdentityProviderData {

    public static final String USERMAN_SERVICE = "UserManServiceURL";
    public static final String USER_CLASS = "UserClass";
    private static final Log log = LogFactory.getLog(GenericIdentityProviderData.class);

    protected String cardID = null;
    // Requested claim URI -> claim data (populated in processClaimData).
    protected Map<String, RequestedClaimData> requestedClaims = new HashMap<String, RequestedClaimData>();
    // Claim URI -> claim metadata supported by this provider (populated in loadClaims).
    protected Map<String, Claim> supportedClaims = new HashMap<String, Claim>();
    protected String displayTokenLang = null;
    protected int authMechanism = -1;
    protected X509Certificate rpCert;
    protected String userIdentifier = null;
    protected String requiredTokenType = null;

    /**
     * Populate CardSpace specific meta-data.
     *
     * @param data WS-Trust information in the issue request.
     * @throws IdentityProviderException
     * @throws ClassNotFoundException
     */
    public GenericIdentityProviderData(RahasData data) throws IdentityProviderException, ClassNotFoundException {
        OMElement rstElem = null;
        OMElement claimElem = null;
        rstElem = data.getRstElement();
        claimElem = data.getClaimElem();
        // Order matters: the user identifier must be resolved before claims
        // are loaded (loadClaims resolves the realm from it).
        readAuthenticationMechanism(data);
        processUserIdentifier(data);
        loadClaims();
        processClaimData(data, claimElem);
        processInfoCardReference(rstElem);
        readRequestedTokenType(data);
        populateClaimValues(data);
        extracAndValidatetRPCert(data);
    }

    public String getRequiredTokenType() {
        return requiredTokenType;
    }

    /**
     * Sets the requested token type, falling back to the default token type
     * when the given value is blank.
     */
    public void setRequiredTokenType(String requiredTokenType) {
        if (StringUtils.isBlank(requiredTokenType)) {
            this.requiredTokenType = getDefautTokenType();
        } else {
            this.requiredTokenType = requiredTokenType;
        }
    }

    /**
     * This parameter specifies the type of the token to be requested from the STS as a URI. This
     * parameter can be omitted if the STS and the Web site front-end have a mutual understanding
     * about what token type will be provided, or if the Web site is willing to accept any token
     * type.
     *
     * @return Default Token Type
     */
    public String getDefautTokenType() {
        return IdentityConstants.SAML10_URL;
    }

    public X509Certificate getRpCert() {
        return rpCert;
    }

    /**
     * @return the identifier of the authenticated user, or {@code null} if it
     *         has not been resolved.
     */
    public String getUserIdentifier() {
        // Fixed: previously always returned null even though the field is
        // populated in processUserIdentifier().
        return userIdentifier;
    }

    /**
     * @return the information card identifier set by a subclass, or {@code null}.
     */
    public String getCardID() {
        // Fixed: return the stored field instead of a hard-coded null.
        return cardID;
    }

    /**
     * @return the requested display-token language set by a subclass, or {@code null}.
     */
    public String getDisplayTokenLang() {
        // Fixed: return the stored field instead of a hard-coded null.
        return displayTokenLang;
    }

    public String getDisplayName(String uri) {
        // NOTE(review): always returns null here; presumably subclasses look
        // up the display tag from supportedClaims — confirm before relying on it.
        return null;
    }

    public String getTenantDomain() throws IdentityProviderException {
        // NOTE(review): always returns null here; subclasses appear to be
        // responsible for resolving the tenant domain — confirm with callers.
        return null;
    }

    /**
     * Loads all claims supported by this identity provider for the InfoCard
     * dialect, plus a synthetic "tenant domain" claim.
     *
     * @throws IdentityProviderException if the claim manager or realm lookup fails
     */
    protected void loadClaims() throws IdentityProviderException {
        IdentityClaimManager claimManager = null;
        Claim[] claims = null;
        if (log.isDebugEnabled()) {
            log.debug("Loading claims");
        }
        try {
            claimManager = IdentityClaimManager.getInstance();
            claims = claimManager.getAllSupportedClaims(IdentityConstants.INFOCARD_DIALECT, IdentityTenantUtil
                    .getRealm(null, userIdentifier));
            for (int i = 0; i < claims.length; i++) {
                Claim temp = claims[i];
                supportedClaims.put(temp.getClaimUri(), temp);
            }
            // The tenant-domain claim is not stored in the claim manager; it
            // is synthesized here and resolved in populateClaimValues().
            Claim tenant = new Claim();
            tenant.setClaimUri(IdentityConstants.CLAIM_TENANT_DOMAIN);
            tenant.setDescription("Tenant");
            tenant.setDisplayTag("Tenant");
            tenant.setSupportedByDefault(true);
            tenant.setDialectURI("http://wso2.org");
            supportedClaims.put(tenant.getClaimUri(), tenant);
        } catch (IdentityException e) {
            log.error("Error while loading claims", e);
            throw new IdentityProviderException("Error while loading claims", e);
        }
    }

    /**
     * Parses the requested-claim elements out of the WS-Trust Claims element
     * and records them in {@link #requestedClaims}.
     *
     * @param rahasData WS-Trust request data (unused here; available to subclasses)
     * @param claims    the Claims element, may be {@code null}
     * @throws IdentityProviderException if a claim element has no Uri attribute
     */
    protected void processClaimData(RahasData rahasData, OMElement claims) throws IdentityProviderException {
        if (claims == null) {
            return;
        }
        if (log.isDebugEnabled()) {
            log.debug("Processing claim data");
        }
        Iterator iterator = null;
        iterator = claims.getChildrenWithName(
                new QName(IdentityConstants.NS, IdentityConstants.LocalNames.IDENTITY_CLAIM_TYPE));
        while (iterator.hasNext()) {
            OMElement omElem = null;
            RequestedClaimData claim = null;
            String uriClaim = null;
            String optional = null;
            omElem = (OMElement) iterator.next();
            claim = getRequestedClaim();
            uriClaim = omElem.getAttributeValue(new QName(null, "Uri"));
            if (uriClaim == null) {
                log.error("Empty claim uri found while processing claim data");
                throw new IdentityProviderException(
                        "Empty claim uri found while processing claim data");
            }
            claim.setUri(uriClaim);
            optional = (omElem.getAttributeValue(new QName(null, "Optional")));
            if (StringUtils.isNotBlank(optional)) {
                claim.setBOptional("true".equals(optional));
            } else {
                // Absent Optional attribute is treated as optional.
                claim.setBOptional(true);
            }
            requestedClaims.put(claim.getUri(), claim);
        }
    }

    /**
     * Hook for subclasses to process the InfoCard reference in the RST.
     *
     * @param rst the RequestSecurityToken element
     * @throws IdentityProviderException
     */
    protected void processInfoCardReference(OMElement rst) throws IdentityProviderException {
        // In the generic case we have nothing to do here.
    }

    /**
     * Hook for subclasses to determine the authentication mechanism used.
     *
     * @param data WS-Trust request data
     * @throws IdentityProviderException
     */
    protected void readAuthenticationMechanism(RahasData data) throws IdentityProviderException {
        // In the generic case we have nothing to do here.
    }

    /**
     * Extract the relying party certificate and validate it.
     *
     * @param data Information in the RST extracted by Rahas.
     */
    protected void extracAndValidatetRPCert(RahasData data) throws IdentityProviderException {
        // In the generic case we have nothing to do here.
    }

    /**
     * Obtain the user identifier depending on the authentication mechanism used.
     *
     * @param rahasData WS-Trust request data carrying the authenticated principal
     */
    protected void processUserIdentifier(RahasData rahasData) throws IdentityProviderException {
        if (log.isDebugEnabled()) {
            log.debug("Processing user identifier");
        }
        userIdentifier = rahasData.getPrincipal().getName();
    }

    /**
     * Resolves the values of the requested claims from the user store and the
     * tenant information, and stores them on the corresponding
     * {@link RequestedClaimData} entries.
     *
     * @param rahasData WS-Trust request data (unused here; available to subclasses)
     * @throws IdentityProviderException if the user store cannot be reached or queried
     */
    protected void populateClaimValues(RahasData rahasData) throws IdentityProviderException {
        UserStoreManager connector = null;
        if (log.isDebugEnabled()) {
            log.debug("Populating claim values");
        }
        try {
            connector = IdentityTenantUtil.getRealm(null, userIdentifier).getUserStoreManager();
        } catch (Exception e) {
            log.error("Error while instantiating IdentityUserStore", e);
            throw new IdentityProviderException("Error while instantiating IdentityUserStore", e);
        }
        // Collect the claim URIs to query; PPID and tenant-domain claims are
        // resolved locally, not from the user store.
        Iterator<RequestedClaimData> ite = requestedClaims.values().iterator();
        List<String> claimList = new ArrayList<String>();
        while (ite.hasNext()) {
            RequestedClaimData claim = ite.next();
            if (claim != null && !claim.getUri().equals(IdentityConstants.CLAIM_PPID) &&
                    !claim.getUri().equals(IdentityConstants.CLAIM_TENANT_DOMAIN)) {
                claimList.add(claim.getUri());
            }
        }
        String[] claims = new String[claimList.size()];
        String userId = MultitenantUtils.getTenantAwareUsername(userIdentifier);
        Map<String, String> mapValues = null;
        try {
            mapValues = connector.getUserClaimValues(userId, claimList.toArray(claims), null);
        } catch (Exception e) {
            throw new IdentityProviderException(e.getMessage(), e);
        }
        ite = requestedClaims.values().iterator();
        while (ite.hasNext()) {
            RequestedClaimData claimData = ite.next();
            if (IdentityConstants.CLAIM_TENANT_DOMAIN.equals(claimData.getUri())) {
                String domainName = null;
                domainName = MultitenantUtils.getTenantDomain(userIdentifier);
                if (domainName == null) {
                    domainName = IdentityConstants.DEFAULT_SUPER_TENAT;
                }
                claimData.setValue(domainName);
            } else {
                claimData.setValue(mapValues.get(claimData.getUri()));
            }
        }
    }

    /**
     * Validate the given ds:KeyInfo element against the stored ds:KeyInfo element.
     *
     * @param issuerInfo Stored ds:KeyInfo element as a <code>java.lang.String</code>.
     * @param keyInfo    The incoming ds:KeyInfo element as a <code>org.w3c.dom.Element</code>.
     * @return true if the information matches, otherwise false.
     * @throws IdentityProviderException if either KeyInfo is not an RSAKeyValue
     *                                   structure or the stored XML cannot be parsed
     */
    protected boolean validateKeyInfo(String issuerInfo, Element keyInfo) throws IdentityProviderException {
        if (log.isDebugEnabled()) {
            log.debug("Validating key info");
        }
        try {
            // Parse with an explicit charset so the result does not depend on
            // the platform default encoding.
            OMElement elem = new StAXOMBuilder(new ByteArrayInputStream(issuerInfo.getBytes(StandardCharsets.UTF_8)))
                    .getDocumentElement();
            OMElement keyValueElem = elem.getFirstElement();
            if (keyValueElem != null &&
                    keyValueElem.getQName().equals(new QName(WSConstants.SIG_NS, Constants._TAG_KEYVALUE))) {
                // KeyValue structure : expect an RSAKeyValue
                OMElement rsaKeyValueElem = keyValueElem.getFirstElement();
                if (rsaKeyValueElem != null &&
                        rsaKeyValueElem.getQName().equals(new QName(WSConstants.SIG_NS, Constants._TAG_RSAKEYVALUE))) {
                    String modulus =
                            rsaKeyValueElem.getFirstChildWithName(new QName(WSConstants.SIG_NS, Constants._TAG_MODULUS))
                                    .getText().trim();
                    String exponent = rsaKeyValueElem
                            .getFirstChildWithName(new QName(WSConstants.SIG_NS, Constants._TAG_EXPONENT)).getText()
                            .trim();
                    // Now process the incoming element to check for ds:RSAKeyValue
                    OMElement receivedKeyInfoElem = (OMElement) new OMDOMFactory().getDocument().importNode(keyInfo,
                            true);
                    OMElement receivedKeyValueElem = receivedKeyInfoElem.getFirstElement();
                    if (receivedKeyValueElem != null && receivedKeyValueElem.getQName()
                            .equals(new QName(WSConstants.SIG_NS,
                                    Constants._TAG_KEYVALUE))) {
                        OMElement receivedRsaKeyValueElem = receivedKeyValueElem.getFirstChildWithName(
                                new QName(WSConstants.SIG_NS, Constants._TAG_RSAKEYVALUE));
                        if (receivedRsaKeyValueElem != null) {
                            // Obtain incoming mod and exp
                            String receivedModulus = receivedRsaKeyValueElem
                                    .getFirstChildWithName(new QName(WSConstants.SIG_NS, Constants._TAG_MODULUS))
                                    .getText().trim();
                            String receivedExponent = receivedRsaKeyValueElem.getFirstChildWithName(
                                    new QName(WSConstants.SIG_NS, Constants._TAG_EXPONENT)).getText().trim();
                            // Compare
                            return modulus.equals(receivedModulus) && exponent.equals(receivedExponent);
                        } else {
                            log.error("Unknown received KeyInfo type");
                            throw new IdentityProviderException("Unknown received KeyInfo type");
                        }
                    } else {
                        log.error("Unknown received KeyInfo type");
                        throw new IdentityProviderException("Unknown received KeyInfo type");
                    }
                } else {
                    // Fixed: this branch previously logged an unrelated
                    // copy-pasted message about IdentityUserStore.
                    log.error("Unknown received KeyInfo type");
                    throw new IdentityProviderException("Unknown received KeyInfo type");
                }
            } else {
                log.error("Unknown stored KeyInfo type");
                throw new IdentityProviderException("Unknown stored KeyInfo type");
            }
        } catch (XMLStreamException e) {
            log.error("Error parsing stored KeyInfo", e);
            // Preserve the cause so callers can see the parse failure details.
            throw new IdentityProviderException("Error parsing stored KeyInfo", e);
        }
    }

    /**
     * Reads the requested token type from the RST, falling back to the
     * default token type when it is absent or blank.
     */
    protected void readRequestedTokenType(RahasData data) {
        requiredTokenType = data.getTokenType();
        if (requiredTokenType == null || requiredTokenType.trim().length() == 0) {
            requiredTokenType = getDefautTokenType();
        }
    }

    public Map<String, RequestedClaimData> getRequestedClaims() {
        return requestedClaims;
    }

    /**
     * @param requestedClaims
     */
    public void setRequestedClaims(Map<String, RequestedClaimData> requestedClaims) {
        this.requestedClaims = requestedClaims;
    }

    /**
     * Factory hook so subclasses can supply specialized claim-data instances.
     */
    protected RequestedClaimData getRequestedClaim() {
        return new RequestedClaimData();
    }
}
| |
package id3.tables;
import id3.tables.TableEntryComparator.CompareType;
import id3.tables.abstractid3model.AbstractID3Model;
import id3.utils.Utils;
import javax.swing.*;
import javax.swing.table.JTableHeader;
import javax.swing.table.TableModel;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.io.IOException;
import java.util.Collections;
/** Custom {@code JTable} backed by
* an {@link AbstractID3Model}.
*/
/** Custom {@code JTable} backed by
 * an {@link AbstractID3Model}.
 */
public class ID3Table extends JTable
{
    protected AbstractID3Model model;
    protected JPopupMenu rightClickMenu = new JPopupMenu();

    /** Custom {@code JTable} backed by
     * an {@link AbstractID3Model}.
     * @param model {@link AbstractID3Model} to back
     */
    public ID3Table(AbstractID3Model model)
    {
        this.setModel(model);
        this.setAutoResizeMode(JTable.AUTO_RESIZE_NEXT_COLUMN);
        this.getTableHeader().setReorderingAllowed(false);
        registerRightClick();
        registerTableHeader();
    }

    /** Adds right click options and logic
     * to the table.
     */
    private void registerRightClick()
    {
        JMenuItem itemRemove = new JMenuItem("Remove Selected");
        itemRemove.addActionListener(e ->
        {
            int[] selectedrows = ID3Table.this.getSelectedRows();
            // Iterate backwards so removing a row does not shift the indexes
            // of the rows still pending removal.
            for(int i = selectedrows.length - 1; i >= 0; i--)
            {
                int row = selectedrows[i];
                ID3Table.this.getModel().getTableEntries().remove(row);
                // Fixed: notify listeners with the removed row's index, not
                // the loop counter (which pointed at unrelated rows).
                ID3Table.this.getModel().fireTableRowsDeleted(row, row);
            }
            ID3Table.this.clearSelection();
        });
        rightClickMenu.add(itemRemove);

        JMenuItem itemOpen = new JMenuItem("Open File Location");
        itemOpen.addActionListener(e ->
        {
            int[] selectedrows = ID3Table.this.getSelectedRows();
            for(int current : selectedrows)
            {
                TableEntry tableEntry = ID3Table.this.getModel().getTableEntries().get(current);
                String path = Utils.convertForwardToBackSlash(tableEntry.FilePath);
                try
                {
                    // Fixed: ProcessBuilder passes the path as one argument,
                    // so paths containing spaces are not split the way
                    // Runtime.exec(String) would split them.
                    new ProcessBuilder("explorer", path).start();
                }
                catch (IOException ex)
                {
                    ex.printStackTrace();
                }
            }
        });
        rightClickMenu.add(itemOpen);

        this.addMouseListener(new MouseListener()
        {
            @Override
            public void mousePressed(MouseEvent e)
            {
                int r = ID3Table.this.rowAtPoint(e.getPoint());
                if (r >= 0 && r < ID3Table.this.getRowCount())
                {
                    // Keep the existing multi-selection when the press lands
                    // on an already-selected row; otherwise select that row.
                    int[] selected = ID3Table.this.getSelectedRows();
                    for(int current : selected)
                    {
                        if(r==current)
                        {
                            return;
                        }
                    }
                    ID3Table.this.setRowSelectionInterval(r, r);
                }
                else
                {
                    ID3Table.this.clearSelection();
                }
            }
            @Override
            public void mouseReleased(MouseEvent e)
            {
                if(e.isPopupTrigger())
                {
                    rightClickMenu.show(e.getComponent(), e.getX(), e.getY());
                }
            }
            public void mouseClicked(MouseEvent arg0) {}
            public void mouseEntered(MouseEvent arg0) {}
            public void mouseExited(MouseEvent arg0) {}
        });
    }

    /** Provides sorting logic for
     * clicks on the table header.
     */
    private void registerTableHeader()
    {
        this.getTableHeader().addMouseListener(new MouseListener()
        {
            public void mouseClicked(MouseEvent e)
            {
                JTableHeader clickedheader = (JTableHeader) e.getSource();
                String columnName = ID3Table.this.getColumnName(clickedheader.columnAtPoint(e.getPoint()));
                // Map the clicked column's display name onto a comparator.
                switch(columnName)
                {
                    case "Title" :
                    case "Song" :
                    case "Current Song Title" : //TODO reverse sort
                    {
                        Collections.sort(model.getTableEntries(), new TableEntryComparator(CompareType.SongTitle));
                        break;
                    }
                    case "New Title" :
                    case "New Song Title" :
                    {
                        Collections.sort(model.getTableEntries(), new TableEntryComparator(CompareType.NewTitle));
                        break;
                    }
                    case "Artist" :
                    {
                        Collections.sort(model.getTableEntries(), new TableEntryComparator(CompareType.Artist));
                        break;
                    }
                    case "Album" :
                    {
                        Collections.sort(model.getTableEntries(), new TableEntryComparator(CompareType.Album));
                        break;
                    }
                    case "Track #" :
                    {
                        Collections.sort(model.getTableEntries(), new TableEntryComparator(CompareType.TrackNumber));
                        break;
                    }
                    case "Rating" :
                    {
                        Collections.sort(model.getTableEntries(), new TableEntryComparator(CompareType.Rating));
                        break;
                    }
                    case "File Path" :
                    {
                        Collections.sort(model.getTableEntries(), new TableEntryComparator(CompareType.FilePath));
                        break;
                    }
                    case "New File Path" :
                    {
                        Collections.sort(model.getTableEntries(), new TableEntryComparator(CompareType.NewFilePath));
                        break;
                    }
                    case "Field" :
                    case "Status" :
                    {
                        Collections.sort(model.getTableEntries(), new TableEntryComparator(CompareType.Status));
                        break;
                    }
                }
                model.fireTableDataChanged();
            }
            public void mousePressed(MouseEvent e) {}
            public void mouseReleased(MouseEvent e) {}
            public void mouseEntered(MouseEvent e) {}
            public void mouseExited(MouseEvent e) {}
        });
    }

    /** @return the backing {@link AbstractID3Model} (covariant override of
     * {@link JTable#getModel()}).
     */
    public AbstractID3Model getModel()
    {
        return model;
    }

    /** Sets the model, but also calls
     * {@link JTable#setModel(TableModel)}
     * to fire the appropriate update listeners.
     * @param model {@link AbstractID3Model} to back
     * this ID3Table instance.
     */
    public void setModel(AbstractID3Model model)
    {
        super.setModel(model);
        this.model = model;
    }
}
| |
/*
* Copyright (c) 2017-2021 Hugo Dupanloup (Yeregorix)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.smoofyuniverse.common.resource;
import javafx.application.Platform;
import net.smoofyuniverse.common.event.ListenerRegistration;
import net.smoofyuniverse.common.event.resource.LanguageSelectionChangeEvent;
import net.smoofyuniverse.common.event.resource.ResourceModuleChangeEvent;
import net.smoofyuniverse.common.event.resource.TranslatorUpdateEvent;
import net.smoofyuniverse.common.logger.ApplicationLogger;
import net.smoofyuniverse.common.util.StringUtil;
import org.slf4j.Logger;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
/**
 * An interpreter for string resource modules.
 *
 * <p>Keys are resolved against the currently selected language pack first,
 * then the default pack. Observable translations are cached and refreshed on
 * the JavaFX application thread whenever the relevant resources change.
 */
public final class Translator {
	private static final Logger logger = ApplicationLogger.get(Translator.class);

	/**
	 * The resource manager.
	 */
	public final ResourceManager manager;

	// Resolution sources; selectionModule is consulted before defaultModule.
	// defaultModule is nulled when it is the same object as selectionModule
	// (see update()) to avoid resolving against the same module twice.
	private ResourceModule<String> defaultModule, selectionModule;
	// key -> observable translation; concurrent because getTranslation may be
	// called from any thread.
	private final Map<String, ObservableTranslation> cache = new ConcurrentHashMap<>();
	// Every translation ever bound or created; all access is guarded by
	// synchronized (this.translations).
	private final List<ObservableTranslation> translations = new ArrayList<>();

	Translator(ResourceManager manager) {
		this.manager = manager;
	}

	/**
	 * Binds all empty translations in static fields of the class.
	 *
	 * <p>A field qualifies when it is static, of type
	 * {@code ObservableTranslation}, its name is a valid resource key, and the
	 * translation it holds has not been bound yet (empty key).
	 *
	 * @param target The class.
	 * @throws IllegalAccessException If any reflection error occurs.
	 */
	public void bindStaticFields(Class<?> target) throws IllegalAccessException {
		if (target == null)
			throw new IllegalArgumentException("target");

		for (Field f : target.getDeclaredFields()) {
			if (Modifier.isStatic(f.getModifiers()) && ObservableTranslation.class.isAssignableFrom(f.getType()) && ResourceModule.isValidKey(f.getName())) {
				f.setAccessible(true);
				ObservableTranslation translation = (ObservableTranslation) f.get(null);
				// Only bind translations that have not been bound before.
				if (translation.getKey().isEmpty())
					_bindTranslation(translation, f.getName());
			}
		}
	}

	// Registers the translation under the given key; an existing cache entry
	// for the key wins (putIfAbsent), but the translation is always tracked
	// for updates.
	private void _bindTranslation(ObservableTranslation translation, String key) {
		translation.setKey(this, key);
		this.cache.putIfAbsent(key, translation);
		synchronized (this.translations) {
			this.translations.add(translation);
		}
	}

	/**
	 * Binds an empty translation to the given key.
	 *
	 * @param translation The empty translation.
	 * @param key The key.
	 * @throws IllegalArgumentException If the translation is not empty.
	 */
	public void bindTranslation(ObservableTranslation translation, String key) {
		ResourceModule.checkKey(key);
		if (!translation.getKey().isEmpty())
			throw new IllegalArgumentException("translation is not empty");
		_bindTranslation(translation, key);
	}

	/**
	 * Gets an observable translation for the given key.
	 *
	 * @param key The key.
	 * @return The observable translation.
	 */
	public ObservableTranslation getTranslation(String key) {
		ResourceModule.checkKey(key);
		return this.cache.computeIfAbsent(key, this::createTranslation);
	}

	// Creates, binds and tracks a fresh translation for the key; called only
	// from the cache's computeIfAbsent.
	private ObservableTranslation createTranslation(String key) {
		ObservableTranslation translation = new ObservableTranslation();
		translation.setKey(this, key);
		synchronized (this.translations) {
			this.translations.add(translation);
		}
		return translation;
	}

	/**
	 * Translates the key.
	 * Gets the resource associated with the key in the selected language,
	 * then if not found in the default language,
	 * then if not found, returns the key.
	 *
	 * @param key The key.
	 * @return The translation.
	 */
	public String translate(String key) {
		ResourceModule.checkKey(key);
		return _translate(key);
	}

	// Resolution order: selected language, then default language, then the
	// key itself as a last-resort fallback.
	String _translate(String key) {
		if (this.selectionModule != null) {
			Optional<String> value = this.selectionModule.get(key);
			if (value.isPresent())
				return value.get();
		}

		if (this.defaultModule != null) {
			Optional<String> value = this.defaultModule.get(key);
			if (value.isPresent())
				return value.get();
		}

		return key;
	}

	/**
	 * Translates the key and replaces the parameters.
	 * See {@link Translator#translate(String)} and {@link StringUtil#replaceParameters(String, String...)}.
	 *
	 * @param key The key.
	 * @param parameters The parameters.
	 * @return The translation.
	 */
	public String translate(String key, String... parameters) {
		ResourceModule.checkKey(key);
		if (this.selectionModule != null) {
			Optional<String> value = this.selectionModule.get(key);
			if (value.isPresent())
				return StringUtil.replaceParameters(value.get(), parameters);
		}

		if (this.defaultModule != null) {
			Optional<String> value = this.defaultModule.get(key);
			if (value.isPresent())
				return StringUtil.replaceParameters(value.get(), parameters);
		}

		// Note: parameters are intentionally not substituted into the raw key.
		return key;
	}

	// Re-reads the string modules from the manager's packs and refreshes all
	// tracked translations. The refresh runs on the JavaFX thread because
	// ObservableTranslation backs UI bindings.
	private void update() {
		this.defaultModule = this.manager.getDefaultPack().getModule(String.class).orElse(null);
		this.selectionModule = this.manager.getSelectionPack().getModule(String.class).orElse(null);
		// When both packs expose the same module, keep only one reference so
		// _translate does not look the key up twice.
		if (this.defaultModule == this.selectionModule)
			this.defaultModule = null;

		Platform.runLater(() -> {
			synchronized (this.translations) {
				for (ObservableTranslation t : this.translations)
					t.update(this);
			}
			new TranslatorUpdateEvent(this).post();
		});
	}

	/**
	 * Saves the string resource module to the file.
	 *
	 * @param module The module.
	 * @param file The file.
	 * @throws IOException If any I/O error occurs.
	 */
	public static void save(ResourceModule<String> module, Path file) throws IOException {
		try (BufferedWriter writer = Files.newBufferedWriter(file)) {
			save(module, writer);
		}
	}

	/**
	 * Saves the string resource module to the writer.
	 *
	 * <p>Each entry is written as {@code key=value} on its own line.
	 * NOTE(review): values are written verbatim while {@code load} applies
	 * {@link StringUtil#unescape} — confirm values are pre-escaped by callers
	 * if round-tripping is expected.
	 *
	 * @param module The module.
	 * @param writer The writer.
	 * @throws IOException If any I/O error occurs.
	 */
	public static void save(ResourceModule<String> module, BufferedWriter writer) throws IOException {
		for (Entry<String, String> e : module.toMap().entrySet()) {
			writer.write(e.getKey());
			writer.write('=');
			writer.write(e.getValue());
			writer.newLine();
		}
	}

	/**
	 * Loads a string resource module from the file.
	 *
	 * @param file The file.
	 * @return The new string resource module.
	 * @throws IOException If any I/O error occurs.
	 */
	public static ResourceModule<String> load(Path file) throws IOException {
		ResourceModule.Builder<String> builder = ResourceModule.builder(String.class);
		load(builder, file);
		return builder.build();
	}

	/**
	 * Loads a string resource module from the file.
	 *
	 * @param builder The module builder.
	 * @param file The file.
	 * @throws IOException If any I/O error occurs.
	 */
	public static void load(ResourceModule.Builder<String> builder, Path file) throws IOException {
		try (BufferedReader reader = Files.newBufferedReader(file)) {
			load(builder, reader);
		}
	}

	/**
	 * Loads a string resource module from the reader.
	 *
	 * <p>Blank lines and lines starting with {@code #} are skipped; every
	 * other line must contain a {@code key=value} pair. Values are unescaped
	 * with {@link StringUtil#unescape}.
	 *
	 * @param builder The module builder.
	 * @param reader The reader.
	 * @throws IOException If any I/O error occurs.
	 */
	public static void load(ResourceModule.Builder<String> builder, BufferedReader reader) throws IOException {
		String line;
		while ((line = reader.readLine()) != null) {
			line = line.trim();
			// Skip blank lines and '#' comments.
			if (line.isEmpty() || line.charAt(0) == '#')
				continue;

			int i = line.indexOf('=');
			if (i == -1)
				throw new IllegalArgumentException("No '=' separator was found");

			builder.add(line.substring(0, i), StringUtil.unescape(line.substring(i + 1)));
		}
	}

	/**
	 * Loads a string resource module from the reader.
	 *
	 * @param reader The reader.
	 * @return The new string resource module.
	 * @throws IOException If any I/O error occurs.
	 */
	public static ResourceModule<String> load(BufferedReader reader) throws IOException {
		ResourceModule.Builder<String> builder = ResourceModule.builder(String.class);
		load(builder, reader);
		return builder.build();
	}

	/**
	 * Loads every language file with the given extension found in the
	 * directory. File names must be {@code <languageId><extension>}; files
	 * whose base name is not a valid language id are ignored. Individual file
	 * failures are logged and skipped.
	 *
	 * @param dir The directory to scan.
	 * @param extension The file extension (with or without leading dot).
	 * @return A map of language to loaded module (possibly empty on error).
	 */
	public static Map<Language, ResourceModule<String>> loadAll(Path dir, String extension) {
		if (!extension.isEmpty() && extension.charAt(0) != '.')
			extension = '.' + extension;

		Map<Language, ResourceModule<String>> map = new HashMap<>();
		// The builder is reused across files and reset after each attempt.
		ResourceModule.Builder<String> builder = ResourceModule.builder(String.class);

		try (DirectoryStream<Path> st = Files.newDirectoryStream(dir)) {
			for (Path p : st) {
				String fn = p.getFileName().toString();
				if (fn.endsWith(extension)) {
					String id = fn.substring(0, fn.length() - extension.length());
					if (Language.isValidId(id)) {
						try {
							load(builder, p);
							map.put(Language.of(id), builder.build());
						} catch (Exception e) {
							logger.error("Failed to load lang file {}", fn, e);
						}
						builder.reset();
					}
				}
			}
		} catch (Exception e) {
			logger.error("Can't list lang files in directory {}", dir, e);
		}

		return map;
	}

	static {
		// Refresh translations when the selected language changes, or when a
		// string module currently in use is replaced. Priority -100 runs these
		// before default-priority listeners.
		new ListenerRegistration<>(LanguageSelectionChangeEvent.class, e -> e.manager.translator.update(), -100).register();
		new ListenerRegistration<>(ResourceModuleChangeEvent.class, e -> {
			Translator t = e.pack.manager.translator;
			if (e.prevModule == t.defaultModule || e.prevModule == t.selectionModule)
				t.update();
		}, -100).register();
	}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.highlighting;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.ex.ActionManagerEx;
import com.intellij.openapi.actionSystem.ex.AnActionListener;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.editor.ex.MarkupModelEx;
import com.intellij.openapi.editor.markup.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.UserDataHolderEx;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiReference;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.ui.ColorUtil;
import com.intellij.util.BitUtil;
import java.util.HashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.util.*;
/**
 * Project-level implementation of {@link HighlightManager}.
 *
 * <p>Every highlighter created here is recorded in a per-editor map (stored in
 * the editor's user data under {@link #HIGHLIGHT_INFO_MAP_KEY}) together with
 * its hide-flags. Listeners installed in the constructor then remove
 * highlighters automatically: {@code HIDE_BY_TEXT_CHANGE} ones on document
 * edits, {@code HIDE_BY_ANY_KEY} ones on any action or typed character.</p>
 */
public class HighlightManagerImpl extends HighlightManager {
  private final Project myProject;

  /**
   * Installs the auto-removal listeners: an action/typing listener for
   * {@code HIDE_BY_ANY_KEY} and a document listener for
   * {@code HIDE_BY_TEXT_CHANGE}. Both are scoped to the project's lifetime.
   */
  public HighlightManagerImpl(Project project) {
    myProject = project;
    ActionManagerEx.getInstanceEx().addAnActionListener(new MyAnActionListener(), myProject);
    DocumentListener documentListener = new DocumentListener() {
      @Override
      public void documentChanged(DocumentEvent event) {
        Document document = event.getDocument();
        Editor[] editors = EditorFactory.getInstance().getEditors(document);
        for (Editor editor : editors) {
          Map<RangeHighlighter, HighlightInfo> map = getHighlightInfoMap(editor, false);
          // FIX: was 'return', which aborted the scan of the remaining editors
          // of this document whenever one editor had no highlight map, leaving
          // their HIDE_BY_TEXT_CHANGE highlighters behind. Skip just this editor.
          if (map == null) continue;
          ArrayList<RangeHighlighter> highlightersToRemove = new ArrayList<>();
          for (Map.Entry<RangeHighlighter, HighlightInfo> entry : map.entrySet()) {
            HighlightInfo info = entry.getValue();
            if (!info.editor.getDocument().equals(document)) continue;
            if (BitUtil.isSet(info.flags, HIDE_BY_TEXT_CHANGE)) {
              highlightersToRemove.add(entry.getKey());
            }
          }
          // Remove outside the iteration to avoid mutating the map mid-scan.
          for (RangeHighlighter highlighter : highlightersToRemove) {
            removeSegmentHighlighter(editor, highlighter);
          }
        }
      }
    };
    EditorFactory.getInstance().getEventMulticaster().addDocumentListener(documentListener, myProject);
  }

  /**
   * Returns the highlighter bookkeeping map of the given editor, unwrapping
   * injected-fragment editors ({@link EditorWindow}) to their host delegate.
   *
   * @param toCreate create and attach an empty map when none exists yet
   * @return the map, or {@code null} when absent and {@code toCreate} is false
   */
  @Nullable
  public Map<RangeHighlighter, HighlightInfo> getHighlightInfoMap(@NotNull Editor editor, boolean toCreate) {
    if (editor instanceof EditorWindow) return getHighlightInfoMap(((EditorWindow)editor).getDelegate(), toCreate);
    Map<RangeHighlighter, HighlightInfo> map = editor.getUserData(HIGHLIGHT_INFO_MAP_KEY);
    if (map == null && toCreate) {
      // putUserDataIfAbsent makes racing initializers converge on a single map.
      map = ((UserDataHolderEx)editor).putUserDataIfAbsent(HIGHLIGHT_INFO_MAP_KEY, new HashMap<>());
    }
    return map;
  }

  /** Returns all highlighters this manager created for exactly this editor. */
  @NotNull
  public RangeHighlighter[] getHighlighters(@NotNull Editor editor) {
    Map<RangeHighlighter, HighlightInfo> highlightersMap = getHighlightInfoMap(editor, false);
    if (highlightersMap == null) return RangeHighlighter.EMPTY_ARRAY;
    Set<RangeHighlighter> set = new HashSet<>();
    for (Map.Entry<RangeHighlighter, HighlightInfo> entry : highlightersMap.entrySet()) {
      HighlightInfo info = entry.getValue();
      if (info.editor.equals(editor)) set.add(entry.getKey());
    }
    return set.toArray(RangeHighlighter.EMPTY_ARRAY);
  }

  /**
   * Creates a highlighter one layer below the selection and records it with its
   * hide-flags so the listeners installed in the constructor can remove it.
   */
  private RangeHighlighter addSegmentHighlighter(@NotNull Editor editor, int startOffset, int endOffset, TextAttributes attributes, @HideFlags int flags) {
    RangeHighlighter highlighter = editor.getMarkupModel()
      .addRangeHighlighter(startOffset, endOffset, HighlighterLayer.SELECTION - 1, attributes, HighlighterTargetArea.EXACT_RANGE);
    // Record against the top-level editor so lookups (which unwrap EditorWindow)
    // and equality checks in getHighlighters/hideHighlights line up.
    HighlightInfo info = new HighlightInfo(editor instanceof EditorWindow ? ((EditorWindow)editor).getDelegate() : editor, flags);
    Map<RangeHighlighter, HighlightInfo> map = getHighlightInfoMap(editor, true);
    map.put(highlighter, info);
    return highlighter;
  }

  /**
   * Removes a previously registered highlighter from both the markup model and
   * this manager's bookkeeping.
   *
   * @return {@code true} if the highlighter was known to this manager
   */
  @Override
  public boolean removeSegmentHighlighter(@NotNull Editor editor, @NotNull RangeHighlighter highlighter) {
    Map<RangeHighlighter, HighlightInfo> map = getHighlightInfoMap(editor, false);
    if (map == null) return false;
    HighlightInfo info = map.get(highlighter);
    if (info == null) return false;
    MarkupModel markupModel = info.editor.getMarkupModel();
    // Dispose only if still attached — it may already have been disposed elsewhere.
    if (((MarkupModelEx)markupModel).containsHighlighter(highlighter)) {
      highlighter.dispose();
    }
    map.remove(highlighter);
    return true;
  }

  /**
   * Highlights each reference occurrence. Since references may live in injected
   * fragments, each occurrence is highlighted in its own (possibly injected)
   * editor; the host editor's caret and scroll state are saved up front and
   * restored afterwards (opening injected editors can move them —
   * NOTE(review): presumed reason, confirm against openEditorFor behavior).
   */
  @Override
  public void addOccurrenceHighlights(@NotNull Editor editor,
                                      @NotNull PsiReference[] occurrences,
                                      @NotNull TextAttributes attributes,
                                      boolean hideByTextChange,
                                      Collection<RangeHighlighter> outHighlighters) {
    if (occurrences.length == 0) return;
    int flags = HIDE_BY_ESCAPE;
    if (hideByTextChange) {
      flags |= HIDE_BY_TEXT_CHANGE;
    }
    Color scrollmarkColor = getScrollMarkColor(attributes, editor.getColorsScheme());
    int oldOffset = editor.getCaretModel().getOffset();
    int horizontalScrollOffset = editor.getScrollingModel().getHorizontalScrollOffset();
    int verticalScrollOffset = editor.getScrollingModel().getVerticalScrollOffset();
    for (PsiReference occurrence : occurrences) {
      PsiElement element = occurrence.getElement();
      int startOffset = element.getTextRange().getStartOffset();
      int start = startOffset + occurrence.getRangeInElement().getStartOffset();
      int end = startOffset + occurrence.getRangeInElement().getEndOffset();
      PsiFile containingFile = element.getContainingFile();
      Project project = element.getProject();
      // each reference can reside in its own injected editor
      Editor textEditor = InjectedLanguageUtil.openEditorFor(containingFile, project);
      if (textEditor != null) {
        addOccurrenceHighlight(textEditor, start, end, attributes, flags, outHighlighters, scrollmarkColor);
      }
    }
    editor.getCaretModel().moveToOffset(oldOffset);
    editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
    editor.getScrollingModel().scrollHorizontally(horizontalScrollOffset);
    editor.getScrollingModel().scrollVertically(verticalScrollOffset);
  }

  /** Alias for {@link #addOccurrenceHighlights(Editor, PsiElement[], TextAttributes, boolean, Collection)}. */
  @Override
  public void addElementsOccurrenceHighlights(@NotNull Editor editor,
                                              @NotNull PsiElement[] elements,
                                              @NotNull TextAttributes attributes,
                                              boolean hideByTextChange,
                                              Collection<RangeHighlighter> outHighlighters) {
    addOccurrenceHighlights(editor, elements, attributes, hideByTextChange, outHighlighters);
  }

  /**
   * Highlights a single range, optionally collecting the created highlighter
   * and tinting its error-stripe mark.
   */
  @Override
  public void addOccurrenceHighlight(@NotNull Editor editor,
                                     int start,
                                     int end,
                                     TextAttributes attributes,
                                     int flags,
                                     Collection<RangeHighlighter> outHighlighters,
                                     Color scrollmarkColor) {
    RangeHighlighter highlighter = addSegmentHighlighter(editor, start, end, attributes, flags);
    if (outHighlighters != null) {
      outHighlighters.add(highlighter);
    }
    if (scrollmarkColor != null) {
      highlighter.setErrorStripeMarkColor(scrollmarkColor);
    }
  }

  /** Overload of the full addRangeHighlight with {@code hideByAnyKey=false}. */
  @Override
  public void addRangeHighlight(@NotNull Editor editor,
                                int startOffset,
                                int endOffset,
                                @NotNull TextAttributes attributes,
                                boolean hideByTextChange,
                                @Nullable Collection<RangeHighlighter> highlighters) {
    addRangeHighlight(editor, startOffset, endOffset, attributes, hideByTextChange, false, highlighters);
  }

  /** Highlights an explicit offset range, translating the flags and scroll-mark color. */
  @Override
  public void addRangeHighlight(@NotNull Editor editor,
                                int startOffset,
                                int endOffset,
                                @NotNull TextAttributes attributes,
                                boolean hideByTextChange,
                                boolean hideByAnyKey,
                                @Nullable Collection<RangeHighlighter> highlighters) {
    int flags = HIDE_BY_ESCAPE;
    if (hideByTextChange) {
      flags |= HIDE_BY_TEXT_CHANGE;
    }
    if (hideByAnyKey) {
      flags |= HIDE_BY_ANY_KEY;
    }
    Color scrollmarkColor = getScrollMarkColor(attributes, editor.getColorsScheme());
    addOccurrenceHighlight(editor, startOffset, endOffset, attributes, flags, highlighters, scrollmarkColor);
  }

  /**
   * Highlights each element's text range in the given (host) editor, mapping
   * injected-fragment ranges back to host offsets and clamping them to the
   * document length.
   */
  @Override
  public void addOccurrenceHighlights(@NotNull Editor editor,
                                      @NotNull PsiElement[] elements,
                                      @NotNull TextAttributes attributes,
                                      boolean hideByTextChange,
                                      Collection<RangeHighlighter> outHighlighters) {
    if (elements.length == 0) return;
    int flags = HIDE_BY_ESCAPE;
    if (hideByTextChange) {
      flags |= HIDE_BY_TEXT_CHANGE;
    }
    Color scrollmarkColor = getScrollMarkColor(attributes, editor.getColorsScheme());
    if (editor instanceof EditorWindow) {
      editor = ((EditorWindow)editor).getDelegate();
    }
    for (PsiElement element : elements) {
      TextRange range = element.getTextRange();
      range = InjectedLanguageManager.getInstance(myProject).injectedToHost(element, range);
      addOccurrenceHighlight(editor,
                             trimOffsetToDocumentSize(editor, range.getStartOffset()),
                             trimOffsetToDocumentSize(editor, range.getEndOffset()),
                             attributes, flags, outHighlighters, scrollmarkColor);
    }
  }

  /** Clamps {@code offset} to {@code [0, documentLength]}. */
  private static int trimOffsetToDocumentSize(@NotNull Editor editor, int offset) {
    if (offset < 0) return 0;
    int textLength = editor.getDocument().getTextLength();
    return offset < textLength ? offset : textLength;
  }

  /**
   * Picks an error-stripe color for a highlight: the explicit stripe color if
   * set, otherwise a lightened/darkened variant of the background (lightened on
   * dark schemes), or {@code null} when neither is available.
   */
  @Nullable
  private static Color getScrollMarkColor(@NotNull TextAttributes attributes, @NotNull EditorColorsScheme colorScheme) {
    if (attributes.getErrorStripeColor() != null) return attributes.getErrorStripeColor();
    if (attributes.getBackgroundColor() != null) {
      boolean isDark = ColorUtil.isDark(colorScheme.getDefaultBackground());
      return isDark ? attributes.getBackgroundColor().brighter() : attributes.getBackgroundColor().darker();
    }
    return null;
  }

  /**
   * Removes every highlighter of this editor whose flags intersect {@code mask}.
   *
   * @return {@code true} if at least one highlighter was removed
   */
  public boolean hideHighlights(@NotNull Editor editor, @HideFlags int mask) {
    Map<RangeHighlighter, HighlightInfo> map = getHighlightInfoMap(editor, false);
    if (map == null) return false;
    boolean done = false;
    ArrayList<RangeHighlighter> highlightersToRemove = new ArrayList<>();
    for (Map.Entry<RangeHighlighter, HighlightInfo> entry : map.entrySet()) {
      HighlightInfo info = entry.getValue();
      if (!info.editor.equals(editor)) continue;
      if ((info.flags & mask) != 0) {
        highlightersToRemove.add(entry.getKey());
        done = true;
      }
    }
    for (RangeHighlighter highlighter : highlightersToRemove) {
      removeSegmentHighlighter(editor, highlighter);
    }
    return done;
  }

  /** Hides HIDE_BY_ANY_KEY highlights when any action runs or a key is typed. */
  private class MyAnActionListener implements AnActionListener {
    @Override
    public void beforeActionPerformed(AnAction action, final DataContext dataContext, AnActionEvent event) {
      requestHideHighlights(dataContext);
    }

    @Override
    public void beforeEditorTyping(char c, DataContext dataContext) {
      requestHideHighlights(dataContext);
    }

    private void requestHideHighlights(final DataContext dataContext) {
      final Editor editor = CommonDataKeys.EDITOR.getData(dataContext);
      if (editor == null) return;
      hideHighlights(editor, HIDE_BY_ANY_KEY);
    }
  }

  // Per-editor user-data key under which the bookkeeping map is stored.
  private final Key<Map<RangeHighlighter, HighlightInfo>> HIGHLIGHT_INFO_MAP_KEY = Key.create("HIGHLIGHT_INFO_MAP_KEY");

  /** Pairs the owning (top-level) editor with the hide-flags of a highlighter. */
  static class HighlightInfo {
    final Editor editor;
    @HideFlags final int flags;

    public HighlightInfo(Editor editor, @HideFlags int flags) {
      this.editor = editor;
      this.flags = flags;
    }
  }
}
| |
package org.wikipathways.wp2rdf.ontologies;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.Resource;
/**
* Vocabulary definitions from http://www.w3.org/TR/2012/WD-prov-o-20120503/prov.owl
* @author Auto-generated by schemagen on 14 Sep 2012 14:08
*/
public class Prov {
/** <p>The RDF model that holds the vocabulary terms</p> */
private static Model m_model = ModelFactory.createDefaultModel();
/** <p>The namespace of the vocabulary as a string</p> */
public static final String NS = "http://www.w3.org/ns/prov#";
/** <p>The namespace of the vocabulary as a string</p>
* @see #NS */
public static String getURI() {return NS;}
/** <p>The namespace of the vocabulary as a resource</p> */
public static final Resource NAMESPACE = m_model.createResource( NS );
/** <p>An object property to express the accountability of an agent towards another
* agent. The subordinate agent acted on behalf of the responsible agent in an
* actual activity.</p>
*/
public static final Property actedOnBehalfOf = m_model.createProperty( "http://www.w3.org/ns/prov#actedOnBehalfOf" );
/** <p>The property used by an prov:ActivityInvolvement to cite the prov:Activity
* that was involved with either an Activity or Entity. It can be used to refer
* to the activity involved in generating an entity, informing another activity,
* or starting another activity.</p>
*/
public static final Property activity = m_model.createProperty( "http://www.w3.org/ns/prov#activity" );
/** <p>The property used by a prov:AgentInvolvement to cite the Agent that was prov:involved
* with either an Activity or Entity. It can be used to express the agent involved
* in being responsible for an activity, being attributed to an entity, starting
* or ending an activity, or being responsible for another subordinate agent
* in an activity.</p>
*/
public static final Property agent = m_model.createProperty( "http://www.w3.org/ns/prov#agent" );
/** <p>An entity is alternate of another if they are both a specialization of some
* common entity. The common entity does not need to be identified.</p>
*/
public static final Property alternateOf = m_model.createProperty( "http://www.w3.org/ns/prov#alternateOf" );
/** <p>Location is an optional attribute of Entity and Activity.</p> */
public static final Property atLocation = m_model.createProperty( "http://www.w3.org/ns/prov#atLocation" );
public static final Property atTime = m_model.createProperty( "http://www.w3.org/ns/prov#atTime" );
/** <p>The dictionary was derived from the other by insertion. prov:qualifiedInsertion
* shows details of the insertion, in particular the inserted key-value pairs.</p>
*/
public static final Property derivedByInsertionFrom = m_model.createProperty( "http://www.w3.org/ns/prov#derivedByInsertionFrom" );
/** <p>The dictionary was derived from the other by removal. prov:qualifiedRemoval
* shows details of the removal, in particular the removed key-value pairs.</p>
*/
public static final Property derivedByRemovalFrom = m_model.createProperty( "http://www.w3.org/ns/prov#derivedByRemovalFrom" );
/** <p>The property used by a prov:DictionaryInvolvement to cite the prov:Dictionary
* that was prov:involved in insertion or removal of elements of a collection.</p>
*/
public static final Property dictionary = m_model.createProperty( "http://www.w3.org/ns/prov#dictionary" );
/** <p>The time when an activity ended.</p> */
public static final Property endedAtTime = m_model.createProperty( "http://www.w3.org/ns/prov#endedAtTime" );
/** <p>The property used by an prov:EntityInvolvement to cite the Entity that was
* prov:involved with either an Activity or Entity. It can be used to refer to
* the entity involved in deriving another entity, being quoted or revised from,
* being the source of another entity, or being used in an activity.</p>
*/
public static final Property entity = m_model.createProperty( "http://www.w3.org/ns/prov#entity" );
/** <p>This inverse of prov:wasGeneratedBy is defined so that Activities being described
* can reference their generated outputs directly without needing to 'stop' and
* start describing the Entity. This helps 'Activity-centric' modeling as opposed
* to 'Entity-centric' modeling.</p>
*/
public static final Property generated = m_model.createProperty( "http://www.w3.org/ns/prov#generated" );
/** <p>The activity generating the derived entity and using the derived-from entity.</p> */
public static final Property hadActivity = m_model.createProperty( "http://www.w3.org/ns/prov#hadActivity" );
/** <p>The generation involving the generated entity and activity.</p> */
public static final Property hadGeneration = m_model.createProperty( "http://www.w3.org/ns/prov#hadGeneration" );
public static final Property hadOriginalSource = m_model.createProperty( "http://www.w3.org/ns/prov#hadOriginalSource" );
/** <p>The Activity performed was described by the given plan resource. Activity
* specifications, as referred to by plan links, are out of scope of this specification</p>
*/
public static final Property hadPlan = m_model.createProperty( "http://www.w3.org/ns/prov#hadPlan" );
/** <p>The agent who attributed to the original entity.</p> */
public static final Property hadQuoted = m_model.createProperty( "http://www.w3.org/ns/prov#hadQuoted" );
/** <p>The agent who is doing the quoting.</p> */
public static final Property hadQuoter = m_model.createProperty( "http://www.w3.org/ns/prov#hadQuoter" );
/** <p>When an instance of prov:Involvement uses the prov:hadRole property to cite
* a prov:Role, it is providing a role for the instance referenced by the prov:entity
* or the prov:activity properties. For example, :baking prov:used :spoon; prov:qualified
* [ a prov:Usage; prov:entity :spoon; prov:hadRole roles:mixing_implement ].</p>
*/
public static final Property hadRole = m_model.createProperty( "http://www.w3.org/ns/prov#hadRole" );
/** <p>The usage involving the used entity and activity.</p> */
public static final Property hadUsage = m_model.createProperty( "http://www.w3.org/ns/prov#hadUsage" );
public static final Property hasAnnotation = m_model.createProperty( "http://www.w3.org/ns/prov#hasAnnotation" );
/** <p>An object property to refer to the prov:KeyValuePair inserted into a prov:Collection.</p> */
public static final Property inserted = m_model.createProperty( "http://www.w3.org/ns/prov#inserted" );
/** <p>Subproperties of prov:involved may be be qualified by creating instances of
* a corresponding prov:Involvement class. For example, the binary relation :baking
* prov:used :spoon can be qualified by asserting :baking prov:qualified [ a
* prov:Usage; prov:entity :baking; :foo :bar ] prov:involved should not be used
* without also using one of its subproperties. Subproperties of prov:involved
* may also be asserted directly without being qualified.</p>
*/
public static final Property involved = m_model.createProperty( "http://www.w3.org/ns/prov#involved" );
/** <p>Subproperties of prov:involvee are used to cite the object of an unqualified
* PROV-O triple whose predicate is a subproperty of prov:involved (e.g. prov:used,
* prov:wasGeneratedBy). prov:involvee is used much like rdf:object is used.</p>
*/
public static final Property involvee = m_model.createProperty( "http://www.w3.org/ns/prov#involvee" );
/** <p>The key of a KeyValuePair, which is an element of a prov:Collection.</p> */
public static final Property key = m_model.createProperty( "http://www.w3.org/ns/prov#key" );
/** <p>The key-value pair was part of the membership. A membership can have multiple
* members.</p>
*/
public static final Property member = m_model.createProperty( "http://www.w3.org/ns/prov#member" );
/** <p>The collection included the specified membership of keys-values.</p> */
public static final Property membership = m_model.createProperty( "http://www.w3.org/ns/prov#membership" );
/** <p>If this Activity prov:wasAssociatedWith Agent :ag, then it can qualify the
* Association using prov:qualifiedAssociation [ a prov:Association; prov:agent
* :ag; :foo :bar ].</p>
*/
public static final Property qualifiedAssociation = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedAssociation" );
/** <p>If this Entity prov:wasAttributedTo Agent :ag, then it can qualify how it
* was using prov:qualifiedAttribution [ a prov:Attribution; prov:agent :ag;
* :foo :bar ].</p>
*/
public static final Property qualifiedAttribution = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedAttribution" );
/** <p>If this Activity prov:wasInformedBy Activity :a, then it can qualify how it
* was Inform[ed] using prov:qualifiedInform [ a prov:Inform; prov:activity :a;
* :foo :bar ].</p>
*/
public static final Property qualifiedCommunication = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedCommunication" );
/** <p>If this Entity prov:wasDerivedFrom Entity :e, then it can qualify how it was
* derived using prov:qualifiedDerivation [ a prov:Derivation; prov:entity :e;
* :foo :bar ].</p>
*/
public static final Property qualifiedDerivation = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedDerivation" );
/** <p>If this Activity prov:wasEndedBy Entity :e1, then it can qualify how it was
* ended using prov:qualifiedEnd [ a prov:End; prov:entity :e1; :foo :bar ].</p>
*/
public static final Property qualifiedEnd = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedEnd" );
/** <p>If this Activity prov:generated Entity :e, then it can qualify how it
 * performed the Generation using prov:qualifiedGeneration [ a prov:Generation;
* prov:entity :e; :foo :bar ].</p>
*/
public static final Property qualifiedGeneration = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedGeneration" );
/** <p>If this Dictionary prov:derivedByInsertionFrom another Dictionary :e, then
* it can qualify how it did perform the Insertion using prov:qualifiedInsertion
* [ a prov:Insertion; prov:dictionary :e; prov:inserted [a prov:KeyValuePair;
* prov:key "k1"^^xsd:string; prov:value :foo] ].</p>
*/
public static final Property qualifiedInsertion = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedInsertion" );
/** <p>If this Entity prov:wasQuotedFrom Entity :e, then it can qualify how using
* prov:qualifiedQuotation [ a prov:Quotation; prov:entity :e; :foo :bar ].</p>
*/
public static final Property qualifiedQuotation = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedQuotation" );
/** <p>If this Dictionary prov:derivedByRemovalFrom another Dictionary :e, then it
* can qualify how it did perform the Removal using prov:qualifiedRemoval [ a
* prov:Removal; prov:dictionary :c; prov:removed "k1"^^xsd:string ].</p>
*/
public static final Property qualifiedRemoval = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedRemoval" );
/** <p>If this Agent prov:actedOnBehalfOf Agent :ag, then it can qualify how with
* prov:qualifiedResponsibility [ a prov:Responsibility; prov:agent :ag; :foo
* :bar ].</p>
*/
public static final Property qualifiedResponsibility = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedResponsibility" );
/** <p>If this Entity prov:wasRevisionOf Entity :e, then it can qualify how it was
* revised using prov:qualifiedRevision [ a prov:Revision; prov:entity :e; :foo
* :bar ].</p>
*/
public static final Property qualifiedRevision = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedRevision" );
/** <p>If this Entity prov:hadOriginalSource Entity :e, then it can qualify how using
* prov:qualifiedSource [ a prov:Source; prov:entity :e; :foo :bar ].</p>
*/
public static final Property qualifiedSource = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedSource" );
/** <p>If this Activity prov:wasStartedBy Entity :e1, then it can qualify how it
* was started using prov:qualifiedStart [ a prov:Start; prov:entity :e1; :foo
* :bar ].</p>
*/
public static final Property qualifiedStart = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedStart" );
/** <p>If this Activity prov:wasStartedByActivity :a, then it can qualify how it
* was started using prov:qualifiedStart [ a prov:Start; prov:activity :a; :foo
* :bar ].</p>
*/
public static final Property qualifiedStartByActivity = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedStartByActivity" );
/** <p>If this prov:wasAttributedTo Entity :e, then it can qualify how using prov:qualifiedTrace
* [ a prov:Trace; prov:entity :e; :foo :bar ]. If this prov:wasAttributedTo
* Activity :a, then it can qualify how using prov:qualifiedTrace [ a prov:Trace;
* prov:activity :a; :foo :bar ].</p>
*/
public static final Property qualifiedTrace = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedTrace" );
/** <p>If this Activity prov:used Entity :e, then it can qualify how it used it using
* prov:qualifiedUsage [ a prov:Usage; prov:entity :e; :foo :bar ].</p>
*/
public static final Property qualifiedUsage = m_model.createProperty( "http://www.w3.org/ns/prov#qualifiedUsage" );
/** <p>The key removed in a Removal.</p> */
public static final Property removedKey = m_model.createProperty( "http://www.w3.org/ns/prov#removedKey" );
/** <p>An entity is a specialization of another if they both refer to some common
 * thing but the former is a more constrained entity than the latter. The common
* entity does not need to be identified.</p>
*/
public static final Property specializationOf = m_model.createProperty( "http://www.w3.org/ns/prov#specializationOf" );
/** <p>The time when an activity started.</p> */
public static final Property startedAtTime = m_model.createProperty( "http://www.w3.org/ns/prov#startedAtTime" );
/** <p>An object property to link back an entity to another by means of derivation
* or responsibility relations, possibly repeatedly traversed.</p>
*/
public static final Property tracedTo = m_model.createProperty( "http://www.w3.org/ns/prov#tracedTo" );
/** <p>A prov:Entity that was used by this prov:Activity. For example, :baking prov:used
* :spoon, :egg, :oven .</p>
*/
public static final Property used = m_model.createProperty( "http://www.w3.org/ns/prov#used" );
/** <p>The value of a KeyValuePair.</p> */
public static final Property value = m_model.createProperty( "http://www.w3.org/ns/prov#value" );
/** <p>An prov:Agent that had some (unspecified) responsibility for the occurrence
* of this prov:Activity.</p>
*/
public static final Property wasAssociatedWith = m_model.createProperty( "http://www.w3.org/ns/prov#wasAssociatedWith" );
/** <p>Attribution is the ascribing of an entity to an agent.</p> */
public static final Property wasAttributedTo = m_model.createProperty( "http://www.w3.org/ns/prov#wasAttributedTo" );
/** <p>A derivation is a transformation of an entity into another, a construction
* of an entity into another, or an update of an entity, resulting in a new one.</p>
*/
public static final Property wasDerivedFrom = m_model.createProperty( "http://www.w3.org/ns/prov#wasDerivedFrom" );
/** <p>End is when an activity is deemed to have ended. An end may refer to an entity,
* known as trigger, that terminated the activity.</p>
*/
public static final Property wasEndedBy = m_model.createProperty( "http://www.w3.org/ns/prov#wasEndedBy" );
/** <p>wasGeneratedBy links Entities with an Activity, representing that the entity
 * was generated as a result of that Activity</p>
*/
public static final Property wasGeneratedBy = m_model.createProperty( "http://www.w3.org/ns/prov#wasGeneratedBy" );
/** <p>An activity a2 is dependent on or informed by another activity a1, by way
* of some unspecified entity that is generated by a1 and used by a2.</p>
*/
public static final Property wasInformedBy = m_model.createProperty( "http://www.w3.org/ns/prov#wasInformedBy" );
public static final Property wasInvalidatedBy = m_model.createProperty( "http://www.w3.org/ns/prov#wasInvalidatedBy" );
/** <p>An entity is derived from an original entity by copying, or "quoting", some
* or all of it.</p>
*/
public static final Property wasQuotedFrom = m_model.createProperty( "http://www.w3.org/ns/prov#wasQuotedFrom" );
/** <p>A revision is a derivation that revises an entity into a revised version.</p> */
public static final Property wasRevisionOf = m_model.createProperty( "http://www.w3.org/ns/prov#wasRevisionOf" );
/** <p>Start is when an activity is deemed to have started. A start may refer to
* an entity, known as trigger, that initiated the activity.</p>
*/
public static final Property wasStartedBy = m_model.createProperty( "http://www.w3.org/ns/prov#wasStartedBy" );
/** <p>The start of an activity with an implicit trigger generated by another activity.</p> */
public static final Property wasStartedByActivity = m_model.createProperty( "http://www.w3.org/ns/prov#wasStartedByActivity" );
/** <p>Note that there are kinds of accounts (e.g. handwritten letters, audio recordings,
 * etc.) that are not expressed in PROV-O, but can still be described by PROV-O.</p>
*/
public static final Resource Account = m_model.createResource( "http://www.w3.org/ns/prov#Account" );
/** <p>An activity is something that occurs over a period of time and acts upon or
* with entities. This action can take multiple forms: consuming, processing,
* transforming, modifying, relocating, using, generating, or being associated
* with entities. Activities that operate on digital entities may for example
* move, copy, or duplicate them.</p>
*/
public static final Resource Activity = m_model.createResource( "http://www.w3.org/ns/prov#Activity" );
/** <p>prov:ActivityInvolvement provides descriptions of any binary involvement between
* any instance and an prov:Activity.</p>
*/
public static final Resource ActivityInvolvement = m_model.createResource( "http://www.w3.org/ns/prov#ActivityInvolvement" );
/** <p>An agent is a type of entity that bears some form of responsibility for an
* activity taking place.</p>
*/
public static final Resource Agent = m_model.createResource( "http://www.w3.org/ns/prov#Agent" );
/** <p>prov:AgentInvolvement provides descriptions of any binary involvement between
* any instance and an prov:Agent.</p>
*/
public static final Resource AgentInvolvement = m_model.createResource( "http://www.w3.org/ns/prov#AgentInvolvement" );
/** <p>An instance of prov:Association provides additional descriptions about the
* binary prov:wasAssociatedWith relation from an prov:Activity to some prov:Agent
* that is responsible for it. For example, :baking prov:wasAssociatedWith :baker;
* prov:qualified [ a prov:Association; prov:entity :baker; :foo :bar ].</p>
*/
public static final Resource Association = m_model.createResource( "http://www.w3.org/ns/prov#Association" );
/** <p>An instance of prov:Attribution provides additional descriptions about the
* binary prov:wasAttributedTo relation from an prov:Entity to some prov:Agent
* that is responsible for it. For example, :cake prov:wasAttributedTo :baker;
* prov:qualified [ a prov:Attribution; prov:entity :baker; :foo :bar ].</p>
*/
    public static final Resource Attribution = m_model.createResource( "http://www.w3.org/ns/prov#Attribution" );
    /** <p>An instance of prov:Communication provides additional descriptions about the
     * binary prov:wasInformedBy relation from an informed prov:Activity to the prov:Activity
     * that informed it. For example, :you_jumping_off_bridge prov:wasInformedBy
     * :everyone_else_jumping_off_bridge; prov:qualifiedCommunication [ a prov:Communication;
     * prov:entity :everyone_else_jumping_off_bridge; :foo :bar ].</p>
     */
    public static final Resource Communication = m_model.createResource( "http://www.w3.org/ns/prov#Communication" );
    /** <p>An instance of prov:Derivation provides additional descriptions about the
     * binary prov:wasDerivedFrom relation from some prov:Entity to another prov:Entity.
     * For example, :chewed_bubble_gum prov:wasDerivedFrom :unwrapped_bubble_gum;
     * prov:qualified [ a prov:Derivation; prov:entity :unwrapped_bubble_gum; :foo
     * :bar ].</p>
     */
    public static final Resource Derivation = m_model.createResource( "http://www.w3.org/ns/prov#Derivation" );
    /** <p>This concept allows for the provenance of the dictionary, but also of its
     * constituents to be expressed. Such a notion of dictionary corresponds to a
     * wide variety of concrete data structures, such as a maps or associative arrays.A
     * given dictionary forms a given structure for its members. A different structure
     * (obtained either by insertion or removal of members) constitutes a different
     * dictionary.</p>
     */
    public static final Resource Dictionary = m_model.createResource( "http://www.w3.org/ns/prov#Dictionary" );
    /** <p>prov:DictionaryInvolvement provides descriptions of any binary involvement
     * between any instance and a prov:Dictionary.</p>
     */
    public static final Resource DictionaryInvolvement = m_model.createResource( "http://www.w3.org/ns/prov#DictionaryInvolvement" );
    /** <p>An empty dictionary.</p> */
    public static final Resource EmptyDictionary = m_model.createResource( "http://www.w3.org/ns/prov#EmptyDictionary" );
    /** <p>An instance of prov:End provides additional descriptions about the binary
     * prov:wasEndedBy relation from some ended prov:Activity to an prov:Entity that
     * ended it. For example, :ball_game prov:wasEndedBy :buzzer; prov:qualified
     * [ a prov:Usage; prov:entity :buzzer; :foo :bar; prov:atTime '2012-03-09T08:05:08-05:00'^^xsd:dateTime
     * ].</p>
     */
    public static final Resource End = m_model.createResource( "http://www.w3.org/ns/prov#End" );
    /** <p>An entity is a thing one wants to provide provenance for. For the purpose
     * of this specification, things can be physical, digital, conceptual, or otherwise;
     * things may be real or imaginary.</p>
     */
    public static final Resource Entity = m_model.createResource( "http://www.w3.org/ns/prov#Entity" );
    /** <p>prov:EntityInvolvement provides descriptions of any binary involvement between
     * any instance and an prov:Entity.</p>
     */
    public static final Resource EntityInvolvement = m_model.createResource( "http://www.w3.org/ns/prov#EntityInvolvement" );
    /** <p>An instance of prov:Generation provides additional descriptions about the
     * binary prov:wasGeneratedBy relation from a generated prov:Entity to the prov:Activity
     * that generated it. For example, :cake prov:wasGeneratedBy :baking; prov:qualifiedGeneration
     * [ a prov:Generation; prov:entity :baking; :foo :bar ].</p>
     */
    public static final Resource Generation = m_model.createResource( "http://www.w3.org/ns/prov#Generation" );
    /** <p>Insertion is a derivation that transforms a dictionary into another, by insertion
     * of one or more key-value pairs.</p>
     */
    public static final Resource Insertion = m_model.createResource( "http://www.w3.org/ns/prov#Insertion" );
    /** <p>An instantaneous event, or event for short, happens in the world and marks
     * a change in the world, in its activities and in its entities. The term 'event'
     * is commonly used in process algebra with a similar meaning. Events represent
     * communications or interactions; they are assumed to be atomic and instantaneous.</p>
     */
    public static final Resource InstantaneousEvent = m_model.createResource( "http://www.w3.org/ns/prov#InstantaneousEvent" );
    /** <p>NOTE(review): the source ontology supplies no rdfs:comment for this term, so
     * none was generated. Following the naming pattern of the sibling qualification
     * classes, it presumably qualifies the binary prov:wasInvalidatedBy relation --
     * confirm against the PROV-O ontology.</p>
     */
    public static final Resource Invalidation = m_model.createResource( "http://www.w3.org/ns/prov#Invalidation" );
    /** <p>Any resource that involved an prov:Activity, prov:Entity, or prov:Agent can
     * qualify its involvement by also referencing an instance of prov:Involvement.
     * Instances of prov:Involvement reference the involved Activity, Entity, or
     * Agent (using prov:activity, prov:entity, or prov:agent, respectively) and
     * may be described with any kind of attributes, including user-defined attributes
     * and those provided by PROV (prov:hadRole, prov:hadPlan, prov:atTime, prov:hadLocation).
     * A description (via prov:Involvement) of the binary involvement implies the
     * assertion of the binary involvement.</p>
     */
    public static final Resource Involvement = m_model.createResource( "http://www.w3.org/ns/prov#Involvement" );
    /** <p>A key-value pair. Part of a prov:Collection through prov:Membership. The key
     * is any RDF Literal, the value is a prov:Entity.</p>
     */
    public static final Resource KeyValuePair = m_model.createResource( "http://www.w3.org/ns/prov#KeyValuePair" );
    /** <p>A location can be an identifiable geographic place (ISO 19112), but it can
     * also be a non-geographic place such as a directory, row, or column. As such,
     * there are numerous ways in which location can be expressed, such as by a coordinate,
     * address, landmark, and so forth.</p>
     */
    public static final Resource Location = m_model.createResource( "http://www.w3.org/ns/prov#Location" );
    /** <p>Described members of a collection, in the form of key-value pairs. The Membership
     * resource can also be annotated with attributes.</p>
     */
    public static final Resource Membership = m_model.createResource( "http://www.w3.org/ns/prov#Membership" );
    /** <p>Attribute-value pairs occurring in notes are application specific.</p> */
    public static final Resource Note = m_model.createResource( "http://www.w3.org/ns/prov#Note" );
    /** <p>Agents of type Organization are social institutions such as companies, societies
     * etc.</p>
     */
    public static final Resource Organization = m_model.createResource( "http://www.w3.org/ns/prov#Organization" );
    /** <p>Agents of type Person are people.</p> */
    public static final Resource Person = m_model.createResource( "http://www.w3.org/ns/prov#Person" );
    /** <p>There exist no prescriptive requirement on the nature of plans, their representation,
     * the actions or steps they consist of, or their intended goals. Since plans
     * may evolve over time, it may become necessary to track their provenance, so
     * plans themselves are entities. Representing the plan explicitly in the provenance
     * can be useful for various tasks: for example, to validate the execution as
     * represented in the provenance record, to manage expectation failures, or to
     * provide explanations.</p>
     */
    public static final Resource Plan = m_model.createResource( "http://www.w3.org/ns/prov#Plan" );
    /** <p>An instance of prov:Quotation provides additional descriptions about the binary
     * prov:wasQuotedFrom relation from some prov:Entity to another prov:Entity.
     * For example, :here_is_looking_at_you_kid prov:wasQuotedFrom :casablanca_script;
     * prov:qualified [ a prov:Quotation; prov:entity :casablanca_script; :foo :bar
     * ].</p>
     */
    public static final Resource Quotation = m_model.createResource( "http://www.w3.org/ns/prov#Quotation" );
    /** <p>Removal is a derivation that transforms a dictionary into another, by removing
     * one or more key-value pairs.</p>
     */
    public static final Resource Removal = m_model.createResource( "http://www.w3.org/ns/prov#Removal" );
    /** <p>An instance of prov:Responsibility provides additional descriptions about
     * the binary prov:actedOnBehalfOf relation from a performing prov:Agent to some
     * prov:Agent for whom it was performed. For example, :mixing prov:wasAssociatedWith
     * :toddler . :toddler prov:actedOnBehalfOf :mother; prov:qualified [ a prov:Responsibility;
     * prov:entity :mother; :foo :bar ].</p>
     */
    public static final Resource Responsibility = m_model.createResource( "http://www.w3.org/ns/prov#Responsibility" );
    /** <p>An instance of prov:Revision provides additional descriptions about the binary
     * prov:wasRevisionOf relation from some prov:Entity to another prov:Entity.
     * For example, :draft_2 prov:wasRevisionOf :draft_1; prov:qualified [ a prov:Source;
     * prov:entity :draft_1; :foo :bar ].</p>
     */
    public static final Resource Revision = m_model.createResource( "http://www.w3.org/ns/prov#Revision" );
    /** <p>A Role is the function of an entity with respect to an activity, in the context
     * of a usage, generation, association, start, and end.</p>
     */
    public static final Resource Role = m_model.createResource( "http://www.w3.org/ns/prov#Role" );
    /** <p>A software agent is running software.</p> */
    public static final Resource SoftwareAgent = m_model.createResource( "http://www.w3.org/ns/prov#SoftwareAgent" );
    /** <p>An instance of prov:Source provides additional descriptions about the binary
     * prov:hadOriginalSource relation from some prov:Entity to another prov:Entity.
     * For example, :blog prov:hadOriginalSource :newsArticle; prov:qualified [ a
     * prov:Source; prov:entity :newsArticle; :foo :bar ].</p>
     */
    public static final Resource Source = m_model.createResource( "http://www.w3.org/ns/prov#Source" );
    /** <p>An instance of prov:Start provides additional descriptions about the binary
     * prov:wasStartedBy relation from some started prov:Activity to an prov:Entity
     * that started it. For example, :foot_race prov:wasStartedBy :bang; prov:qualified
     * [ a prov:Usage; prov:entity :bang; :foo :bar; prov:atTime '2012-03-09T08:05:08-05:00'^^xsd:dateTime
     * ].</p>
     */
    public static final Resource Start = m_model.createResource( "http://www.w3.org/ns/prov#Start" );
    /** <p>An instance of prov:StartedByActivity provides additional descriptions about
     * the binary prov:wasStartedByActivity relation from some started prov:Activity
     * to another prov:Activity that started it. For example, :second_leg_of_relay prov:wasStartedByActivity
     * :handed_baton; prov:qualified [ a prov:Usage; prov:entity :handed_baton; :foo
     * :bar; prov:atTime '2012-03-09T08:05:08-05:00'^^xsd:dateTime ].</p>
     */
    public static final Resource StartByActivity = m_model.createResource( "http://www.w3.org/ns/prov#StartByActivity" );
    /** <p>An instance of prov:Trace provides additional descriptions about the binary
     * prov:tracedTo relation from some prov:Entity to some other prov:Element. For
     * example, :stomach_ache prov:tracedTo :spoon; prov:qualified [ a prov:Trace;
     * prov:entity :spoon; :foo :bar ].</p>
     */
    public static final Resource Trace = m_model.createResource( "http://www.w3.org/ns/prov#Trace" );
    /** <p>An instance of prov:Usage provides additional descriptions about the binary
     * prov:used relation from some prov:Activity to an prov:Entity that it used.
     * For example, :keynote prov:used :podium; prov:qualified [ a prov:Usage; prov:entity
     * :podium; :foo :bar ].</p>
     */
    public static final Resource Usage = m_model.createResource( "http://www.w3.org/ns/prov#Usage" );
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import static org.apache.geode.cache.Region.SEPARATOR;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.Properties;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.RegionFactory;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.configuration.RegionConfig;
import org.apache.geode.distributed.ConfigurationProperties;
import org.apache.geode.distributed.internal.InternalConfigurationPersistenceService;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.rules.ClusterStartupRule;
import org.apache.geode.test.dunit.rules.MemberVM;
import org.apache.geode.test.junit.assertions.CommandResultAssert;
import org.apache.geode.test.junit.categories.OQLIndexTest;
import org.apache.geode.test.junit.rules.GfshCommandRule;
/**
 * Distributed tests for the gfsh {@code destroy index} command.
 *
 * <p>Each test runs against a cluster of one locator and two servers (each server in its own
 * group) with a replicated region and two indexes created at cluster scope in {@link #before()}.
 * Tests exercise destruction by region, by name, by member, and by group, and verify both the
 * live index state on each server and the locator's persisted cluster configuration.
 */
@Category({OQLIndexTest.class})
public class DestroyIndexCommandsDUnitTest {
  // Region/index/group names shared by all tests; the region and both indexes are
  // (re)created in before() for every test.
  private static final String REGION_1 = "REGION1";
  private static final String INDEX_1 = "INDEX1";
  private static final String INDEX_2 = "INDEX2";
  private static final String GROUP_1 = "group1";
  private static final String GROUP_2 = "group2";
  // Cluster members started in before(): one locator plus one server per group.
  private MemberVM locator;
  private MemberVM server1;
  private MemberVM server2;
  @Rule
  public ClusterStartupRule rule = new ClusterStartupRule();
  @Rule
  public GfshCommandRule gfsh = new GfshCommandRule();
  /**
   * Starts the cluster, connects gfsh to the locator, creates {@code REGION1} and two indexes
   * ({@code INDEX1} on {@code key}, {@code INDEX2} on {@code id}) without a --group option, so
   * both indexes land in the 'cluster' group of the cluster configuration.
   */
  @Before
  public void before() throws Exception {
    Properties props = new Properties();
    // Allow the test domain class to pass the serialization filter on the servers.
    props.setProperty(ConfigurationProperties.SERIALIZABLE_OBJECT_FILTER,
        "org.apache.geode.management.internal.cli.domain.Stock");
    locator = rule.startLocatorVM(0);
    // Same Properties object is reused; only the group name differs between servers.
    props.setProperty("groups", GROUP_1);
    server1 = rule.startServerVM(1, props, locator.getPort());
    props.setProperty("groups", GROUP_2);
    server2 = rule.startServerVM(2, props, locator.getPort());
    gfsh.connectAndVerify(locator);
    gfsh.executeAndAssertThat(String.format("create region --name=%s --type=REPLICATE", REGION_1))
        .statusIsSuccess();
    // Each create-index command reports one row per server (row size 2) and updates the
    // cluster-scoped configuration.
    CommandResultAssert createIndex1Assert = gfsh.executeAndAssertThat(
        String.format("create index --name=%s --expression=key --region=%s", INDEX_1, REGION_1))
        .statusIsSuccess();
    createIndex1Assert.hasTableSection("createIndex").hasRowSize(2);
    createIndex1Assert.containsOutput("Cluster configuration for group 'cluster' is updated");
    CommandResultAssert createIndex2Assert = gfsh.executeAndAssertThat(
        String.format("create index --name=%s --expression=id --region=%s", INDEX_2, REGION_1))
        .statusIsSuccess();
    createIndex2Assert.hasTableSection("createIndex").hasRowSize(2);
    createIndex2Assert.containsOutput("Cluster configuration for group 'cluster' is updated");
    assertIndexCount(REGION_1, 2);
  }
  /** Destroying with only --region removes every index on that region on both servers. */
  @Test
  @SuppressWarnings("deprecation")
  public void testDestroyAllIndexesOnRegion() {
    gfsh.executeAndAssertThat("destroy index --region=" + REGION_1).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Status", "OK", "OK")
        .tableHasColumnWithExactValuesInAnyOrder("Message",
            "Destroyed all indexes on region REGION1", "Destroyed all indexes on region REGION1");
    assertIndexCount(REGION_1, 0);
    // Check idempotency
    gfsh.executeAndAssertThat("destroy index --if-exists --region=" + REGION_1).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Status", "OK", "OK")
        .tableHasColumnWithExactValuesInAnyOrder("Message",
            "Destroyed all indexes on region REGION1", "Destroyed all indexes on region REGION1");
  }
  /**
   * Destroying one named index leaves the other in place; a repeat with --if-exists reports
   * IGNORED, and a repeat without it reports ERROR.
   */
  @Test
  @SuppressWarnings("deprecation")
  public void testDestroyOneIndexOnRegion() {
    gfsh.executeAndAssertThat("destroy index --name=" + INDEX_1 + " --region=" + REGION_1)
        .statusIsSuccess().tableHasColumnWithExactValuesInAnyOrder("Message",
            "Destroyed index INDEX1 on region REGION1", "Destroyed index INDEX1 on region REGION1");
    assertIndexCount(REGION_1, 1);
    // Check idempotency
    gfsh.executeAndAssertThat(
        "destroy index --if-exists --name=" + INDEX_1 + " --region=" + REGION_1).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Status", "IGNORED", "IGNORED")
        .tableHasColumnWithExactValuesInAnyOrder("Message", "Index named \"INDEX1\" not found",
            "Index named \"INDEX1\" not found");
    assertIndexCount(REGION_1, 1);
    // Check error result is correct
    gfsh.executeAndAssertThat("destroy index --name=" + INDEX_1 + " --region=" + REGION_1)
        .tableHasColumnWithExactValuesInAnyOrder("Status", "ERROR", "ERROR").statusIsError()
        .tableHasColumnWithExactValuesInAnyOrder("Message", "Index named \"INDEX1\" not found",
            "Index named \"INDEX1\" not found");
  }
  /** --member limits destruction to that server; the other server keeps both indexes. */
  @Test
  @SuppressWarnings("deprecation")
  public void testDestroyAllIndexesOnOneMember() {
    gfsh.executeAndAssertThat("destroy index --member=server-1").statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Message", "Destroyed all indexes");
    server1.invoke(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache.getQueryService().getIndexes()).isEmpty();
    });
    server2.invoke(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache.getQueryService().getIndexes().size()).isEqualTo(2);
    });
    // Check idempotency
    gfsh.executeAndAssertThat("destroy index --if-exists --member=server-1").statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Message", "Destroyed all indexes");
  }
  /** Destroying one named index on one member only affects that member. */
  @Test
  @SuppressWarnings("deprecation")
  public void testDestroyOneIndexOnOneMember() {
    gfsh.executeAndAssertThat("destroy index --name=" + INDEX_1 + " --member=server-1")
        .statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Message", "Destroyed index INDEX1");
    server1.invoke(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache.getQueryService().getIndexes().size()).isEqualTo(1);
    });
    server2.invoke(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache.getQueryService().getIndexes().size()).isEqualTo(2);
    });
    // Check idempotency
    gfsh.executeAndAssertThat("destroy index --if-exists --name=" + INDEX_1 + " --member=server-1")
        .statusIsSuccess().tableHasColumnWithExactValuesInAnyOrder("Status", "IGNORED")
        .tableHasColumnWithExactValuesInAnyOrder("Message", "Index named \"INDEX1\" not found");
    // Check error result is correct
    gfsh.executeAndAssertThat("destroy index --name=" + INDEX_1 + " --member=server-1")
        .statusIsError().tableHasColumnWithExactValuesInAnyOrder("Status", "ERROR")
        .tableHasColumnWithExactValuesInAnyOrder("Message", "Index named \"INDEX1\" not found");
  }
  /**
   * After INDEX1 is removed from server-1 only, a cluster-wide destroy of INDEX1 is a partial
   * success: ERROR on server-1 (already gone) but OK on server-2, and the command as a whole
   * still reports success.
   */
  @Test
  @SuppressWarnings("deprecation")
  public void testPartialSuccessResultDestroyOneIndex() {
    gfsh.executeAndAssertThat("destroy index --name=" + INDEX_1 + " --member=server-1")
        .statusIsSuccess().tableHasColumnWithExactValuesInAnyOrder("Status", "OK")
        .tableHasColumnWithExactValuesInAnyOrder("Message", "Destroyed index INDEX1");
    server1.invoke(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache.getQueryService().getIndexes().size()).isEqualTo(1);
    });
    server2.invoke(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache.getQueryService().getIndexes().size()).isEqualTo(2);
    });
    // Check error on partial failure
    gfsh.executeAndAssertThat("destroy index --name=" + INDEX_1).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Status", "ERROR", "OK")
        .tableHasColumnWithExactValuesInAnyOrder("Message", "Index named \"INDEX1\" not found",
            "Destroyed index INDEX1");
    assertIndexCount(REGION_1, 1);
  }
  /**
   * Destroying per group succeeds on the matching server, but since the indexes were defined at
   * cluster scope (not per group), the cluster configuration is NOT updated and deliberately
   * diverges from the servers' live state.
   */
  @Test
  @SuppressWarnings("deprecation")
  public void destroyIndexOnOneGroupWithoutAssociatedClusterConfig() {
    gfsh.executeAndAssertThat("destroy index --name=" + INDEX_1 + " --group=" + GROUP_1)
        .statusIsSuccess().tableHasColumnWithValuesContaining("Member", "server-1")
        .tableHasColumnWithExactValuesInAnyOrder("Status", "OK")
        .tableHasColumnWithExactValuesInAnyOrder("Message", "Destroyed index INDEX1");
    gfsh.executeAndAssertThat("destroy index --name=" + INDEX_2 + " --group=" + GROUP_2)
        .statusIsSuccess().tableHasColumnWithValuesContaining("Member", "server-2")
        .tableHasColumnWithExactValuesInAnyOrder("Status", "OK")
        .tableHasColumnWithExactValuesInAnyOrder("Message", "Destroyed index INDEX2");
    // The index count on each server and the cluster config will now have diverged because the
    // index+region were not originally defined per group but at the cluster level.
    server1.invoke(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache.getQueryService().getIndexes().size()).isEqualTo(1);
    });
    server2.invoke(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache.getQueryService().getIndexes().size()).isEqualTo(1);
    });
    locator.invoke(() -> {
      InternalConfigurationPersistenceService svc =
          ClusterStartupRule.getLocator().getConfigurationPersistenceService();
      RegionConfig regionConfig = svc.getCacheConfig("cluster").findRegionConfiguration(REGION_1);
      // Cluster config still records both indexes even though each server now has only one.
      assertThat(regionConfig.getIndexes().size()).isEqualTo(2);
    });
  }
  /**
   * Destroying an index on a region created only on server-1 (never registered in cluster
   * config) succeeds on the server but reports an ERROR row for the cluster-config update;
   * the cluster-scoped REGION1 indexes are untouched.
   */
  @Test
  @SuppressWarnings("deprecation")
  public void destroyIndexOnRegionNotInClusterConfig() {
    // The cluster-config update is expected to fail; suppress the resulting log noise.
    IgnoredException.addIgnoredException("failed to update cluster config for cluster");
    IgnoredException.addIgnoredException(
        "org.apache.geode.management.internal.exceptions.EntityNotFoundException");
    server1.invoke(() -> {
      Cache cache = ClusterStartupRule.getCache();
      RegionFactory<Object, Object> factory = cache.createRegionFactory(RegionShortcut.REPLICATE);
      factory.create("REGION3");
      cache.getQueryService().createIndex("INDEX3", "key", SEPARATOR + "REGION3");
    });
    gfsh.executeAndAssertThat("destroy index --name=INDEX3" + " --region=REGION3").statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Status", "OK", "ERROR")
        .tableHasColumnWithExactValuesInAnyOrder("Message",
            "Destroyed index INDEX3 on region REGION3", "Region \"REGION3\" not found");
    assertIndexCount(REGION_1, 2);
  }
  /**
   * Asserts that both servers have {@code indexCount} live indexes and that the locator's
   * cluster configuration records the same number for {@code region}.
   */
  private void assertIndexCount(String region, int indexCount) {
    server1.invoke(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache.getQueryService().getIndexes().size()).isEqualTo(indexCount);
    });
    server2.invoke(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache.getQueryService().getIndexes().size()).isEqualTo(indexCount);
    });
    locator.invoke(() -> {
      InternalConfigurationPersistenceService svc =
          ClusterStartupRule.getLocator().getConfigurationPersistenceService();
      RegionConfig regionConfig = svc.getCacheConfig("cluster").findRegionConfiguration(region);
      assertThat(regionConfig.getIndexes().size()).isEqualTo(indexCount);
    });
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder;
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsRequestBuilder;
import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequestBuilder;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequestBuilder;
import org.elasticsearch.action.admin.indices.optimize.OptimizeRequestBuilder;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequestBuilder;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequestBuilder;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
import org.elasticsearch.action.admin.indices.status.IndicesStatusRequestBuilder;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder;
import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersRequestBuilder;
import org.elasticsearch.action.count.CountRequestBuilder;
import org.elasticsearch.action.deletebyquery.DeleteByQueryRequestBuilder;
import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder;
import org.elasticsearch.action.percolate.PercolateRequestBuilder;
import org.elasticsearch.action.percolate.PercolateSourceBuilder;
import org.elasticsearch.action.search.MultiSearchRequestBuilder;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.suggest.SuggestRequestBuilder;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.suggest.SuggestBuilders;
import org.elasticsearch.search.warmer.IndexWarmersMetaData;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.*;
public class IndicesOptionsIntegrationTests extends ElasticsearchIntegrationTest {
    @Test
    public void testSpecifiedIndexUnavailable_multipleIndices() throws Exception {
        // Only "test1" exists; "test2" is deliberately missing for the first three phases.
        // NOTE(review): verify(...) is a helper defined later in this class (outside this view);
        // from its usage here the boolean argument appears to mean "expect the request to fail
        // on the missing index" -- confirm against the helper's definition.
        createIndex("test1");
        ensureYellow();
        // Verify defaults
        // Phase 1: default indices options are strict, so every request type fails on the
        // missing "test2" -- except multi-percolate, which carries errors per item rather
        // than failing the whole request.
        verify(search("test1", "test2"), true);
        verify(msearch(null, "test1", "test2"), true);
        verify(count("test1", "test2"), true);
        verify(clearCache("test1", "test2"), true);
        verify(_flush("test1", "test2"),true);
        verify(segments("test1", "test2"), true);
        verify(stats("test1", "test2"), true);
        verify(status("test1", "test2"), true);
        verify(optimize("test1", "test2"), true);
        verify(refresh("test1", "test2"), true);
        verify(validateQuery("test1", "test2"), true);
        verify(aliasExists("test1", "test2"), true);
        verify(typesExists("test1", "test2"), true);
        verify(deleteByQuery("test1", "test2"), true);
        verify(percolate("test1", "test2"), true);
        verify(mpercolate(null, "test1", "test2"), false);
        verify(suggest("test1", "test2"), true);
        verify(getAliases("test1", "test2"), true);
        verify(getFieldMapping("test1", "test2"), true);
        verify(getMapping("test1", "test2"), true);
        verify(getWarmer("test1", "test2"), true);
        verify(getSettings("test1", "test2"), true);
        // Phase 2: strictExpandOpen set explicitly must behave exactly like the defaults above.
        IndicesOptions options = IndicesOptions.strictExpandOpen();
        verify(search("test1", "test2").setIndicesOptions(options), true);
        verify(msearch(options, "test1", "test2"), true);
        verify(count("test1", "test2").setIndicesOptions(options), true);
        verify(clearCache("test1", "test2").setIndicesOptions(options), true);
        verify(_flush("test1", "test2").setIndicesOptions(options),true);
        verify(segments("test1", "test2").setIndicesOptions(options), true);
        verify(stats("test1", "test2").setIndicesOptions(options), true);
        verify(status("test1", "test2").setIndicesOptions(options), true);
        verify(optimize("test1", "test2").setIndicesOptions(options), true);
        verify(refresh("test1", "test2").setIndicesOptions(options), true);
        verify(validateQuery("test1", "test2").setIndicesOptions(options), true);
        verify(aliasExists("test1", "test2").setIndicesOptions(options), true);
        verify(typesExists("test1", "test2").setIndicesOptions(options), true);
        verify(deleteByQuery("test1", "test2").setIndicesOptions(options), true);
        verify(percolate("test1", "test2").setIndicesOptions(options), true);
        verify(mpercolate(options, "test1", "test2").setIndicesOptions(options), false);
        verify(suggest("test1", "test2").setIndicesOptions(options), true);
        verify(getAliases("test1", "test2").setIndicesOptions(options), true);
        verify(getFieldMapping("test1", "test2").setIndicesOptions(options), true);
        verify(getMapping("test1", "test2").setIndicesOptions(options), true);
        verify(getWarmer("test1", "test2").setIndicesOptions(options), true);
        verify(getSettings("test1", "test2").setIndicesOptions(options), true);
        // Phase 3: lenient options ignore the unavailable index, so everything succeeds
        // even though "test2" still does not exist.
        options = IndicesOptions.lenientExpandOpen();
        verify(search("test1", "test2").setIndicesOptions(options), false);
        verify(msearch(options, "test1", "test2").setIndicesOptions(options), false);
        verify(count("test1", "test2").setIndicesOptions(options), false);
        verify(clearCache("test1", "test2").setIndicesOptions(options), false);
        verify(_flush("test1", "test2").setIndicesOptions(options), false);
        verify(segments("test1", "test2").setIndicesOptions(options), false);
        verify(stats("test1", "test2").setIndicesOptions(options), false);
        verify(status("test1", "test2").setIndicesOptions(options), false);
        verify(optimize("test1", "test2").setIndicesOptions(options), false);
        verify(refresh("test1", "test2").setIndicesOptions(options), false);
        verify(validateQuery("test1", "test2").setIndicesOptions(options), false);
        verify(aliasExists("test1", "test2").setIndicesOptions(options), false);
        verify(typesExists("test1", "test2").setIndicesOptions(options), false);
        verify(deleteByQuery("test1", "test2").setIndicesOptions(options), false);
        verify(percolate("test1", "test2").setIndicesOptions(options), false);
        verify(mpercolate(options, "test1", "test2").setIndicesOptions(options), false);
        verify(suggest("test1", "test2").setIndicesOptions(options), false);
        verify(getAliases("test1", "test2").setIndicesOptions(options), false);
        verify(getFieldMapping("test1", "test2").setIndicesOptions(options), false);
        verify(getMapping("test1", "test2").setIndicesOptions(options), false);
        verify(getWarmer("test1", "test2").setIndicesOptions(options), false);
        verify(getSettings("test1", "test2").setIndicesOptions(options), false);
        // Phase 4: back to strict options, but now "test2" is created, so every request
        // type succeeds under strict semantics as well.
        options = IndicesOptions.strictExpandOpen();
        assertAcked(prepareCreate("test2"));
        ensureYellow();
        verify(search("test1", "test2").setIndicesOptions(options), false);
        verify(msearch(options, "test1", "test2").setIndicesOptions(options), false);
        verify(count("test1", "test2").setIndicesOptions(options), false);
        verify(clearCache("test1", "test2").setIndicesOptions(options), false);
        verify(_flush("test1", "test2").setIndicesOptions(options),false);
        verify(segments("test1", "test2").setIndicesOptions(options), false);
        verify(stats("test1", "test2").setIndicesOptions(options), false);
        verify(status("test1", "test2").setIndicesOptions(options), false);
        verify(optimize("test1", "test2").setIndicesOptions(options), false);
        verify(refresh("test1", "test2").setIndicesOptions(options), false);
        verify(validateQuery("test1", "test2").setIndicesOptions(options), false);
        verify(aliasExists("test1", "test2").setIndicesOptions(options), false);
        verify(typesExists("test1", "test2").setIndicesOptions(options), false);
        verify(deleteByQuery("test1", "test2").setIndicesOptions(options), false);
        verify(percolate("test1", "test2").setIndicesOptions(options), false);
        verify(mpercolate(options, "test1", "test2").setIndicesOptions(options), false);
        verify(suggest("test1", "test2").setIndicesOptions(options), false);
        verify(getAliases("test1", "test2").setIndicesOptions(options), false);
        verify(getFieldMapping("test1", "test2").setIndicesOptions(options), false);
        verify(getMapping("test1", "test2").setIndicesOptions(options), false);
        verify(getWarmer("test1", "test2").setIndicesOptions(options), false);
        verify(getSettings("test1", "test2").setIndicesOptions(options), false);
    }
@Test
// Verifies every indices-aware API against a single index that exists but is CLOSED:
// phase 1: strict options forbidding closed indices -> every request must fail (verify(..., true));
// phase 2: same options but with ignore_unavailable=true -> every request succeeds;
// phase 3: index reopened, strict options again -> every request succeeds.
public void testSpecifiedIndexUnavailable_singleIndexThatIsClosed() throws Exception {
assertAcked(prepareCreate("test1"));
ensureYellow();
// Close the only index, so "unavailable" here means closed rather than missing.
assertAcked(client().admin().indices().prepareClose("test1"));
IndicesOptions options = IndicesOptions.strictExpandOpenAndForbidClosed();
// Phase 1: strict + forbid-closed -> expect failures (second verify argument = true).
verify(search("test1").setIndicesOptions(options), true);
verify(msearch(options, "test1"), true);
verify(count("test1").setIndicesOptions(options), true);
verify(clearCache("test1").setIndicesOptions(options), true);
verify(_flush("test1").setIndicesOptions(options),true);
verify(segments("test1").setIndicesOptions(options), true);
verify(stats("test1").setIndicesOptions(options), true);
verify(optimize("test1").setIndicesOptions(options), true);
verify(refresh("test1").setIndicesOptions(options), true);
verify(validateQuery("test1").setIndicesOptions(options), true);
verify(aliasExists("test1").setIndicesOptions(options), true);
verify(typesExists("test1").setIndicesOptions(options), true);
verify(deleteByQuery("test1").setIndicesOptions(options), true);
verify(percolate("test1").setIndicesOptions(options), true);
verify(mpercolate(options, "test1").setIndicesOptions(options), true);
verify(suggest("test1").setIndicesOptions(options), true);
verify(getAliases("test1").setIndicesOptions(options), true);
verify(getFieldMapping("test1").setIndicesOptions(options), true);
verify(getMapping("test1").setIndicesOptions(options), true);
verify(getWarmer("test1").setIndicesOptions(options), true);
verify(getSettings("test1").setIndicesOptions(options), true);
// Phase 2: flip ignore_unavailable to true (first fromOptions argument), keeping the
// remaining flags from the strict options -> the closed index is skipped, no failures.
options = IndicesOptions.fromOptions(true, options.allowNoIndices(), options.expandWildcardsOpen(), options.expandWildcardsClosed(), options);
verify(search("test1").setIndicesOptions(options), false);
verify(msearch(options, "test1"), false);
verify(count("test1").setIndicesOptions(options), false);
verify(clearCache("test1").setIndicesOptions(options), false);
verify(_flush("test1").setIndicesOptions(options),false);
verify(segments("test1").setIndicesOptions(options), false);
verify(stats("test1").setIndicesOptions(options), false);
verify(optimize("test1").setIndicesOptions(options), false);
verify(refresh("test1").setIndicesOptions(options), false);
verify(validateQuery("test1").setIndicesOptions(options), false);
verify(aliasExists("test1").setIndicesOptions(options), false);
verify(typesExists("test1").setIndicesOptions(options), false);
verify(deleteByQuery("test1").setIndicesOptions(options), false);
verify(percolate("test1").setIndicesOptions(options), false);
verify(mpercolate(options, "test1").setIndicesOptions(options), false);
verify(suggest("test1").setIndicesOptions(options), false);
verify(getAliases("test1").setIndicesOptions(options), false);
verify(getFieldMapping("test1").setIndicesOptions(options), false);
verify(getMapping("test1").setIndicesOptions(options), false);
verify(getWarmer("test1").setIndicesOptions(options), false);
verify(getSettings("test1").setIndicesOptions(options), false);
// Phase 3: reopen the index; strict options should now succeed against an open index.
assertAcked(client().admin().indices().prepareOpen("test1"));
ensureYellow();
options = IndicesOptions.strictExpandOpenAndForbidClosed();
verify(search("test1").setIndicesOptions(options), false);
verify(msearch(options, "test1"), false);
verify(count("test1").setIndicesOptions(options), false);
verify(clearCache("test1").setIndicesOptions(options), false);
verify(_flush("test1").setIndicesOptions(options),false);
verify(segments("test1").setIndicesOptions(options), false);
verify(stats("test1").setIndicesOptions(options), false);
verify(optimize("test1").setIndicesOptions(options), false);
verify(refresh("test1").setIndicesOptions(options), false);
verify(validateQuery("test1").setIndicesOptions(options), false);
verify(aliasExists("test1").setIndicesOptions(options), false);
verify(typesExists("test1").setIndicesOptions(options), false);
verify(deleteByQuery("test1").setIndicesOptions(options), false);
verify(percolate("test1").setIndicesOptions(options), false);
verify(mpercolate(options, "test1").setIndicesOptions(options), false);
verify(suggest("test1").setIndicesOptions(options), false);
verify(getAliases("test1").setIndicesOptions(options), false);
verify(getFieldMapping("test1").setIndicesOptions(options), false);
verify(getMapping("test1").setIndicesOptions(options), false);
verify(getWarmer("test1").setIndicesOptions(options), false);
verify(getSettings("test1").setIndicesOptions(options), false);
}
@Test
// Verifies every indices-aware API against a single index that DOES NOT EXIST:
// phase 1: strict options -> every request must fail;
// phase 2: same options with ignore_unavailable=true -> every request succeeds;
// phase 3: index created, strict options again -> every request succeeds.
public void testSpecifiedIndexUnavailable_singleIndex() throws Exception {
IndicesOptions options = IndicesOptions.strictExpandOpenAndForbidClosed();
// Phase 1: "test1" is missing, strict options -> expect failures.
verify(search("test1").setIndicesOptions(options), true);
verify(msearch(options, "test1"), true);
verify(count("test1").setIndicesOptions(options), true);
verify(clearCache("test1").setIndicesOptions(options), true);
verify(_flush("test1").setIndicesOptions(options),true);
verify(segments("test1").setIndicesOptions(options), true);
verify(stats("test1").setIndicesOptions(options), true);
verify(optimize("test1").setIndicesOptions(options), true);
verify(refresh("test1").setIndicesOptions(options), true);
verify(validateQuery("test1").setIndicesOptions(options), true);
verify(aliasExists("test1").setIndicesOptions(options), true);
verify(typesExists("test1").setIndicesOptions(options), true);
verify(deleteByQuery("test1").setIndicesOptions(options), true);
verify(percolate("test1").setIndicesOptions(options), true);
verify(suggest("test1").setIndicesOptions(options), true);
verify(getAliases("test1").setIndicesOptions(options), true);
verify(getFieldMapping("test1").setIndicesOptions(options), true);
verify(getMapping("test1").setIndicesOptions(options), true);
verify(getWarmer("test1").setIndicesOptions(options), true);
verify(getSettings("test1").setIndicesOptions(options), true);
// Phase 2: ignore_unavailable=true -> the missing index is silently skipped.
options = IndicesOptions.fromOptions(true, options.allowNoIndices(), options.expandWildcardsOpen(), options.expandWildcardsClosed(), options);
verify(search("test1").setIndicesOptions(options), false);
verify(msearch(options, "test1"), false);
verify(count("test1").setIndicesOptions(options), false);
verify(clearCache("test1").setIndicesOptions(options), false);
verify(_flush("test1").setIndicesOptions(options),false);
verify(segments("test1").setIndicesOptions(options), false);
verify(stats("test1").setIndicesOptions(options), false);
verify(optimize("test1").setIndicesOptions(options), false);
verify(refresh("test1").setIndicesOptions(options), false);
verify(validateQuery("test1").setIndicesOptions(options), false);
verify(aliasExists("test1").setIndicesOptions(options), false);
verify(typesExists("test1").setIndicesOptions(options), false);
verify(deleteByQuery("test1").setIndicesOptions(options), false);
verify(percolate("test1").setIndicesOptions(options), false);
verify(suggest("test1").setIndicesOptions(options), false);
verify(getAliases("test1").setIndicesOptions(options), false);
verify(getFieldMapping("test1").setIndicesOptions(options), false);
verify(getMapping("test1").setIndicesOptions(options), false);
verify(getWarmer("test1").setIndicesOptions(options), false);
verify(getSettings("test1").setIndicesOptions(options), false);
// Phase 3: create the index; strict options should now succeed.
assertAcked(prepareCreate("test1"));
ensureYellow();
options = IndicesOptions.strictExpandOpenAndForbidClosed();
verify(search("test1").setIndicesOptions(options), false);
verify(msearch(options, "test1"), false);
verify(count("test1").setIndicesOptions(options), false);
verify(clearCache("test1").setIndicesOptions(options), false);
verify(_flush("test1").setIndicesOptions(options),false);
verify(segments("test1").setIndicesOptions(options), false);
verify(stats("test1").setIndicesOptions(options), false);
verify(optimize("test1").setIndicesOptions(options), false);
verify(refresh("test1").setIndicesOptions(options), false);
verify(validateQuery("test1").setIndicesOptions(options), false);
verify(aliasExists("test1").setIndicesOptions(options), false);
verify(typesExists("test1").setIndicesOptions(options), false);
verify(deleteByQuery("test1").setIndicesOptions(options), false);
verify(percolate("test1").setIndicesOptions(options), false);
verify(suggest("test1").setIndicesOptions(options), false);
verify(getAliases("test1").setIndicesOptions(options), false);
verify(getFieldMapping("test1").setIndicesOptions(options), false);
verify(getMapping("test1").setIndicesOptions(options), false);
verify(getWarmer("test1").setIndicesOptions(options), false);
verify(getSettings("test1").setIndicesOptions(options), false);
}
@Test
// Snapshot/restore counterpart of the "specified index unavailable" tests: "test2" is
// missing at first, so strict options must fail, lenient options must succeed, and once
// "test2" is created strict options succeed too.
public void testSpecifiedIndexUnavailable_snapshotRestore() throws Exception {
createIndex("test1");
ensureGreen("test1");
waitForRelocation();
// A filesystem repository is required before any snapshot can be taken.
PutRepositoryResponse putRepositoryResponse = client().admin().cluster().preparePutRepository("dummy-repo")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", randomRepoPath())).get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
// "snap1" contains only "test1"; "test2" does not exist yet.
client().admin().cluster().prepareCreateSnapshot("dummy-repo", "snap1").setWaitForCompletion(true).get();
// Default options: referencing the missing "test2" must fail for both snapshot and restore.
verify(snapshot("snap2", "test1", "test2"), true);
verify(restore("snap1", "test1", "test2"), true);
IndicesOptions options = IndicesOptions.strictExpandOpen();
verify(snapshot("snap2", "test1", "test2").setIndicesOptions(options), true);
verify(restore("snap1", "test1", "test2").setIndicesOptions(options), true);
// Lenient options: the missing index is ignored, so "snap2" is created (and restorable).
options = IndicesOptions.lenientExpandOpen();
verify(snapshot("snap2", "test1", "test2").setIndicesOptions(options), false);
verify(restore("snap2", "test1", "test2").setIndicesOptions(options), false);
options = IndicesOptions.strictExpandOpen();
createIndex("test2");
//TODO: temporary work-around for #5531
ensureGreen("test2");
waitForRelocation();
// With both indices present, strict options succeed as well.
verify(snapshot("snap3", "test1", "test2").setIndicesOptions(options), false);
verify(restore("snap3", "test1", "test2").setIndicesOptions(options), false);
}
@Test
/**
 * Verifies wildcard index-expression handling for every indices-aware API in four phases:
 * (1) default options with an empty expression (resolves to all indices, none exist yet),
 * (2) forced allow_no_indices=true with the empty expression,
 * (3) default options with wildcard expressions ("foo*", then "foo*"/"bar*") against one
 *     existing index, and
 * (4) forced allow_no_indices=true with the two-wildcard expression.
 * Fix over the previous revision: in the two "verify defaults" phases, getSettings no
 * longer calls setIndicesOptions(options) — the explicit options from the forced-options
 * phase were leaking into sections whose whole point is testing the per-API defaults
 * (every sibling call in those sections uses defaults).
 */
public void testWildcardBehaviour() throws Exception {
    // Verify defaults for wildcards, when specifying no indices (*, _all, /)
    String[] indices = Strings.EMPTY_ARRAY;
    verify(search(indices), false);
    verify(msearch(null, indices), false);
    verify(count(indices), false);
    verify(clearCache(indices), false);
    verify(_flush(indices), false);
    verify(segments(indices), false);
    verify(stats(indices), false);
    verify(status(indices), false);
    verify(optimize(indices), false);
    verify(refresh(indices), false);
    // validateQuery and deleteByQuery default to strict handling, so with no indices
    // in the cluster they are the only APIs expected to fail here.
    verify(validateQuery(indices), true);
    verify(aliasExists(indices), false);
    verify(typesExists(indices), false);
    verify(deleteByQuery(indices), true);
    verify(percolate(indices), false);
    verify(mpercolate(null, indices), false);
    verify(suggest(indices), false);
    verify(getAliases(indices), false);
    verify(getFieldMapping(indices), false);
    verify(getMapping(indices), false);
    verify(getWarmer(indices), false);
    verify(getSettings(indices), false);

    // Now force allow_no_indices=true
    IndicesOptions options = IndicesOptions.fromOptions(false, true, true, false);
    verify(search(indices).setIndicesOptions(options), false);
    verify(msearch(options, indices).setIndicesOptions(options), false);
    verify(count(indices).setIndicesOptions(options), false);
    verify(clearCache(indices).setIndicesOptions(options), false);
    verify(_flush(indices).setIndicesOptions(options), false);
    verify(segments(indices).setIndicesOptions(options), false);
    verify(stats(indices).setIndicesOptions(options), false);
    verify(status(indices).setIndicesOptions(options), false);
    verify(optimize(indices).setIndicesOptions(options), false);
    verify(refresh(indices).setIndicesOptions(options), false);
    verify(validateQuery(indices).setIndicesOptions(options), false);
    verify(aliasExists(indices).setIndicesOptions(options), false);
    verify(typesExists(indices).setIndicesOptions(options), false);
    verify(deleteByQuery(indices).setIndicesOptions(options), false);
    verify(percolate(indices).setIndicesOptions(options), false);
    verify(mpercolate(options, indices), false);
    verify(suggest(indices).setIndicesOptions(options), false);
    verify(getAliases(indices).setIndicesOptions(options), false);
    verify(getFieldMapping(indices).setIndicesOptions(options), false);
    verify(getMapping(indices).setIndicesOptions(options), false);
    verify(getWarmer(indices).setIndicesOptions(options), false);
    verify(getSettings(indices).setIndicesOptions(options), false);

    // One matching index from here on; the search-style verifies expect exactly 1 hit.
    assertAcked(prepareCreate("foobar"));
    client().prepareIndex("foobar", "type", "1").setSource("k", "v").setRefresh(true).execute().actionGet();

    // Verify defaults for wildcards, with one wildcard expression and one existing index
    indices = new String[]{"foo*"};
    verify(search(indices), false, 1);
    verify(msearch(null, indices), false, 1);
    verify(count(indices), false, 1);
    verify(clearCache(indices), false);
    verify(_flush(indices), false);
    verify(segments(indices), false);
    verify(stats(indices), false);
    verify(status(indices), false);
    verify(optimize(indices), false);
    verify(refresh(indices), false);
    verify(validateQuery(indices), false);
    verify(aliasExists(indices), false);
    verify(typesExists(indices), false);
    verify(deleteByQuery(indices), false);
    verify(percolate(indices), false);
    verify(mpercolate(null, indices), false);
    verify(suggest(indices), false);
    verify(getAliases(indices), false);
    verify(getFieldMapping(indices), false);
    verify(getMapping(indices), false);
    verify(getWarmer(indices), false);
    verify(getSettings(indices), false);

    // Verify defaults for wildcards, with two wildcard expression and one existing index
    indices = new String[]{"foo*", "bar*"};
    verify(search(indices), false, 1);
    verify(msearch(null, indices), false, 1);
    verify(count(indices), false, 1);
    verify(clearCache(indices), false);
    verify(_flush(indices), false);
    verify(segments(indices), false);
    verify(stats(indices), false);
    verify(status(indices), false);
    verify(optimize(indices), false);
    verify(refresh(indices), false);
    // "bar*" matches nothing: only the strict-by-default APIs fail.
    verify(validateQuery(indices), true);
    verify(aliasExists(indices), false);
    verify(typesExists(indices), false);
    verify(deleteByQuery(indices), true);
    verify(percolate(indices), false);
    verify(mpercolate(null, indices), false);
    verify(suggest(indices), false);
    verify(getAliases(indices), false);
    verify(getFieldMapping(indices), false);
    verify(getMapping(indices), false);
    verify(getWarmer(indices), false);
    verify(getSettings(indices), false);

    // Now force allow_no_indices=true
    options = IndicesOptions.fromOptions(false, true, true, false);
    verify(search(indices).setIndicesOptions(options), false, 1);
    verify(msearch(options, indices).setIndicesOptions(options), false, 1);
    verify(count(indices).setIndicesOptions(options), false, 1);
    verify(clearCache(indices).setIndicesOptions(options), false);
    verify(_flush(indices).setIndicesOptions(options), false);
    verify(segments(indices).setIndicesOptions(options), false);
    verify(stats(indices).setIndicesOptions(options), false);
    verify(status(indices).setIndicesOptions(options), false);
    verify(optimize(indices).setIndicesOptions(options), false);
    verify(refresh(indices).setIndicesOptions(options), false);
    verify(validateQuery(indices).setIndicesOptions(options), false);
    verify(aliasExists(indices).setIndicesOptions(options), false);
    verify(typesExists(indices).setIndicesOptions(options), false);
    verify(deleteByQuery(indices).setIndicesOptions(options), false);
    verify(percolate(indices).setIndicesOptions(options), false);
    verify(mpercolate(options, indices), false);
    verify(suggest(indices).setIndicesOptions(options), false);
    verify(getAliases(indices).setIndicesOptions(options), false);
    verify(getFieldMapping(indices).setIndicesOptions(options), false);
    verify(getMapping(indices).setIndicesOptions(options), false);
    verify(getWarmer(indices).setIndicesOptions(options), false);
    verify(getSettings(indices).setIndicesOptions(options), false);
}
@Test
// Snapshot/restore counterpart of testWildcardBehaviour: with allow_no_indices=false a
// wildcard that matches nothing ("bar*", later "baz*") must fail, while lenient expansion
// simply drops it.
public void testWildcardBehaviour_snapshotRestore() throws Exception {
createIndex("foobar");
ensureGreen("foobar");
waitForRelocation();
PutRepositoryResponse putRepositoryResponse = client().admin().cluster().preparePutRepository("dummy-repo")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", randomRepoPath())).get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
// "snap1" contains "foobar" only.
client().admin().cluster().prepareCreateSnapshot("dummy-repo", "snap1").setWaitForCompletion(true).get();
// allow_no_indices=false: "bar*" matches nothing -> both operations fail.
IndicesOptions options = IndicesOptions.fromOptions(false, false, true, false);
verify(snapshot("snap2", "foo*", "bar*").setIndicesOptions(options), true);
verify(restore("snap1", "foo*", "bar*").setIndicesOptions(options), true);
// strictExpandOpen allows empty wildcard matches -> "snap2" is created and restorable.
options = IndicesOptions.strictExpandOpen();
verify(snapshot("snap2", "foo*", "bar*").setIndicesOptions(options), false);
verify(restore("snap2", "foo*", "bar*").setIndicesOptions(options), false);
assertAcked(prepareCreate("barbaz"));
//TODO: temporary work-around for #5531
ensureGreen("barbaz");
waitForRelocation();
// Now "bar*" matches "barbaz", so even allow_no_indices=false succeeds.
options = IndicesOptions.fromOptions(false, false, true, false);
verify(snapshot("snap3", "foo*", "bar*").setIndicesOptions(options), false);
verify(restore("snap3", "foo*", "bar*").setIndicesOptions(options), false);
// "baz*" matches nothing -> snapshot "snap4" fails, and restoring from "snap3" with the
// unmatched wildcard fails too.
options = IndicesOptions.fromOptions(false, false, true, false);
verify(snapshot("snap4", "foo*", "baz*").setIndicesOptions(options), true);
verify(restore("snap3", "foo*", "baz*").setIndicesOptions(options), true);
}
@Test
// With lenient options, searching only missing indices returns an empty result set rather
// than failing; a search with no indices at all still hits the existing data.
public void testAllMissing_lenient() throws Exception {
    createIndex("test1");
    client().prepareIndex("test1", "type", "1").setSource("k", "v").setRefresh(true).execute().actionGet();

    // One missing index: lenient expansion yields zero hits instead of an error.
    SearchResponse searchResponse = client().prepareSearch("test2")
            .setQuery(matchAllQuery())
            .setIndicesOptions(IndicesOptions.lenientExpandOpen())
            .execute().actionGet();
    assertHitCount(searchResponse, 0l);

    // Several missing indices behave the same way.
    searchResponse = client().prepareSearch("test2", "test3")
            .setIndicesOptions(IndicesOptions.lenientExpandOpen())
            .setQuery(matchAllQuery())
            .execute().actionGet();
    assertHitCount(searchResponse, 0l);

    //you should still be able to run empty searches without things blowing up
    searchResponse = client().prepareSearch()
            .setQuery(matchAllQuery())
            .setIndicesOptions(IndicesOptions.lenientExpandOpen())
            .execute().actionGet();
    assertHitCount(searchResponse, 1l);
}
@Test
// With strict (default) options, searching missing indices must throw IndexMissingException;
// a search with no indices specified still works against the existing index.
public void testAllMissing_strict() throws Exception {
    createIndex("test1");
    ensureYellow();

    // A single missing index must be rejected.
    try {
        client().prepareSearch("test2")
                .setQuery(matchAllQuery())
                .execute().actionGet();
        fail("Exception should have been thrown.");
    } catch (IndexMissingException expected) {
        // expected: "test2" does not exist
    }

    // Several missing indices must be rejected as well.
    try {
        client().prepareSearch("test2", "test3")
                .setQuery(matchAllQuery())
                .execute().actionGet();
        fail("Exception should have been thrown.");
    } catch (IndexMissingException expected) {
        // expected: neither index exists
    }

    //you should still be able to run empty searches without things blowing up
    client().prepareSearch().setQuery(matchAllQuery()).execute().actionGet();
}
@Test
// For now don't handle closed indices
// Closing one of two named indices makes strict requests fail; forbid_closed=false with
// ignore_unavailable lets them succeed, and wildcard/empty expressions skip the closed
// index by default.
public void testCloseApi_specifiedIndices() throws Exception {
createIndex("test1", "test2");
ensureYellow();
// Both indices open: requests succeed.
verify(search("test1", "test2"), false);
verify(count("test1", "test2"), false);
assertAcked(client().admin().indices().prepareClose("test2").get());
// Naming the closed index explicitly fails under default (forbid-closed) options.
verify(search("test1", "test2"), true);
verify(count("test1", "test2"), true);
// ignore_unavailable=true + allow_no_indices=true on top of the strict base options
// makes the closed index get skipped.
IndicesOptions options = IndicesOptions.fromOptions(true, true, true, false, IndicesOptions.strictExpandOpenAndForbidClosed());
verify(search("test1", "test2").setIndicesOptions(options), false);
verify(count("test1", "test2").setIndicesOptions(options), false);
// Implicit _all and wildcards only expand to open indices, so these succeed too.
verify(search(), false);
verify(count(), false);
verify(search("t*"), false);
verify(count("t*"), false);
}
@Test
// Close/open with wildcards: a wildcard that only matches already-closed (or already-open)
// indices is an error, because wildcard expansion finds nothing left to act on.
public void testCloseApi_wildcards() throws Exception {
createIndex("foo", "foobar", "bar", "barbaz");
ensureYellow();
// First close of "bar*" succeeds; repeating it fails (both matches already closed).
verify(client().admin().indices().prepareClose("bar*"), false);
verify(client().admin().indices().prepareClose("bar*"), true);
verify(client().admin().indices().prepareClose("foo*"), false);
verify(client().admin().indices().prepareClose("foo*"), true);
// Everything is closed now, so closing _all has nothing to do -> failure.
verify(client().admin().indices().prepareClose("_all"), true);
// Reopen in two steps; a second open of _all again has nothing left to open.
verify(client().admin().indices().prepareOpen("bar*"), false);
verify(client().admin().indices().prepareOpen("_all"), false);
verify(client().admin().indices().prepareOpen("_all"), true);
}
@Test
// Deleting a concrete (non-wildcard) missing index fails and must not touch other indices;
// deleting an existing index succeeds and removes it.
public void testDeleteIndex() throws Exception {
createIndex("foobar");
ensureYellow();
// "foo" does not exist -> failure, and "foobar" must be untouched.
verify(client().admin().indices().prepareDelete("foo"), true);
assertThat(client().admin().indices().prepareExists("foobar").get().isExists(), equalTo(true));
verify(client().admin().indices().prepareDelete("foobar"), false);
assertThat(client().admin().indices().prepareExists("foobar").get().isExists(), equalTo(false));
}
@Test
// Wildcard deletes are lenient: a wildcard (or _all) matching nothing is a no-op success,
// and a matching wildcard deletes exactly the matched indices.
public void testDeleteIndex_wildcard() throws Exception {
// No indices exist yet, but deleting _all still succeeds.
verify(client().admin().indices().prepareDelete("_all"), false);
createIndex("foo", "foobar", "bar", "barbaz");
ensureYellow();
// "foo*" removes foo and foobar only.
verify(client().admin().indices().prepareDelete("foo*"), false);
assertThat(client().admin().indices().prepareExists("foo").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareExists("foobar").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareExists("bar").get().isExists(), equalTo(true));
assertThat(client().admin().indices().prepareExists("barbaz").get().isExists(), equalTo(true));
// Repeating the now-empty wildcard delete still succeeds; _all removes the rest.
verify(client().admin().indices().prepareDelete("foo*"), false);
verify(client().admin().indices().prepareDelete("_all"), false);
assertThat(client().admin().indices().prepareExists("foo").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareExists("foobar").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareExists("bar").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareExists("barbaz").get().isExists(), equalTo(false));
}
@Test
// Delete-mapping on a concrete missing index fails without touching other indices'
// mappings; on an existing index it removes the type.
public void testDeleteMapping() throws Exception {
assertAcked(prepareCreate("foobar").addMapping("type1", "field", "type=string"));
ensureGreen();
// "foo" does not exist -> failure, mapping on "foobar" untouched.
verify(client().admin().indices().prepareDeleteMapping("foo").setType("type1"), true);
assertThat(client().admin().indices().prepareTypesExists("foobar").setTypes("type1").get().isExists(), equalTo(true));
verify(client().admin().indices().prepareDeleteMapping("foobar").setType("type1"), false);
assertThat(client().admin().indices().prepareTypesExists("foobar").setTypes("type1").get().isExists(), equalTo(false));
}
@Test
// Wildcard delete-mapping: fails when the wildcard/_all matches no index, succeeds and
// removes the type from exactly the matched indices otherwise.
public void testDeleteMapping_wildcard() throws Exception {
// No indices exist yet, so _all matches nothing -> failure.
verify(client().admin().indices().prepareDeleteMapping("_all").setType("type1"), true);
assertAcked(prepareCreate("foo").addMapping("type1", "field", "type=string"));
assertAcked(prepareCreate("foobar").addMapping("type1", "field", "type=string"));
assertAcked(prepareCreate("bar").addMapping("type1", "field", "type=string"));
assertAcked(prepareCreate("barbaz").addMapping("type1", "field", "type=string"));
// we wait for green to make sure indices with mappings have been created on all relevant
// nodes, and that recovery won't re-introduce a mapping
ensureGreen();
// "foo*" strips type1 from foo and foobar only.
verify(client().admin().indices().prepareDeleteMapping("foo*").setType("type1"), false);
assertThat(client().admin().indices().prepareTypesExists("foo").setTypes("type1").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareTypesExists("foobar").setTypes("type1").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareTypesExists("bar").setTypes("type1").get().isExists(), equalTo(true));
assertThat(client().admin().indices().prepareTypesExists("barbaz").setTypes("type1").get().isExists(), equalTo(true));
// After deleting the foo* indices, the wildcard matches nothing again -> failure.
assertAcked(client().admin().indices().prepareDelete("foo*"));
verify(client().admin().indices().prepareDeleteMapping("foo*").setType("type1"), true);
verify(client().admin().indices().prepareDeleteMapping("_all").setType("type1"), false);
assertThat(client().admin().indices().prepareTypesExists("bar").setTypes("type1").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareTypesExists("barbaz").setTypes("type1").get().isExists(), equalTo(false));
}
@Test
// Registering a warmer against a concrete existing index succeeds and the warmer is
// retrievable afterwards.
public void testPutWarmer() throws Exception {
createIndex("foobar");
ensureYellow();
verify(client().admin().indices().preparePutWarmer("warmer1").setSearchRequest(client().prepareSearch().setIndices("foobar").setQuery(QueryBuilders.matchAllQuery())), false);
assertThat(client().admin().indices().prepareGetWarmers("foobar").setWarmers("warmer1").get().getWarmers().size(), equalTo(1));
}
@Test
// Registering a warmer with a wildcard target attaches it only to matching indices;
// with no indices specified it is attached to all of them.
public void testPutWarmer_wildcard() throws Exception {
createIndex("foo", "foobar", "bar", "barbaz");
ensureYellow();
// "foo*" -> warmer1 only on foo and foobar.
verify(client().admin().indices().preparePutWarmer("warmer1").setSearchRequest(client().prepareSearch().setIndices("foo*").setQuery(QueryBuilders.matchAllQuery())), false);
assertThat(client().admin().indices().prepareGetWarmers("foo").setWarmers("warmer1").get().getWarmers().size(), equalTo(1));
assertThat(client().admin().indices().prepareGetWarmers("foobar").setWarmers("warmer1").get().getWarmers().size(), equalTo(1));
assertThat(client().admin().indices().prepareGetWarmers("bar").setWarmers("warmer1").get().getWarmers().size(), equalTo(0));
assertThat(client().admin().indices().prepareGetWarmers("barbaz").setWarmers("warmer1").get().getWarmers().size(), equalTo(0));
// Empty index list -> warmer2 on every index.
verify(client().admin().indices().preparePutWarmer("warmer2").setSearchRequest(client().prepareSearch().setIndices().setQuery(QueryBuilders.matchAllQuery())), false);
assertThat(client().admin().indices().prepareGetWarmers("foo").setWarmers("warmer2").get().getWarmers().size(), equalTo(1));
assertThat(client().admin().indices().prepareGetWarmers("foobar").setWarmers("warmer2").get().getWarmers().size(), equalTo(1));
assertThat(client().admin().indices().prepareGetWarmers("bar").setWarmers("warmer2").get().getWarmers().size(), equalTo(1));
assertThat(client().admin().indices().prepareGetWarmers("barbaz").setWarmers("warmer2").get().getWarmers().size(), equalTo(1));
}
@Test
// Adding an alias to a concrete existing index succeeds and the alias is visible afterwards.
public void testPutAlias() throws Exception {
createIndex("foobar");
ensureYellow();
verify(client().admin().indices().prepareAliases().addAlias("foobar", "foobar_alias"), false);
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("foobar").get().exists(), equalTo(true));
}
@Test
// Adding an alias with a wildcard attaches it only to matching indices; "*" attaches it
// to every index.
public void testPutAlias_wildcard() throws Exception {
createIndex("foo", "foobar", "bar", "barbaz");
ensureYellow();
// "foo*" -> alias on foo and foobar only.
verify(client().admin().indices().prepareAliases().addAlias("foo*", "foobar_alias"), false);
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("foo").get().exists(), equalTo(true));
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("foobar").get().exists(), equalTo(true));
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("bar").get().exists(), equalTo(false));
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("barbaz").get().exists(), equalTo(false));
// "*" -> alias on all four indices.
verify(client().admin().indices().prepareAliases().addAlias("*", "foobar_alias"), false);
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("foo").get().exists(), equalTo(true));
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("foobar").get().exists(), equalTo(true));
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("bar").get().exists(), equalTo(true));
assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("barbaz").get().exists(), equalTo(true));
}
@Test
// Like testDeleteMapping_wildcard but with a wildcard on the TYPE as well: "type*" must
// remove every matching type from every index matched by the index expression.
public void testDeleteMapping_typeWildcard() throws Exception {
// No indices yet -> _all matches nothing -> failure.
verify(client().admin().indices().prepareDeleteMapping("_all").setType("type1"), true);
assertAcked(prepareCreate("foo").addMapping("type1", "field", "type=string"));
assertAcked(prepareCreate("foobar").addMapping("type2", "field", "type=string"));
assertAcked(prepareCreate("bar").addMapping("type3", "field", "type=string"));
assertAcked(prepareCreate("barbaz").addMapping("type4", "field", "type=string"));
ensureGreen();
assertThat(client().admin().indices().prepareTypesExists("foo").setTypes("type1").get().isExists(), equalTo(true));
assertThat(client().admin().indices().prepareTypesExists("foobar").setTypes("type2").get().isExists(), equalTo(true));
assertThat(client().admin().indices().prepareTypesExists("bar").setTypes("type3").get().isExists(), equalTo(true));
assertThat(client().admin().indices().prepareTypesExists("barbaz").setTypes("type4").get().isExists(), equalTo(true));
// Index wildcard "foo*" + type wildcard "type*" strips type1/type2 from foo/foobar only.
verify(client().admin().indices().prepareDeleteMapping("foo*").setType("type*"), false);
assertThat(client().admin().indices().prepareTypesExists("foo").setTypes("type1").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareTypesExists("foobar").setTypes("type2").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareTypesExists("bar").setTypes("type3").get().isExists(), equalTo(true));
assertThat(client().admin().indices().prepareTypesExists("barbaz").setTypes("type4").get().isExists(), equalTo(true));
// Once the foo* indices are gone the wildcard matches nothing again -> failure;
// _all with explicit types still removes the remaining mappings.
assertAcked(client().admin().indices().prepareDelete("foo*"));
verify(client().admin().indices().prepareDeleteMapping("foo*").setType("type1"), true);
verify(client().admin().indices().prepareDeleteMapping("_all").setType("type3", "type4"), false);
assertThat(client().admin().indices().prepareTypesExists("bar").setTypes("type3").get().isExists(), equalTo(false));
assertThat(client().admin().indices().prepareTypesExists("barbaz").setTypes("type4").get().isExists(), equalTo(false));
}
@Test
// Deleting a warmer on a concrete missing index fails and leaves other indices' warmers
// intact; on the existing index it removes the warmer.
public void testDeleteWarmer() throws Exception {
// Pre-register warmer "test1" on "foobar" via index metadata at creation time.
IndexWarmersMetaData.Entry entry = new IndexWarmersMetaData.Entry(
"test1", new String[]{"typ1"}, false, new BytesArray("{\"query\" : { \"match_all\" : {}}}")
);
assertAcked(prepareCreate("foobar").addCustom(new IndexWarmersMetaData(entry)));
ensureYellow();
// "foo" does not exist -> failure, warmer on "foobar" untouched.
verify(client().admin().indices().prepareDeleteWarmer().setIndices("foo").setNames("test1"), true);
assertThat(client().admin().indices().prepareGetWarmers("foobar").setWarmers("test1").get().getWarmers().size(), equalTo(1));
verify(client().admin().indices().prepareDeleteWarmer().setIndices("foobar").setNames("test1"), false);
assertThat(client().admin().indices().prepareGetWarmers("foobar").setWarmers("test1").get().getWarmers().size(), equalTo(0));
}
@Test
// Wildcard delete-warmer: fails when the wildcard/_all matches no index, otherwise removes
// the warmer from exactly the matched indices.
public void testDeleteWarmer_wildcard() throws Exception {
// No indices yet -> _all matches nothing -> failure.
verify(client().admin().indices().prepareDeleteWarmer().setIndices("_all").setNames("test1"), true);
// Same warmer entry registered on all four indices at creation time.
IndexWarmersMetaData.Entry entry = new IndexWarmersMetaData.Entry(
"test1", new String[]{"type1"}, false, new BytesArray("{\"query\" : { \"match_all\" : {}}}")
);
assertAcked(prepareCreate("foo").addCustom(new IndexWarmersMetaData(entry)));
assertAcked(prepareCreate("foobar").addCustom(new IndexWarmersMetaData(entry)));
assertAcked(prepareCreate("bar").addCustom(new IndexWarmersMetaData(entry)));
assertAcked(prepareCreate("barbaz").addCustom(new IndexWarmersMetaData(entry)));
ensureYellow();
// "foo*" removes the warmer from foo and foobar only.
verify(client().admin().indices().prepareDeleteWarmer().setIndices("foo*").setNames("test1"), false);
assertThat(client().admin().indices().prepareGetWarmers("foo").setWarmers("test1").get().getWarmers().size(), equalTo(0));
assertThat(client().admin().indices().prepareGetWarmers("foobar").setWarmers("test1").get().getWarmers().size(), equalTo(0));
assertThat(client().admin().indices().prepareGetWarmers("bar").setWarmers("test1").get().getWarmers().size(), equalTo(1));
assertThat(client().admin().indices().prepareGetWarmers("barbaz").setWarmers("test1").get().getWarmers().size(), equalTo(1));
// With foo* indices deleted the wildcard matches nothing -> failure; _all clears the rest.
assertAcked(client().admin().indices().prepareDelete("foo*"));
verify(client().admin().indices().prepareDeleteWarmer().setIndices("foo*").setNames("test1"), true);
verify(client().admin().indices().prepareDeleteWarmer().setIndices("_all").setNames("test1"), false);
assertThat(client().admin().indices().prepareGetWarmers("bar").setWarmers("test1").get().getWarmers().size(), equalTo(0));
assertThat(client().admin().indices().prepareGetWarmers("barbaz").setWarmers("test1").get().getWarmers().size(), equalTo(0));
}
@Test
public void testPutMapping() throws Exception {
    // Mapping updates must fail while no index exists, for a concrete name and for _all.
    verify(client().admin().indices().preparePutMapping("foo").setType("type1").setSource("field", "type=string"), true);
    verify(client().admin().indices().preparePutMapping("_all").setType("type1").setSource("field", "type=string"), true);
    createIndex("foo", "foobar", "bar", "barbaz");
    ensureYellow();
    // Concrete index name.
    verify(client().admin().indices().preparePutMapping("foo").setType("type1").setSource("field", "type=string"), false);
    assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type1"), notNullValue());
    // Wildcard expression resolves to bar and barbaz.
    verify(client().admin().indices().preparePutMapping("b*").setType("type1").setSource("field", "type=string"), false);
    assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type1"), notNullValue());
    assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type1"), notNullValue());
    // _all applies the mapping to every index.
    verify(client().admin().indices().preparePutMapping("_all").setType("type2").setSource("field", "type=string"), false);
    assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type2"), notNullValue());
    assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar").get("type2"), notNullValue());
    assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type2"), notNullValue());
    assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type2"), notNullValue());
    // No indices given behaves like _all.
    verify(client().admin().indices().preparePutMapping().setType("type3").setSource("field", "type=string"), false);
    assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type3"), notNullValue());
    assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar").get("type3"), notNullValue());
    assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type3"), notNullValue());
    assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type3"), notNullValue());
    // A wildcard matching no index must fail.
    verify(client().admin().indices().preparePutMapping("c*").setType("type1").setSource("field", "type=string"), true);
    // Mappings can still be added to a closed index.
    assertAcked(client().admin().indices().prepareClose("barbaz").get());
    verify(client().admin().indices().preparePutMapping("barbaz").setType("type4").setSource("field", "type=string"), false);
    assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type4"), notNullValue());
}
@Test
public void testUpdateSettings() throws Exception {
    // Settings updates must fail while no index exists, for a concrete name and for _all.
    verify(client().admin().indices().prepareUpdateSettings("foo").setSettings(ImmutableSettings.builder().put("a", "b")), true);
    verify(client().admin().indices().prepareUpdateSettings("_all").setSettings(ImmutableSettings.builder().put("a", "b")), true);
    createIndex("foo", "foobar", "bar", "barbaz");
    ensureYellow();
    // Close every index so that non-dynamic settings may be updated.
    assertAcked(client().admin().indices().prepareClose("_all").get());
    verify(client().admin().indices().prepareUpdateSettings("foo").setSettings(ImmutableSettings.builder().put("a", "b")), false);
    verify(client().admin().indices().prepareUpdateSettings("bar*").setSettings(ImmutableSettings.builder().put("a", "b")), false);
    verify(client().admin().indices().prepareUpdateSettings("_all").setSettings(ImmutableSettings.builder().put("c", "d")), false);
    GetSettingsResponse settingsResponse = client().admin().indices().prepareGetSettings("foo").get();
    assertThat(settingsResponse.getSetting("foo", "index.a"), equalTo("b"));
    settingsResponse = client().admin().indices().prepareGetSettings("bar*").get();
    assertThat(settingsResponse.getSetting("bar", "index.a"), equalTo("b"));
    assertThat(settingsResponse.getSetting("barbaz", "index.a"), equalTo("b"));
    settingsResponse = client().admin().indices().prepareGetSettings("_all").get();
    assertThat(settingsResponse.getSetting("foo", "index.c"), equalTo("d"));
    assertThat(settingsResponse.getSetting("foobar", "index.c"), equalTo("d"));
    assertThat(settingsResponse.getSetting("bar", "index.c"), equalTo("d"));
    assertThat(settingsResponse.getSetting("barbaz", "index.c"), equalTo("d"));
    // Once the indices are open again, a non-dynamic settings update must be rejected.
    assertAcked(client().admin().indices().prepareOpen("_all").get());
    try {
        verify(client().admin().indices().prepareUpdateSettings("barbaz").setSettings(ImmutableSettings.builder().put("e", "f")), false);
        // BUG FIX: without this fail() the test silently passed when the update
        // was (incorrectly) accepted — the catch block only ran if an exception
        // was actually thrown.
        fail("expected ElasticsearchIllegalArgumentException: non dynamic setting update on an open index");
    } catch (ElasticsearchIllegalArgumentException e) {
        assertThat(e.getMessage(), equalTo("Can't update non dynamic settings[[index.e]] for open indices [[barbaz]]"));
    }
    // A wildcard matching no index must fail.
    verify(client().admin().indices().prepareUpdateSettings("baz*").setSettings(ImmutableSettings.builder().put("a", "b")), true);
}
/** Builds a match-all search over the given index expressions. */
private static SearchRequestBuilder search(String... indices) {
    final SearchRequestBuilder request = client().prepareSearch(indices);
    return request.setQuery(matchAllQuery());
}
/**
 * Builds a multi-search wrapping one match-all search over the given indices.
 * Indices options are applied only when explicitly provided.
 */
private static MultiSearchRequestBuilder msearch(IndicesOptions options, String... indices) {
    MultiSearchRequestBuilder multi = client().prepareMultiSearch();
    if (options != null) {
        multi.setIndicesOptions(options);
    }
    SearchRequestBuilder inner = client().prepareSearch(indices).setQuery(matchAllQuery());
    return multi.add(inner);
}
/** Builds a match-all count over the given index expressions. */
private static CountRequestBuilder count(String... indices) {
    final CountRequestBuilder request = client().prepareCount(indices);
    return request.setQuery(matchAllQuery());
}
/** Builds a clear-cache request for the given index expressions. */
private static ClearIndicesCacheRequestBuilder clearCache(String... indices) {
    ClearIndicesCacheRequestBuilder request = client().admin().indices().prepareClearCache(indices);
    return request;
}
/** Builds a flush request for the given index expressions. */
private static FlushRequestBuilder _flush(String... indices) {
    FlushRequestBuilder request = client().admin().indices().prepareFlush(indices);
    return request;
}
/** Builds a segments request for the given index expressions. */
private static IndicesSegmentsRequestBuilder segments(String... indices) {
    IndicesSegmentsRequestBuilder request = client().admin().indices().prepareSegments(indices);
    return request;
}
/** Builds a stats request for the given index expressions. */
private static IndicesStatsRequestBuilder stats(String... indices) {
    IndicesStatsRequestBuilder request = client().admin().indices().prepareStats(indices);
    return request;
}
/** Builds a status request for the given index expressions. */
private static IndicesStatusRequestBuilder status(String... indices) {
    IndicesStatusRequestBuilder request = client().admin().indices().prepareStatus(indices);
    return request;
}
/** Builds an optimize request for the given index expressions. */
private static OptimizeRequestBuilder optimize(String... indices) {
    OptimizeRequestBuilder request = client().admin().indices().prepareOptimize(indices);
    return request;
}
/** Builds a refresh request for the given index expressions. */
private static RefreshRequestBuilder refresh(String... indices) {
    RefreshRequestBuilder request = client().admin().indices().prepareRefresh(indices);
    return request;
}
/** Builds a validate-query request for the given index expressions. */
private static ValidateQueryRequestBuilder validateQuery(String... indices) {
    ValidateQueryRequestBuilder request = client().admin().indices().prepareValidateQuery(indices);
    return request;
}
/** Builds an aliases-exist check for alias "dummy" restricted to the given indices. */
private static AliasesExistRequestBuilder aliasExists(String... indices) {
    AliasesExistRequestBuilder request = client().admin().indices().prepareAliasesExist("dummy");
    return request.addIndices(indices);
}
/** Builds a types-exist check for type "dummy" on the given indices. */
private static TypesExistsRequestBuilder typesExists(String... indices) {
    TypesExistsRequestBuilder request = client().admin().indices().prepareTypesExists(indices);
    return request.setTypes("dummy");
}
/** Builds a delete-by-query that matches no document (mustNot match_all). */
private static DeleteByQueryRequestBuilder deleteByQuery(String... indices) {
    DeleteByQueryRequestBuilder request = client().prepareDeleteByQuery(indices);
    return request.setQuery(boolQuery().mustNot(matchAllQuery()));
}
/** Builds a percolate request for a trivial one-field document of type "type". */
private static PercolateRequestBuilder percolate(String... indices) {
    PercolateRequestBuilder request = client().preparePercolate();
    request.setIndices(indices);
    request.setSource(new PercolateSourceBuilder().setDoc(docBuilder().setDoc("k", "v")));
    return request.setDocumentType("type");
}
/**
 * Builds a multi-percolate wrapping one percolate request over the given
 * indices; indices options are applied only when explicitly provided.
 */
private static MultiPercolateRequestBuilder mpercolate(IndicesOptions options, String... indices) {
    MultiPercolateRequestBuilder multi = client().prepareMultiPercolate();
    if (options != null) {
        multi.setIndicesOptions(options);
    }
    return multi.add(percolate(indices));
}
/** Builds a term-suggestion request named "name" on field "a". */
private static SuggestRequestBuilder suggest(String... indices) {
    SuggestRequestBuilder request = client().prepareSuggest(indices);
    return request.addSuggestion(SuggestBuilders.termSuggestion("name").field("a"));
}
/** Builds a get-aliases request for alias "dummy" restricted to the given indices. */
private static GetAliasesRequestBuilder getAliases(String... indices) {
    GetAliasesRequestBuilder request = client().admin().indices().prepareGetAliases("dummy");
    return request.addIndices(indices);
}
/** Builds a get-field-mappings request for the given index expressions. */
private static GetFieldMappingsRequestBuilder getFieldMapping(String... indices) {
    GetFieldMappingsRequestBuilder request = client().admin().indices().prepareGetFieldMappings(indices);
    return request;
}
/** Builds a get-mappings request for the given index expressions. */
private static GetMappingsRequestBuilder getMapping(String... indices) {
    GetMappingsRequestBuilder request = client().admin().indices().prepareGetMappings(indices);
    return request;
}
/** Builds a get-warmers request for the given index expressions. */
private static GetWarmersRequestBuilder getWarmer(String... indices) {
    GetWarmersRequestBuilder request = client().admin().indices().prepareGetWarmers(indices);
    return request;
}
/** Builds a get-settings request for the given index expressions. */
private static GetSettingsRequestBuilder getSettings(String... indices) {
    GetSettingsRequestBuilder request = client().admin().indices().prepareGetSettings(indices);
    return request;
}
/** Builds a synchronous snapshot of the given indices into repository "dummy-repo". */
private static CreateSnapshotRequestBuilder snapshot(String name, String... indices) {
    CreateSnapshotRequestBuilder request = client().admin().cluster().prepareCreateSnapshot("dummy-repo", name);
    return request.setWaitForCompletion(true).setIndices(indices);
}
/**
 * Builds a synchronous restore from "dummy-repo"; every restored index is
 * renamed to "<original>-copy-<name>" so it does not clash with live indices.
 */
private static RestoreSnapshotRequestBuilder restore(String name, String... indices) {
    RestoreSnapshotRequestBuilder request = client().admin().cluster().prepareRestoreSnapshot("dummy-repo", name);
    request.setRenamePattern("(.+)");
    request.setRenameReplacement("$1-copy-" + name);
    request.setWaitForCompletion(true);
    return request.setIndices(indices);
}
/** Convenience overload of {@code verify}: no hits are expected on success. */
private static void verify(ActionRequestBuilder requestBuilder, boolean fail) {
    final long expectedHits = 0L;
    verify(requestBuilder, fail, expectedHits);
}
/**
 * Executes the request. When {@code fail} is true the request is expected to
 * be rejected (missing or closed index); otherwise it must succeed and, for
 * search/count requests, return exactly {@code expectedCount} hits.
 */
private static void verify(ActionRequestBuilder requestBuilder, boolean fail, long expectedCount) {
    if (fail) {
        // Multi-search does not throw for a failed item: the failure surfaces
        // as a null per-item response instead of an exception.
        if (requestBuilder instanceof MultiSearchRequestBuilder) {
            MultiSearchResponse multiSearchResponse = ((MultiSearchRequestBuilder) requestBuilder).get();
            assertThat(multiSearchResponse.getResponses().length, equalTo(1));
            assertThat(multiSearchResponse.getResponses()[0].getResponse(), nullValue());
        } else {
            // All other request types are expected to throw on execution.
            try {
                requestBuilder.get();
                fail("IndexMissingException or IndexClosedException was expected");
            } catch (IndexMissingException | IndexClosedException e) {}
        }
    } else {
        // Success path: check hit counts where the response type supports it,
        // otherwise just execute and let any exception fail the test.
        if (requestBuilder instanceof SearchRequestBuilder) {
            SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) requestBuilder;
            assertHitCount(searchRequestBuilder.get(), expectedCount);
        } else if (requestBuilder instanceof CountRequestBuilder) {
            CountRequestBuilder countRequestBuilder = (CountRequestBuilder) requestBuilder;
            assertHitCount(countRequestBuilder.get(), expectedCount);
        } else if (requestBuilder instanceof MultiSearchRequestBuilder) {
            MultiSearchResponse multiSearchResponse = ((MultiSearchRequestBuilder) requestBuilder).get();
            assertThat(multiSearchResponse.getResponses().length, equalTo(1));
            assertThat(multiSearchResponse.getResponses()[0].getResponse(), notNullValue());
        } else {
            requestBuilder.get();
        }
    }
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.apimanagement.v2018_06_01_preview.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.management.apimanagement.v2018_06_01_preview.DelegationSettingsGetEntityTagHeaders;
import com.microsoft.azure.management.apimanagement.v2018_06_01_preview.DelegationSettingsGetHeaders;
import com.microsoft.azure.management.apimanagement.v2018_06_01_preview.ErrorResponseException;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.ServiceResponseWithHeaders;
import com.microsoft.rest.Validator;
import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.HEAD;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.PATCH;
import retrofit2.http.Path;
import retrofit2.http.PUT;
import retrofit2.http.Query;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in DelegationSettings.
*/
public class DelegationSettingsInner {
/** The Retrofit proxy used to perform the REST calls declared in {@link DelegationSettingsService}. */
private DelegationSettingsService service;
/** The owning service client; supplies subscription id, accept-language, user agent and the serializer. */
private ApiManagementClientImpl client;
/**
 * Initializes an instance of DelegationSettingsInner.
 *
 * @param retrofit the Retrofit instance built from a Retrofit Builder.
 * @param client the instance of the service client containing this operation class.
 */
public DelegationSettingsInner(Retrofit retrofit, ApiManagementClientImpl client) {
    // Keep the owning client, then build the Retrofit proxy for the service interface.
    this.client = client;
    this.service = retrofit.create(DelegationSettingsService.class);
}
/**
 * The interface defining all the services for DelegationSettings, used by
 * Retrofit to actually perform the REST calls.
 */
interface DelegationSettingsService {
    // HEAD: fetches only the response headers (ETag) of the delegation settings resource.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.apimanagement.v2018_06_01_preview.DelegationSettings getEntityTag" })
    @HEAD("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/portalsettings/delegation")
    Observable<Response<Void>> getEntityTag(@Path("resourceGroupName") String resourceGroupName, @Path("serviceName") String serviceName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // GET: retrieves the delegation settings payload.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.apimanagement.v2018_06_01_preview.DelegationSettings get" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/portalsettings/delegation")
    Observable<Response<ResponseBody>> get(@Path("resourceGroupName") String resourceGroupName, @Path("serviceName") String serviceName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // PATCH: partial update; If-Match header is required by the caller-facing wrappers.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.apimanagement.v2018_06_01_preview.DelegationSettings update" })
    @PATCH("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/portalsettings/delegation")
    Observable<Response<ResponseBody>> update(@Path("resourceGroupName") String resourceGroupName, @Path("serviceName") String serviceName, @Path("subscriptionId") String subscriptionId, @Body PortalDelegationSettingsInner parameters, @Header("If-Match") String ifMatch, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // PUT: create or replace; If-Match is optional (null for unconditional create).
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.apimanagement.v2018_06_01_preview.DelegationSettings createOrUpdate" })
    @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/portalsettings/delegation")
    Observable<Response<ResponseBody>> createOrUpdate(@Path("resourceGroupName") String resourceGroupName, @Path("serviceName") String serviceName, @Path("subscriptionId") String subscriptionId, @Body PortalDelegationSettingsInner parameters, @Header("If-Match") String ifMatch, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
}
/**
 * Gets the entity state (Etag) version of the DelegationSettings, blocking
 * until the call completes.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorResponseException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void getEntityTag(String resourceGroupName, String serviceName) {
    // body() is Void, but calling it surfaces any service error as an exception.
    ServiceResponseWithHeaders<Void, DelegationSettingsGetEntityTagHeaders> response =
            getEntityTagWithServiceResponseAsync(resourceGroupName, serviceName).toBlocking().single();
    response.body();
}
/**
 * Gets the entity state (Etag) version of the DelegationSettings, reporting
 * the outcome to the given callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> getEntityTagAsync(String resourceGroupName, String serviceName, final ServiceCallback<Void> serviceCallback) {
    Observable<ServiceResponseWithHeaders<Void, DelegationSettingsGetEntityTagHeaders>> observable =
            getEntityTagWithServiceResponseAsync(resourceGroupName, serviceName);
    return ServiceFuture.fromHeaderResponse(observable, serviceCallback);
}
/**
 * Gets the entity state (Etag) version of the DelegationSettings.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return an {@link Observable} emitting the (empty) result
 */
public Observable<Void> getEntityTagAsync(String resourceGroupName, String serviceName) {
    // Strip the ServiceResponseWithHeaders wrapper, keeping only the (Void) body.
    Func1<ServiceResponseWithHeaders<Void, DelegationSettingsGetEntityTagHeaders>, Void> unwrapBody =
            new Func1<ServiceResponseWithHeaders<Void, DelegationSettingsGetEntityTagHeaders>, Void>() {
                @Override
                public Void call(ServiceResponseWithHeaders<Void, DelegationSettingsGetEntityTagHeaders> response) {
                    return response.body();
                }
            };
    return getEntityTagWithServiceResponseAsync(resourceGroupName, serviceName).map(unwrapBody);
}
/**
 * Gets the entity state (Etag) version of the DelegationSettings.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceResponseWithHeaders} object if successful.
 */
public Observable<ServiceResponseWithHeaders<Void, DelegationSettingsGetEntityTagHeaders>> getEntityTagWithServiceResponseAsync(String resourceGroupName, String serviceName) {
    // Fail fast on missing required parameters before issuing the request.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serviceName == null) {
        throw new IllegalArgumentException("Parameter serviceName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    // API version is fixed for this generated client.
    final String apiVersion = "2018-06-01-preview";
    return service.getEntityTag(resourceGroupName, serviceName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<Void>, Observable<ServiceResponseWithHeaders<Void, DelegationSettingsGetEntityTagHeaders>>>() {
            @Override
            public Observable<ServiceResponseWithHeaders<Void, DelegationSettingsGetEntityTagHeaders>> call(Response<Void> response) {
                try {
                    // Map the raw Retrofit response into a typed response-with-headers,
                    // turning non-success status codes into ErrorResponseException.
                    ServiceResponseWithHeaders<Void, DelegationSettingsGetEntityTagHeaders> clientResponse = getEntityTagDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
// Decodes the raw HEAD response: 200 -> empty body plus typed ETag headers;
// any other status code -> ErrorResponseException.
private ServiceResponseWithHeaders<Void, DelegationSettingsGetEntityTagHeaders> getEntityTagDelegate(Response<Void> response) throws ErrorResponseException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<Void, ErrorResponseException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<Void>() { }.getType())
            .registerError(ErrorResponseException.class)
            .buildEmptyWithHeaders(response, DelegationSettingsGetEntityTagHeaders.class);
}
/**
 * Get Delegation Settings for the Portal, blocking until the call completes.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorResponseException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PortalDelegationSettingsInner object if successful.
 */
public PortalDelegationSettingsInner get(String resourceGroupName, String serviceName) {
    ServiceResponseWithHeaders<PortalDelegationSettingsInner, DelegationSettingsGetHeaders> response =
            getWithServiceResponseAsync(resourceGroupName, serviceName).toBlocking().single();
    return response.body();
}
/**
 * Get Delegation Settings for the Portal, reporting the outcome to the given callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<PortalDelegationSettingsInner> getAsync(String resourceGroupName, String serviceName, final ServiceCallback<PortalDelegationSettingsInner> serviceCallback) {
    Observable<ServiceResponseWithHeaders<PortalDelegationSettingsInner, DelegationSettingsGetHeaders>> observable =
            getWithServiceResponseAsync(resourceGroupName, serviceName);
    return ServiceFuture.fromHeaderResponse(observable, serviceCallback);
}
/**
 * Get Delegation Settings for the Portal.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PortalDelegationSettingsInner object
 */
public Observable<PortalDelegationSettingsInner> getAsync(String resourceGroupName, String serviceName) {
    // Strip the ServiceResponseWithHeaders wrapper, keeping only the settings payload.
    Func1<ServiceResponseWithHeaders<PortalDelegationSettingsInner, DelegationSettingsGetHeaders>, PortalDelegationSettingsInner> unwrapBody =
            new Func1<ServiceResponseWithHeaders<PortalDelegationSettingsInner, DelegationSettingsGetHeaders>, PortalDelegationSettingsInner>() {
                @Override
                public PortalDelegationSettingsInner call(ServiceResponseWithHeaders<PortalDelegationSettingsInner, DelegationSettingsGetHeaders> response) {
                    return response.body();
                }
            };
    return getWithServiceResponseAsync(resourceGroupName, serviceName).map(unwrapBody);
}
/**
 * Get Delegation Settings for the Portal.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PortalDelegationSettingsInner object
 */
public Observable<ServiceResponseWithHeaders<PortalDelegationSettingsInner, DelegationSettingsGetHeaders>> getWithServiceResponseAsync(String resourceGroupName, String serviceName) {
    // Fail fast on missing required parameters before issuing the request.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serviceName == null) {
        throw new IllegalArgumentException("Parameter serviceName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    // API version is fixed for this generated client.
    final String apiVersion = "2018-06-01-preview";
    return service.get(resourceGroupName, serviceName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<PortalDelegationSettingsInner, DelegationSettingsGetHeaders>>>() {
            @Override
            public Observable<ServiceResponseWithHeaders<PortalDelegationSettingsInner, DelegationSettingsGetHeaders>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize the body and typed headers; non-success status
                    // codes are turned into ErrorResponseException.
                    ServiceResponseWithHeaders<PortalDelegationSettingsInner, DelegationSettingsGetHeaders> clientResponse = getDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
// Decodes the raw GET response: 200 -> PortalDelegationSettingsInner body plus
// typed headers; any other status code -> ErrorResponseException.
private ServiceResponseWithHeaders<PortalDelegationSettingsInner, DelegationSettingsGetHeaders> getDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PortalDelegationSettingsInner, ErrorResponseException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PortalDelegationSettingsInner>() { }.getType())
            .registerError(ErrorResponseException.class)
            .buildWithHeaders(response, DelegationSettingsGetHeaders.class);
}
/**
 * Update Delegation settings, blocking until the call completes.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @param parameters Update Delegation settings.
 * @param ifMatch ETag of the Entity. ETag should match the current entity state from the header response of the GET request or it should be * for unconditional update.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorResponseException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void update(String resourceGroupName, String serviceName, PortalDelegationSettingsInner parameters, String ifMatch) {
    // body() is Void, but calling it surfaces any service error as an exception.
    ServiceResponse<Void> response =
            updateWithServiceResponseAsync(resourceGroupName, serviceName, parameters, ifMatch).toBlocking().single();
    response.body();
}
/**
 * Update Delegation settings, reporting the outcome to the given callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @param parameters Update Delegation settings.
 * @param ifMatch ETag of the Entity. ETag should match the current entity state from the header response of the GET request or it should be * for unconditional update.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> updateAsync(String resourceGroupName, String serviceName, PortalDelegationSettingsInner parameters, String ifMatch, final ServiceCallback<Void> serviceCallback) {
    Observable<ServiceResponse<Void>> observable =
            updateWithServiceResponseAsync(resourceGroupName, serviceName, parameters, ifMatch);
    return ServiceFuture.fromResponse(observable, serviceCallback);
}
/**
 * Update Delegation settings.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @param parameters Update Delegation settings.
 * @param ifMatch ETag of the Entity. ETag should match the current entity state from the header response of the GET request or it should be * for unconditional update.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return an {@link Observable} emitting the (empty) result
 */
public Observable<Void> updateAsync(String resourceGroupName, String serviceName, PortalDelegationSettingsInner parameters, String ifMatch) {
    // Strip the ServiceResponse wrapper, keeping only the (Void) body.
    Func1<ServiceResponse<Void>, Void> unwrapBody = new Func1<ServiceResponse<Void>, Void>() {
        @Override
        public Void call(ServiceResponse<Void> response) {
            return response.body();
        }
    };
    return updateWithServiceResponseAsync(resourceGroupName, serviceName, parameters, ifMatch).map(unwrapBody);
}
/**
 * Update Delegation settings.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @param parameters Update Delegation settings.
 * @param ifMatch ETag of the Entity. ETag should match the current entity state from the header response of the GET request or it should be * for unconditional update.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceResponse} object if successful.
 */
public Observable<ServiceResponse<Void>> updateWithServiceResponseAsync(String resourceGroupName, String serviceName, PortalDelegationSettingsInner parameters, String ifMatch) {
    // Fail fast on missing required parameters; ifMatch is mandatory for PATCH.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serviceName == null) {
        throw new IllegalArgumentException("Parameter serviceName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (parameters == null) {
        throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
    }
    if (ifMatch == null) {
        throw new IllegalArgumentException("Parameter ifMatch is required and cannot be null.");
    }
    // Run the generated client-side validation of the payload.
    Validator.validate(parameters);
    final String apiVersion = "2018-06-01-preview";
    return service.update(resourceGroupName, serviceName, this.client.subscriptionId(), parameters, ifMatch, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
            @Override
            public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                try {
                    // 204 is the only success status; everything else becomes
                    // ErrorResponseException (see updateDelegate).
                    ServiceResponse<Void> clientResponse = updateDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
// Decodes the raw PATCH response: only 204 (No Content) is registered as
// success; any other status code -> ErrorResponseException.
private ServiceResponse<Void> updateDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<Void, ErrorResponseException>newInstance(this.client.serializerAdapter())
            .register(204, new TypeToken<Void>() { }.getType())
            .registerError(ErrorResponseException.class)
            .build(response);
}
/**
 * Create or Update Delegation settings, blocking until the call completes.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @param parameters Create or update parameters.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorResponseException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PortalDelegationSettingsInner object if successful.
 */
public PortalDelegationSettingsInner createOrUpdate(String resourceGroupName, String serviceName, PortalDelegationSettingsInner parameters) {
    ServiceResponse<PortalDelegationSettingsInner> response =
            createOrUpdateWithServiceResponseAsync(resourceGroupName, serviceName, parameters).toBlocking().single();
    return response.body();
}
/**
 * Create or Update Delegation settings, reporting the outcome to the given callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @param parameters Create or update parameters.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<PortalDelegationSettingsInner> createOrUpdateAsync(String resourceGroupName, String serviceName, PortalDelegationSettingsInner parameters, final ServiceCallback<PortalDelegationSettingsInner> serviceCallback) {
    Observable<ServiceResponse<PortalDelegationSettingsInner>> observable =
            createOrUpdateWithServiceResponseAsync(resourceGroupName, serviceName, parameters);
    return ServiceFuture.fromResponse(observable, serviceCallback);
}
/**
 * Create or Update Delegation settings.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @param parameters Create or update parameters.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PortalDelegationSettingsInner object
 */
public Observable<PortalDelegationSettingsInner> createOrUpdateAsync(String resourceGroupName, String serviceName, PortalDelegationSettingsInner parameters) {
    // Strip the ServiceResponse wrapper, keeping only the settings payload.
    Func1<ServiceResponse<PortalDelegationSettingsInner>, PortalDelegationSettingsInner> unwrapBody =
            new Func1<ServiceResponse<PortalDelegationSettingsInner>, PortalDelegationSettingsInner>() {
                @Override
                public PortalDelegationSettingsInner call(ServiceResponse<PortalDelegationSettingsInner> response) {
                    return response.body();
                }
            };
    return createOrUpdateWithServiceResponseAsync(resourceGroupName, serviceName, parameters).map(unwrapBody);
}
/**
 * Create or Update Delegation settings.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the API Management service.
 * @param parameters Create or update parameters.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PortalDelegationSettingsInner object
 */
public Observable<ServiceResponse<PortalDelegationSettingsInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String serviceName, PortalDelegationSettingsInner parameters) {
    // Fail fast on missing required parameters before issuing the request.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serviceName == null) {
        throw new IllegalArgumentException("Parameter serviceName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (parameters == null) {
        throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
    }
    // Run the generated client-side validation of the payload.
    Validator.validate(parameters);
    final String apiVersion = "2018-06-01-preview";
    // This overload sends no If-Match header (unconditional create/replace).
    final String ifMatch = null;
    return service.createOrUpdate(resourceGroupName, serviceName, this.client.subscriptionId(), parameters, ifMatch, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<PortalDelegationSettingsInner>>>() {
            @Override
            public Observable<ServiceResponse<PortalDelegationSettingsInner>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize the settings payload; non-success status codes
                    // become ErrorResponseException (see createOrUpdateDelegate).
                    ServiceResponse<PortalDelegationSettingsInner> clientResponse = createOrUpdateDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
/**
* Create or Update Delegation settings.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param parameters Create or update parameters.
* @param ifMatch ETag of the Entity. Not required when creating an entity, but required when updating an entity.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws ErrorResponseException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the PortalDelegationSettingsInner object if successful.
*/
public PortalDelegationSettingsInner createOrUpdate(String resourceGroupName, String serviceName, PortalDelegationSettingsInner parameters, String ifMatch) {
return createOrUpdateWithServiceResponseAsync(resourceGroupName, serviceName, parameters, ifMatch).toBlocking().single().body();
}
/**
* Create or Update Delegation settings.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param parameters Create or update parameters.
* @param ifMatch ETag of the Entity. Not required when creating an entity, but required when updating an entity.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<PortalDelegationSettingsInner> createOrUpdateAsync(String resourceGroupName, String serviceName, PortalDelegationSettingsInner parameters, String ifMatch, final ServiceCallback<PortalDelegationSettingsInner> serviceCallback) {
return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, serviceName, parameters, ifMatch), serviceCallback);
}
/**
* Create or Update Delegation settings.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param parameters Create or update parameters.
* @param ifMatch ETag of the Entity. Not required when creating an entity, but required when updating an entity.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the PortalDelegationSettingsInner object
*/
public Observable<PortalDelegationSettingsInner> createOrUpdateAsync(String resourceGroupName, String serviceName, PortalDelegationSettingsInner parameters, String ifMatch) {
return createOrUpdateWithServiceResponseAsync(resourceGroupName, serviceName, parameters, ifMatch).map(new Func1<ServiceResponse<PortalDelegationSettingsInner>, PortalDelegationSettingsInner>() {
@Override
public PortalDelegationSettingsInner call(ServiceResponse<PortalDelegationSettingsInner> response) {
return response.body();
}
});
}
/**
* Create or Update Delegation settings.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param parameters Create or update parameters.
* @param ifMatch ETag of the Entity. Not required when creating an entity, but required when updating an entity.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the PortalDelegationSettingsInner object
*/
public Observable<ServiceResponse<PortalDelegationSettingsInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String serviceName, PortalDelegationSettingsInner parameters, String ifMatch) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (serviceName == null) {
throw new IllegalArgumentException("Parameter serviceName is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (parameters == null) {
throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
}
Validator.validate(parameters);
final String apiVersion = "2018-06-01-preview";
return service.createOrUpdate(resourceGroupName, serviceName, this.client.subscriptionId(), parameters, ifMatch, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<PortalDelegationSettingsInner>>>() {
@Override
public Observable<ServiceResponse<PortalDelegationSettingsInner>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PortalDelegationSettingsInner> clientResponse = createOrUpdateDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
private ServiceResponse<PortalDelegationSettingsInner> createOrUpdateDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<PortalDelegationSettingsInner, ErrorResponseException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<PortalDelegationSettingsInner>() { }.getType())
.registerError(ErrorResponseException.class)
.build(response);
}
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.segment.filter;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.metamx.common.Pair;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.InputRowParser;
import io.druid.data.input.impl.MapInputRowParser;
import io.druid.data.input.impl.TimeAndDimsParseSpec;
import io.druid.data.input.impl.TimestampSpec;
import io.druid.js.JavaScriptConfig;
import io.druid.query.extraction.ExtractionFn;
import io.druid.query.extraction.JavaScriptExtractionFn;
import io.druid.query.filter.BoundDimFilter;
import io.druid.query.filter.DimFilter;
import io.druid.query.ordering.StringComparators;
import io.druid.segment.IndexBuilder;
import io.druid.segment.StorageAdapter;
import org.joda.time.DateTime;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.Closeable;
import java.util.List;
import java.util.Map;
/**
 * Parameterized tests for the bound ("range") dimension filter. Each case is executed once per
 * storage-adapter configuration supplied by {@link BaseFilterTest}, and covers the
 * LEXICOGRAPHIC, ALPHANUMERIC and NUMERIC string comparators against a small fixed data set.
 */
@RunWith(Parameterized.class)
public class BoundFilterTest extends BaseFilterTest
{
  // Name of the timestamp column expected by the row parser below.
  private static final String TIMESTAMP_COLUMN = "timestamp";
  // Map-based parser for the fixture rows. NOTE(review): new DateTime("2000") is presumably the
  // fallback timestamp used when the column is absent from a row -- confirm against TimestampSpec.
  private static final InputRowParser<Map<String, Object>> PARSER = new MapInputRowParser(
      new TimeAndDimsParseSpec(
          new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")),
          new DimensionsSpec(null, null, null)
      )
  );
  // Eight rows keyed by dim0 = "0".."7". dim1 holds assorted strings (empty string, positive and
  // negative numbers, words); dim2 is multi-valued and is two-valued, empty, "", single-valued or
  // entirely absent depending on the row. dim3 and dim4 never exist in any row.
  private static final List<InputRow> ROWS = ImmutableList.of(
      PARSER.parse(ImmutableMap.<String, Object>of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))),
      PARSER.parse(ImmutableMap.<String, Object>of("dim0", "1", "dim1", "10", "dim2", ImmutableList.<String>of())),
      PARSER.parse(ImmutableMap.<String, Object>of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))),
      PARSER.parse(ImmutableMap.<String, Object>of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))),
      PARSER.parse(ImmutableMap.<String, Object>of("dim0", "4", "dim1", "def", "dim2", ImmutableList.of("c"))),
      PARSER.parse(ImmutableMap.<String, Object>of("dim0", "5", "dim1", "abc")),
      PARSER.parse(ImmutableMap.<String, Object>of("dim0", "6", "dim1", "-1000", "dim2", ImmutableList.of("a"))),
      PARSER.parse(ImmutableMap.<String, Object>of("dim0", "7", "dim1", "-10.012", "dim2", ImmutableList.of("d")))
  );
  // All constructor arguments are injected by the Parameterized runner via BaseFilterTest.
  public BoundFilterTest(
      String testName,
      IndexBuilder indexBuilder,
      Function<IndexBuilder, Pair<StorageAdapter, Closeable>> finisher,
      boolean optimize
  )
  {
    super(testName, ROWS, indexBuilder, finisher, optimize);
  }
  @AfterClass
  public static void tearDown() throws Exception
  {
    // Release resources registered under this class's name by the base test harness.
    BaseFilterTest.tearDown(BoundFilterTest.class.getName());
  }
  // An inclusive ["", "z"] lexicographic range is wide enough to match every row on every
  // dimension -- including dim3, which does not exist.
  @Test
  public void testLexicographicMatchEverything()
  {
    final List<BoundDimFilter> filters = ImmutableList.of(
        new BoundDimFilter("dim0", "", "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
        new BoundDimFilter("dim1", "", "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
        new BoundDimFilter("dim2", "", "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
        new BoundDimFilter("dim3", "", "z", false, false, false, null, StringComparators.LEXICOGRAPHIC)
    );
    for (BoundDimFilter filter : filters) {
      assertFilterMatches(filter, ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7"));
    }
  }
  // Inclusive ["", ""] bounds match only empty values: no dim0 is empty; dim1 is empty only in
  // row 0; dim2 counts as empty where its list is empty (1), holds "" (2), or is absent (5).
  @Test
  public void testLexicographicMatchNull()
  {
    assertFilterMatches(
        new BoundDimFilter("dim0", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.<String>of()
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("0")
    );
    assertFilterMatches(
        new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("1", "2", "5")
    );
  }
  // dim3 never exists, so every row behaves as an empty value: inclusive empty bounds match all
  // rows, a strict flag against an empty ("") bound matches none, and a strict flag against a
  // null (missing) bound still matches everything.
  @Test
  public void testLexicographicMatchMissingColumn()
  {
    assertFilterMatches(
        new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
    );
    assertFilterMatches(
        new BoundDimFilter("dim3", "", "", true, false, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.<String>of()
    );
    assertFilterMatches(
        new BoundDimFilter("dim3", "", "", false, true, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.<String>of()
    );
    assertFilterMatches(
        new BoundDimFilter("dim3", "", null, false, true, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
    );
    assertFilterMatches(
        new BoundDimFilter("dim3", null, "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
    );
    assertFilterMatches(
        new BoundDimFilter("dim3", null, "", false, true, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.<String>of()
    );
  }
  // With lower == upper == "abc", any strict flag empties the interval, even though a row with
  // dim1 = "abc" exists.
  @Test
  public void testLexicographicMatchTooStrict()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "abc", "abc", true, false, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.<String>of()
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "abc", "abc", true, true, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.<String>of()
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "abc", "abc", false, true, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.<String>of()
    );
  }
  // Inclusive ["abc", "abc"] degenerates to an equality match on dim1 = "abc" (row 5).
  @Test
  public void testLexicographicMatchExactlySingleValue()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "abc", "abc", false, false, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("5")
    );
  }
  // The strict open interval ("ab", "abd") surrounds exactly "abc".
  @Test
  public void testLexicographicMatchSurroundingSingleValue()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "ab", "abd", true, true, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("5")
    );
  }
  // A null upper bound is unbounded above: everything lexicographically greater than "ab"
  // ("abc" and "def").
  @Test
  public void testLexicographicMatchNoUpperLimit()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "ab", null, true, true, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("4", "5")
    );
  }
  // A null lower bound is unbounded below: everything lexicographically less than "abd"
  // (all rows except dim1 = "def").
  @Test
  public void testLexicographicMatchNoLowerLimit()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", null, "abd", true, true, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("0", "1", "2", "3", "5", "6", "7")
    );
  }
  // Lexicographic comparison of digit strings is character-wise, not numeric: "10" falls inside
  // ["1", "3"], and "-1000" / "-10.012" sort between "-1" and "3".
  @Test
  public void testLexicographicMatchNumbers()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "1", "3", false, false, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("1", "2", "3")
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "1", "3", true, true, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("1", "2")
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "-1", "3", true, true, false, null, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("1", "2", "3", "6", "7")
    );
  }
  // Same empty-bounds behavior as the lexicographic variant, under ALPHANUMERIC comparison
  // (here with the legacy alphaNumeric flag set).
  @Test
  public void testAlphaNumericMatchNull()
  {
    assertFilterMatches(
        new BoundDimFilter("dim0", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.<String>of()
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.of("0")
    );
    assertFilterMatches(
        new BoundDimFilter("dim2", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.of("1", "2", "5")
    );
    assertFilterMatches(
        new BoundDimFilter("dim3", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
    );
  }
  // With lower == upper == "2", any strict flag empties the interval.
  @Test
  public void testAlphaNumericMatchTooStrict()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "2", "2", true, false, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.<String>of()
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "2", "2", true, true, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.<String>of()
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "2", "2", false, true, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.<String>of()
    );
  }
  // Inclusive ["2", "2"] degenerates to an equality match on dim1 = "2" (row 2).
  @Test
  public void testAlphaNumericMatchExactlySingleValue()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "2", "2", false, false, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.of("2")
    );
  }
  // Alphanumeric ordering treats digit runs numerically: "10" is NOT inside ("1", "3"),
  // unlike the lexicographic case above.
  @Test
  public void testAlphaNumericMatchSurroundingSingleValue()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "1", "3", true, true, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.of("2")
    );
  }
  // Null upper bound: note that "-1000" and "-10.012" land above both "1" and "-1" here --
  // the ALPHANUMERIC comparator does not interpret the leading minus sign.
  @Test
  public void testAlphaNumericMatchNoUpperLimit()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "1", null, true, true, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.of("1", "2", "4", "5", "6", "7")
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "-1", null, true, true, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.of("4", "5", "6", "7")
    );
  }
  // Null lower bound with numeric-aware digit comparison ("" and "1" are below "2").
  @Test
  public void testAlphaNumericMatchNoLowerLimit()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", null, "2", true, true, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.of("0", "3")
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", null, "ZZZZZ", true, true, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
    );
  }
  // Documents current ALPHANUMERIC behavior for negative-looking strings: ["-2000", "3"] matches
  // nothing, while the reversed-looking ["3", "-2000"] matches "10", "-1000" and "-10.012".
  @Test
  public void testAlphaNumericMatchWithNegatives()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "-2000", "3", true, true, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.<String>of()
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "3", "-2000", true, true, true, null, StringComparators.ALPHANUMERIC),
        ImmutableList.of("1", "6", "7")
    );
  }
  // Same empty-bounds behavior as above, under NUMERIC comparison.
  @Test
  public void testNumericMatchNull()
  {
    assertFilterMatches(
        new BoundDimFilter("dim0", "", "", false, false, false, null, StringComparators.NUMERIC),
        ImmutableList.<String>of()
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "", "", false, false, false, null, StringComparators.NUMERIC),
        ImmutableList.of("0")
    );
    assertFilterMatches(
        new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.NUMERIC),
        ImmutableList.of("1", "2", "5")
    );
    assertFilterMatches(
        new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.NUMERIC),
        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
    );
  }
  // With lower == upper == "2", any strict flag empties the interval.
  @Test
  public void testNumericMatchTooStrict()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "2", "2", true, false, false, null, StringComparators.NUMERIC),
        ImmutableList.<String>of()
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "2", "2", true, true, false, null, StringComparators.NUMERIC),
        ImmutableList.<String>of()
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "2", "2", false, true, false, null, StringComparators.NUMERIC),
        ImmutableList.<String>of()
    );
  }
  // NUMERIC equality matches, including a negative decimal value.
  @Test
  public void testNumericMatchExactlySingleValue()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "2", "2", false, false, false, null, StringComparators.NUMERIC),
        ImmutableList.of("2")
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "-10.012", "-10.012", false, false, false, null, StringComparators.NUMERIC),
        ImmutableList.of("7")
    );
  }
  // Open interval (1, 3) around 2, and inclusive [-11, -10] around -10.012.
  @Test
  public void testNumericMatchSurroundingSingleValue()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "1", "3", true, true, false, null, StringComparators.NUMERIC),
        ImmutableList.of("2")
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "-11", "-10", false, false, false, null, StringComparators.NUMERIC),
        ImmutableList.of("7")
    );
  }
  // Strictly above 1 numerically: only 10 and 2 qualify.
  @Test
  public void testNumericMatchNoUpperLimit()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "1", null, true, true, false, null, StringComparators.NUMERIC),
        ImmutableList.of("1", "2")
    );
  }
  // Strictly below 2 numerically; non-numeric dim1 values ("", "def", "abc") also land here.
  @Test
  public void testNumericMatchNoLowerLimit()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", null, "2", true, true, false, null, StringComparators.NUMERIC),
        ImmutableList.of("0", "3", "4", "5", "6", "7")
    );
  }
  // NUMERIC understands signs: (-2000, 3) matches 2, 1, -1000 and -10.012 -- contrast with
  // the ALPHANUMERIC behavior tested above.
  @Test
  public void testNumericMatchWithNegatives()
  {
    assertFilterMatches(
        new BoundDimFilter("dim1", "-2000", "3", true, true, false, null, StringComparators.NUMERIC),
        ImmutableList.of("2", "3", "6", "7")
    );
  }
  // Bounds are compared against EXTRACTED values: makeNullFn maps every value to null (which the
  // empty inclusive range matches on all rows), and superFn prefixes "super-", so a missing
  // dimension extracts to "super-null".
  @Test
  public void testMatchWithExtractionFn()
  {
    String extractionJsFn = "function(str) { return 'super-' + str; }";
    ExtractionFn superFn = new JavaScriptExtractionFn(extractionJsFn, false, JavaScriptConfig.getDefault());
    String nullJsFn = "function(str) { return null; }";
    ExtractionFn makeNullFn = new JavaScriptExtractionFn(nullJsFn, false, JavaScriptConfig.getDefault());
    assertFilterMatches(
        new BoundDimFilter("dim0", "", "", false, false, false, makeNullFn, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "super-ab", "super-abd", true, true, false, superFn, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("5")
    );
    assertFilterMatches(
        new BoundDimFilter("dim1", "super-0", "super-10", false, false, true, superFn, StringComparators.ALPHANUMERIC),
        ImmutableList.of("1", "2", "3")
    );
    assertFilterMatches(
        new BoundDimFilter("dim2", "super-", "super-zzzzzz", false, false, false, superFn, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
    );
    assertFilterMatches(
        new BoundDimFilter("dim2", "super-null", "super-null", false, false, false, superFn, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("1", "2", "5")
    );
    assertFilterMatches(
        new BoundDimFilter("dim3", "super-null", "super-null", false, false, false, superFn, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
    );
    assertFilterMatches(
        new BoundDimFilter("dim4", "super-null", "super-null", false, false, false, superFn, StringComparators.LEXICOGRAPHIC),
        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
    );
    assertFilterMatches(
        new BoundDimFilter("dim2", "super-null", "super-null", false, false, false, superFn, StringComparators.NUMERIC),
        ImmutableList.of("1", "2", "5")
    );
    assertFilterMatches(
        new BoundDimFilter("dim4", "super-null", "super-null", false, false, false, superFn, StringComparators.NUMERIC),
        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
    );
  }
  // Checks the filter two ways: which dim0 values a cursor selects under the filter, and how
  // many rows a filtered aggregator counts. Both must agree with the expected row keys.
  private void assertFilterMatches(
      final DimFilter filter,
      final List<String> expectedRows
  )
  {
    Assert.assertEquals(filter.toString(), expectedRows, selectColumnValuesMatchingFilter(filter, "dim0"));
    Assert.assertEquals(filter.toString(), expectedRows.size(), selectCountUsingFilteredAggregator(filter));
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.networkmanager.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/networkmanager-2019-07-05/CreateConnection" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateConnectionRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The ID of the global network.
* </p>
*/
private String globalNetworkId;
/**
* <p>
* The ID of the first device in the connection.
* </p>
*/
private String deviceId;
/**
* <p>
* The ID of the second device in the connection.
* </p>
*/
private String connectedDeviceId;
/**
* <p>
* The ID of the link for the first device.
* </p>
*/
private String linkId;
/**
* <p>
* The ID of the link for the second device.
* </p>
*/
private String connectedLinkId;
/**
* <p>
* A description of the connection.
* </p>
* <p>
* Length Constraints: Maximum length of 256 characters.
* </p>
*/
private String description;
/**
* <p>
* The tags to apply to the resource during creation.
* </p>
*/
private java.util.List<Tag> tags;
/**
* <p>
* The ID of the global network.
* </p>
*
* @param globalNetworkId
* The ID of the global network.
*/
public void setGlobalNetworkId(String globalNetworkId) {
this.globalNetworkId = globalNetworkId;
}
/**
* <p>
* The ID of the global network.
* </p>
*
* @return The ID of the global network.
*/
public String getGlobalNetworkId() {
return this.globalNetworkId;
}
/**
* <p>
* The ID of the global network.
* </p>
*
* @param globalNetworkId
* The ID of the global network.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateConnectionRequest withGlobalNetworkId(String globalNetworkId) {
setGlobalNetworkId(globalNetworkId);
return this;
}
/**
* <p>
* The ID of the first device in the connection.
* </p>
*
* @param deviceId
* The ID of the first device in the connection.
*/
public void setDeviceId(String deviceId) {
this.deviceId = deviceId;
}
/**
* <p>
* The ID of the first device in the connection.
* </p>
*
* @return The ID of the first device in the connection.
*/
public String getDeviceId() {
return this.deviceId;
}
/**
* <p>
* The ID of the first device in the connection.
* </p>
*
* @param deviceId
* The ID of the first device in the connection.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateConnectionRequest withDeviceId(String deviceId) {
setDeviceId(deviceId);
return this;
}
/**
* <p>
* The ID of the second device in the connection.
* </p>
*
* @param connectedDeviceId
* The ID of the second device in the connection.
*/
public void setConnectedDeviceId(String connectedDeviceId) {
this.connectedDeviceId = connectedDeviceId;
}
/**
* <p>
* The ID of the second device in the connection.
* </p>
*
* @return The ID of the second device in the connection.
*/
public String getConnectedDeviceId() {
return this.connectedDeviceId;
}
/**
* <p>
* The ID of the second device in the connection.
* </p>
*
* @param connectedDeviceId
* The ID of the second device in the connection.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateConnectionRequest withConnectedDeviceId(String connectedDeviceId) {
setConnectedDeviceId(connectedDeviceId);
return this;
}
/**
* <p>
* The ID of the link for the first device.
* </p>
*
* @param linkId
* The ID of the link for the first device.
*/
public void setLinkId(String linkId) {
this.linkId = linkId;
}
/**
* <p>
* The ID of the link for the first device.
* </p>
*
* @return The ID of the link for the first device.
*/
public String getLinkId() {
return this.linkId;
}
/**
* <p>
* The ID of the link for the first device.
* </p>
*
* @param linkId
* The ID of the link for the first device.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateConnectionRequest withLinkId(String linkId) {
setLinkId(linkId);
return this;
}
/**
* <p>
* The ID of the link for the second device.
* </p>
*
* @param connectedLinkId
* The ID of the link for the second device.
*/
public void setConnectedLinkId(String connectedLinkId) {
this.connectedLinkId = connectedLinkId;
}
/**
* <p>
* The ID of the link for the second device.
* </p>
*
* @return The ID of the link for the second device.
*/
public String getConnectedLinkId() {
return this.connectedLinkId;
}
/**
* <p>
* The ID of the link for the second device.
* </p>
*
* @param connectedLinkId
* The ID of the link for the second device.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateConnectionRequest withConnectedLinkId(String connectedLinkId) {
setConnectedLinkId(connectedLinkId);
return this;
}
/**
* <p>
* A description of the connection.
* </p>
* <p>
* Length Constraints: Maximum length of 256 characters.
* </p>
*
* @param description
* A description of the connection.</p>
* <p>
* Length Constraints: Maximum length of 256 characters.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* <p>
* A description of the connection.
* </p>
* <p>
* Length Constraints: Maximum length of 256 characters.
* </p>
*
* @return A description of the connection.</p>
* <p>
* Length Constraints: Maximum length of 256 characters.
*/
public String getDescription() {
return this.description;
}
/**
* <p>
* A description of the connection.
* </p>
* <p>
* Length Constraints: Maximum length of 256 characters.
* </p>
*
* @param description
* A description of the connection.</p>
* <p>
* Length Constraints: Maximum length of 256 characters.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateConnectionRequest withDescription(String description) {
setDescription(description);
return this;
}
/**
* <p>
* The tags to apply to the resource during creation.
* </p>
*
* @return The tags to apply to the resource during creation.
*/
public java.util.List<Tag> getTags() {
return tags;
}
/**
* <p>
* The tags to apply to the resource during creation.
* </p>
*
* @param tags
* The tags to apply to the resource during creation.
*/
public void setTags(java.util.Collection<Tag> tags) {
if (tags == null) {
this.tags = null;
return;
}
this.tags = new java.util.ArrayList<Tag>(tags);
}
/**
* <p>
* The tags to apply to the resource during creation.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
* existing values.
* </p>
*
* @param tags
* The tags to apply to the resource during creation.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateConnectionRequest withTags(Tag... tags) {
if (this.tags == null) {
setTags(new java.util.ArrayList<Tag>(tags.length));
}
for (Tag ele : tags) {
this.tags.add(ele);
}
return this;
}
/**
* <p>
* The tags to apply to the resource during creation.
* </p>
*
* @param tags
* The tags to apply to the resource during creation.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateConnectionRequest withTags(java.util.Collection<Tag> tags) {
setTags(tags);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getGlobalNetworkId() != null)
sb.append("GlobalNetworkId: ").append(getGlobalNetworkId()).append(",");
if (getDeviceId() != null)
sb.append("DeviceId: ").append(getDeviceId()).append(",");
if (getConnectedDeviceId() != null)
sb.append("ConnectedDeviceId: ").append(getConnectedDeviceId()).append(",");
if (getLinkId() != null)
sb.append("LinkId: ").append(getLinkId()).append(",");
if (getConnectedLinkId() != null)
sb.append("ConnectedLinkId: ").append(getConnectedLinkId()).append(",");
if (getDescription() != null)
sb.append("Description: ").append(getDescription()).append(",");
if (getTags() != null)
sb.append("Tags: ").append(getTags());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof CreateConnectionRequest == false)
return false;
CreateConnectionRequest other = (CreateConnectionRequest) obj;
if (other.getGlobalNetworkId() == null ^ this.getGlobalNetworkId() == null)
return false;
if (other.getGlobalNetworkId() != null && other.getGlobalNetworkId().equals(this.getGlobalNetworkId()) == false)
return false;
if (other.getDeviceId() == null ^ this.getDeviceId() == null)
return false;
if (other.getDeviceId() != null && other.getDeviceId().equals(this.getDeviceId()) == false)
return false;
if (other.getConnectedDeviceId() == null ^ this.getConnectedDeviceId() == null)
return false;
if (other.getConnectedDeviceId() != null && other.getConnectedDeviceId().equals(this.getConnectedDeviceId()) == false)
return false;
if (other.getLinkId() == null ^ this.getLinkId() == null)
return false;
if (other.getLinkId() != null && other.getLinkId().equals(this.getLinkId()) == false)
return false;
if (other.getConnectedLinkId() == null ^ this.getConnectedLinkId() == null)
return false;
if (other.getConnectedLinkId() != null && other.getConnectedLinkId().equals(this.getConnectedLinkId()) == false)
return false;
if (other.getDescription() == null ^ this.getDescription() == null)
return false;
if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
return false;
if (other.getTags() == null ^ this.getTags() == null)
return false;
if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getGlobalNetworkId() == null) ? 0 : getGlobalNetworkId().hashCode());
hashCode = prime * hashCode + ((getDeviceId() == null) ? 0 : getDeviceId().hashCode());
hashCode = prime * hashCode + ((getConnectedDeviceId() == null) ? 0 : getConnectedDeviceId().hashCode());
hashCode = prime * hashCode + ((getLinkId() == null) ? 0 : getLinkId().hashCode());
hashCode = prime * hashCode + ((getConnectedLinkId() == null) ? 0 : getConnectedLinkId().hashCode());
hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
return hashCode;
}
    @Override
    public CreateConnectionRequest clone() {
        // NOTE(review): super.clone() presumably performs a shallow copy of the
        // request state (mutable members such as the tags map would then be
        // shared between original and clone) — confirm against the base class.
        return (CreateConnectionRequest) super.clone();
    }
}
| |
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.auraframework.util.json;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeSet;
import org.auraframework.util.date.DateOnly;
import org.auraframework.util.date.DateService;
import org.auraframework.util.date.DateServiceImpl;
import org.auraframework.util.javascript.Literal;
import org.auraframework.util.json.Serialization.ReferenceScope;
import org.auraframework.util.json.Serialization.ReferenceType;
import com.google.common.collect.Maps;
/**
 * A registry of basic {@link JsonSerializer} implementations for common JDK
 * types (strings, numbers, collections, maps and dates), together with the
 * lookup tables used to pick a serializer for a value at serialization time.
 */
public class JsonSerializers {
    // Shared serializer instances. All are stateless, so one instance of each
    // can be reused for every value of the matching type.
    public static final LiteralSerializer LITERAL = new LiteralSerializer();
    public static final StringSerializer STRING = new StringSerializer();
    public static final ArraySerializer ARRAY = new ArraySerializer();
    public static final CollectionSerializer COLLECTION = new CollectionSerializer();
    public static final MapSerializer MAP = new MapSerializer();
    public static final DateSerializer DATE = new DateSerializer();
    public static final DateOnlySerializer DATEONLY = new DateOnlySerializer();
    public static final CalendarSerializer CALENDAR = new CalendarSerializer();
    public static final OldSerializer OLD = new OldSerializer();
    public static final BigDecimalSerializer BIGDECIMAL = new BigDecimalSerializer();
    /**
     * two maps full of standard class to serializer mappings.
     * MAPPY_FASTY is keyed by exact class name (cheap string lookup);
     * MAPPY_SLOWY is keyed by supertype, for instanceof-style fallback.
     */
    public static final Map<String, JsonSerializer<?>> MAPPY_FASTY;
    public static final Map<Class<?>, JsonSerializer<?>> MAPPY_SLOWY;
    static {
        // First build the exact-class table; its entries are re-keyed by class
        // name below, so insertion order is preserved via LinkedHashMap.
        Map<Class<?>, JsonSerializer<?>> m = new LinkedHashMap<>();
        m.put(ArrayList.class, COLLECTION);
        m.put(LinkedList.class, COLLECTION);
        m.put(HashSet.class, COLLECTION);
        m.put(TreeSet.class, COLLECTION);
        m.put(LinkedHashSet.class, COLLECTION);
        m.put(HashMap.class, MAP);
        m.put(LinkedHashMap.class, MAP);
        m.put(String.class, STRING);
        m.put(Character.class, STRING);
        m.put(Integer.class, LITERAL);
        m.put(Float.class, LITERAL);
        m.put(Double.class, LITERAL);
        m.put(Short.class, LITERAL);
        m.put(Long.class, LITERAL);
        // NOTE(review): BigDecimal maps to LITERAL here rather than BIGDECIMAL,
        // so the precision-limited serializer below is not used by this table —
        // confirm this is intentional.
        m.put(BigDecimal.class, LITERAL);
        m.put(Boolean.class, LITERAL);
        m.put(Date.class, DATE);
        m.put(DateOnly.class, DATEONLY);
        m.put(GregorianCalendar.class, CALENDAR);
        Map<String, JsonSerializer<?>> mFast = Maps.newLinkedHashMap();
        for (Entry<Class<?>, JsonSerializer<?>> e : m.entrySet()) {
            mFast.put(e.getKey().getName(), e.getValue());
        }
        MAPPY_FASTY = Collections.unmodifiableMap(mFast);
        // Reuse the local for the (smaller) supertype fallback table.
        m = new LinkedHashMap<>();
        m.put(Collection.class, COLLECTION); // maybe iterable
        m.put(Map.class, MAP);
        m.put(Number.class, LITERAL);
        m.put(Calendar.class, CALENDAR);
        MAPPY_SLOWY = Collections.unmodifiableMap(m);
    }
    /**
     * Base for serializers whose values are referenced by identity and scoped
     * to the current action.
     */
    public static abstract class IdentitySerializer<T> implements JsonSerializer<T> {
        @Override
        public final ReferenceType getReferenceType(T value) {
            return ReferenceType.IDENTITY;
        }
        @Override
        public final ReferenceScope getReferenceScope(T value) {
            return ReferenceScope.ACTION;
        }
    }
    /**
     * Base for serializers whose values are never reference-tracked.
     */
    public static abstract class NoneSerializer<T> implements JsonSerializer<T> {
        @Override
        public final ReferenceType getReferenceType(T value) {
            return ReferenceType.NONE;
        }
        @Override
        public final ReferenceScope getReferenceScope(T value) {
            return ReferenceScope.ACTION;
        }
    }
    /**
     * temp class until all the json serializable stuff moves out of the defs.
     * Delegates serialization to the value itself and reads reference
     * type/scope from the value's {@link Serialization} annotation, if any.
     */
    public static class OldSerializer implements JsonSerializer<JsonSerializable> {
        @Override
        public final ReferenceType getReferenceType(JsonSerializable value) {
            Serialization serialization = value.getClass().getAnnotation(Serialization.class);
            if (serialization != null) {
                return serialization.referenceType();
            }
            // No annotation: fall back to untracked serialization.
            return ReferenceType.NONE;
        }
        @Override
        public final ReferenceScope getReferenceScope(JsonSerializable value) {
            Serialization serialization = value.getClass().getAnnotation(Serialization.class);
            if (serialization != null) {
                return serialization.referenceScope();
            }
            return ReferenceScope.ACTION;
        }
        @Override
        public void serialize(Json json, JsonSerializable value) throws IOException {
            value.serialize(json);
        }
    }
    /**
     * Serializes object arrays, replacing oversized ones (per the context's
     * collection size limit) with a placeholder string.
     */
    public static class ArraySerializer extends NoneSerializer<Object[]> {
        @Override
        public void serialize(Json json, Object[] value) throws IOException {
            // A limit of -1 (or any negative value) means "no limit".
            if (json.getSerializationContext().getCollectionSizeLimit() > -1
                    && value.length > json.getSerializationContext().getCollectionSizeLimit()) {
                json.writeString("Array of length " + value.length + " too large to display");
            } else {
                json.writeArray(value);
            }
        }
    }
    /**
     * Serializes collections, replacing oversized ones with a placeholder.
     */
    public static class CollectionSerializer extends NoneSerializer<Collection<?>> {
        @Override
        public void serialize(Json json, Collection<?> value) throws IOException {
            if (json.getSerializationContext().getCollectionSizeLimit() > -1
                    && value.size() > json.getSerializationContext().getCollectionSizeLimit()) {
                json.writeString("Collection of size " + value.size() + " too large to display");
            } else {
                json.writeArray(value);
            }
        }
    }
    /**
     * Serializes maps, replacing oversized ones with a placeholder.
     */
    public static class MapSerializer extends NoneSerializer<Map<?, ?>> {
        @Override
        public void serialize(Json json, Map<?, ?> value) throws IOException {
            if (json.getSerializationContext().getCollectionSizeLimit() > -1
                    && value.size() > json.getSerializationContext().getCollectionSizeLimit()) {
                json.writeString("Map of size " + value.size() + " too large to display");
            } else {
                json.writeMap(value);
            }
        }
    }
    /**
     * Serializes a {@link Date} as an ISO-8601 date-time string.
     */
    public static class DateSerializer extends NoneSerializer<Date> {
        @Override
        public void serialize(Json json, Date value) throws IOException {
            DateService dateService = DateServiceImpl.get();
            String ret = dateService.getDateTimeISO8601Converter().format(value);
            json.writeString(ret);
        }
    }
    /**
     * Serializes a {@link Date} as an ISO-8601 date-only string (no time part).
     */
    public static class DateOnlySerializer extends NoneSerializer<Date> {
        @Override
        public void serialize(Json json, Date value) throws IOException {
            DateService dateService = DateServiceImpl.get();
            String ret = dateService.getDateISO8601Converter().format(value);
            json.writeString(ret);
        }
    }
    /**
     * Serializes a {@link Calendar} by delegating to its {@link Date} value.
     */
    public static class CalendarSerializer extends NoneSerializer<Calendar> {
        @Override
        public void serialize(Json json, Calendar value) throws IOException {
            json.writeValue(value.getTime());
        }
    }
    /**
     * literal means its something that is literal in javascript
     */
    public static class LiteralSerializer extends NoneSerializer<Object> {
        @Override
        public void serialize(Json json, Object value) throws IOException {
            if (value == null) {
                // Emit the JS "null" literal rather than omitting the value.
                Literal.NULL.serialize(json);
            } else {
                json.writeLiteral(value);
            }
        }
    }
    /**
     * Serializes any object via {@code toString()}, truncating the text when it
     * exceeds the context's variable data size limit.
     */
    public static class StringSerializer extends NoneSerializer<Object> {
        @Override
        public void serialize(Json json, Object value) throws IOException {
            String valueStr = value.toString();
            int strLength = valueStr.length();
            int dataSizeLimit = json.getSerializationContext().getVariableDataSizeLimit();
            // A negative limit means "no limit"; otherwise truncate and note
            // how many characters were dropped.
            if (dataSizeLimit > -1 && (strLength > dataSizeLimit)) {
                valueStr = valueStr.substring(0, dataSizeLimit)
                        + " (" + Integer.toString(strLength - dataSizeLimit) + " more) ...";
            }
            json.writeString(valueStr);
        }
    }
    /**
     * Numbers in JS are only double precision, BigDecimals can overflow and so will be serialized as strings when too large
     */
    public static class BigDecimalSerializer extends NoneSerializer<BigDecimal> {
        // ~15 significant decimal digits is the precision of an IEEE-754 double.
        public static int MAX_PRECISION = 15;
        @Override
        public void serialize(Json json, BigDecimal bd) throws IOException {
            if (bd.precision() > MAX_PRECISION) {
                // Too precise for a JS number: emit as a string to avoid loss.
                json.writeString(bd);
            } else {
                json.writeLiteral(bd);
            }
        }
    }
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.traffic;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.util.concurrent.EventExecutor;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
* This implementation of the {@link AbstractTrafficShapingHandler} is for global
* traffic shaping, that is to say a global limitation of the bandwidth, whatever
* the number of opened channels.<br><br>
*
* The general use should be as follow:<br>
* <ul>
* <li>Create your unique GlobalTrafficShapingHandler like:<br><br>
* <tt>GlobalTrafficShapingHandler myHandler = new GlobalTrafficShapingHandler(executor);</tt><br><br>
* The executor could be the underlying IO worker pool<br>
* <tt>pipeline.addLast(myHandler);</tt><br><br>
*
* <b>Note that this handler has a Pipeline Coverage of "all" which means only one such handler must be created
* and shared among all channels as the counter must be shared among all channels.</b><br><br>
*
* Other arguments can be passed like write or read limitation (in bytes/s where 0 means no limitation)
* or the check interval (in millisecond) that represents the delay between two computations of the
* bandwidth and so the call back of the doAccounting method (0 means no accounting at all).<br><br>
*
* A value of 0 means no accounting for checkInterval. If you need traffic shaping but no such accounting,
* it is recommended to set a positive value, even if it is high since the precision of the
* Traffic Shaping depends on the period where the traffic is computed. The highest the interval,
* the less precise the traffic shaping will be. It is suggested as higher value something close
* to 5 or 10 minutes.<br><br>
*
* maxTimeToWait, by default set to 15s, allows to specify an upper bound of time shaping.<br>
* </li>
* </ul><br>
*
* Be sure to call {@link #release()} once this handler is not needed anymore to release all internal resources.
* This will not shutdown the {@link EventExecutor} as it may be shared, so you need to do this by your own.
*/
@Sharable
public class GlobalTrafficShapingHandler extends AbstractTrafficShapingHandler {
    /**
     * Pending delayed writes per channel, keyed by the channel's hash code.
     * Guarded by {@code this}: channels may be added and removed from
     * arbitrary event-loop threads, so every access — including
     * {@link #handlerAdded} — must hold the monitor. The field is also
     * {@code final} since it is never reassigned.
     */
    private final Map<Integer, List<ToSend>> messagesQueues = new HashMap<Integer, List<ToSend>>();
    /**
     * Create the global TrafficCounter
     *
     * @param executor the scheduler used to periodically compute bandwidth statistics; must not be null
     */
    void createGlobalTrafficCounter(ScheduledExecutorService executor) {
        if (executor == null) {
            throw new NullPointerException("executor");
        }
        TrafficCounter tc = new TrafficCounter(this, executor, "GlobalTC",
                checkInterval);
        setTrafficCounter(tc);
        tc.start();
    }
    /**
     * Create a new instance
     *
     * @param executor
     *            the {@link ScheduledExecutorService} to use for the {@link TrafficCounter}
     * @param writeLimit
     *            0 or a limit in bytes/s
     * @param readLimit
     *            0 or a limit in bytes/s
     * @param checkInterval
     *            The delay between two computations of performances for
     *            channels or 0 if no stats are to be computed
     * @param maxTime
     *            The maximum delay to wait in case of traffic excess
     */
    public GlobalTrafficShapingHandler(ScheduledExecutorService executor, long writeLimit, long readLimit,
            long checkInterval, long maxTime) {
        super(writeLimit, readLimit, checkInterval, maxTime);
        createGlobalTrafficCounter(executor);
    }
    /**
     * Create a new instance
     *
     * @param executor
     *            the {@link ScheduledExecutorService} to use for the {@link TrafficCounter}
     * @param writeLimit
     *            0 or a limit in bytes/s
     * @param readLimit
     *            0 or a limit in bytes/s
     * @param checkInterval
     *            The delay between two computations of performances for
     *            channels or 0 if no stats are to be computed
     */
    public GlobalTrafficShapingHandler(ScheduledExecutorService executor, long writeLimit,
            long readLimit, long checkInterval) {
        super(writeLimit, readLimit, checkInterval);
        createGlobalTrafficCounter(executor);
    }
    /**
     * Create a new instance
     *
     * @param executor
     *            the {@link ScheduledExecutorService} to use for the {@link TrafficCounter}
     * @param writeLimit
     *            0 or a limit in bytes/s
     * @param readLimit
     *            0 or a limit in bytes/s
     */
    public GlobalTrafficShapingHandler(ScheduledExecutorService executor, long writeLimit,
            long readLimit) {
        super(writeLimit, readLimit);
        createGlobalTrafficCounter(executor);
    }
    /**
     * Create a new instance
     *
     * @param executor
     *            the {@link ScheduledExecutorService} to use for the {@link TrafficCounter}
     * @param checkInterval
     *            The delay between two computations of performances for
     *            channels or 0 if no stats are to be computed
     */
    public GlobalTrafficShapingHandler(ScheduledExecutorService executor, long checkInterval) {
        super(checkInterval);
        createGlobalTrafficCounter(executor);
    }
    /**
     * Create a new instance
     *
     * @param executor
     *            the {@link ScheduledExecutorService} to use for the {@link TrafficCounter}
     */
    public GlobalTrafficShapingHandler(EventExecutor executor) {
        createGlobalTrafficCounter(executor);
    }
    /**
     * Release all internal resources of this instance
     */
    public final void release() {
        if (trafficCounter != null) {
            trafficCounter.stop();
        }
    }
    @Override
    public synchronized void handlerAdded(ChannelHandlerContext ctx) throws Exception {
        // FIX: this method must be synchronized like handlerRemoved/submitWrite;
        // the original mutated the plain HashMap without the lock, racing the
        // synchronized readers and risking map corruption under concurrency.
        Integer key = ctx.channel().hashCode();
        List<ToSend> mq = new LinkedList<ToSend>();
        messagesQueues.put(key, mq);
    }
    @Override
    public synchronized void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
        Integer key = ctx.channel().hashCode();
        List<ToSend> mq = messagesQueues.remove(key);
        if (mq != null) {
            // Release any buffered ByteBufs so their reference counts don't leak.
            for (ToSend toSend : mq) {
                if (toSend.toSend instanceof ByteBuf) {
                    ((ByteBuf) toSend.toSend).release();
                }
            }
            mq.clear();
        }
    }
    /** A delayed outbound message together with the time it may be flushed. */
    private static final class ToSend {
        // Absolute wall-clock time (ms) at which the write may proceed.
        final long date;
        final Object toSend;
        final ChannelPromise promise;
        private ToSend(final long delay, final Object toSend, final ChannelPromise promise) {
            this.date = System.currentTimeMillis() + delay;
            this.toSend = toSend;
            this.promise = promise;
        }
    }
    @Override
    protected synchronized void submitWrite(final ChannelHandlerContext ctx, final Object msg, final long delay,
            final ChannelPromise promise) {
        Integer key = ctx.channel().hashCode();
        List<ToSend> messagesQueue = messagesQueues.get(key);
        if (delay == 0 && (messagesQueue == null || messagesQueue.isEmpty())) {
            // Nothing queued ahead and no delay required: write straight through.
            ctx.write(msg, promise);
            return;
        }
        final ToSend newToSend = new ToSend(delay, msg, promise);
        if (messagesQueue == null) {
            messagesQueue = new LinkedList<ToSend>();
            messagesQueues.put(key, messagesQueue);
        }
        messagesQueue.add(newToSend);
        final List<ToSend> mqfinal = messagesQueue;
        // Re-check the queue after the delay; sendAllValid flushes in order.
        ctx.executor().schedule(new Runnable() {
            @Override
            public void run() {
                sendAllValid(ctx, mqfinal);
            }
        }, delay, TimeUnit.MILLISECONDS);
    }
    private synchronized void sendAllValid(final ChannelHandlerContext ctx, final List<ToSend> messagesQueue) {
        while (!messagesQueue.isEmpty()) {
            ToSend newToSend = messagesQueue.remove(0);
            if (newToSend.date <= System.currentTimeMillis()) {
                ctx.write(newToSend.toSend, newToSend.promise);
            } else {
                // Head of the queue is not due yet: put it back and stop, so
                // message ordering per channel is preserved.
                messagesQueue.add(0, newToSend);
                break;
            }
        }
        ctx.flush();
    }
}
| |
package in.myinnos.alphabetsindexfastscrollrecycler;
/**
* Created by MyInnos on 31-01-2017.
*/
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Typeface;
import androidx.annotation.ColorInt;
import androidx.annotation.ColorRes;
import androidx.recyclerview.widget.RecyclerView;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.MotionEvent;
public class IndexFastScrollRecyclerView extends RecyclerView {
    private IndexFastScrollRecyclerSection mScroller = null;
    private GestureDetector mGestureDetector = null;
    private boolean mEnabled = true;
    // Default appearance values; overridable via XML attributes or the setters below.
    public int setIndexTextSize = 12;
    public float mIndexbarWidth = 20;
    public float mIndexbarMargin = 5;
    public int mPreviewPadding = 5;
    public int mIndexBarCornerRadius = 5;
    public float mIndexBarTransparentValue = (float) 0.6;
    public int mIndexBarStrokeWidth = 2;
    public @ColorInt
    int mSetIndexBarStrokeColor = Color.BLACK;
    public @ColorInt
    int mIndexbarBackgroudColor = Color.BLACK;
    public @ColorInt
    int mIndexbarTextColor = Color.WHITE;
    public @ColorInt
    int indexbarHighLightTextColor = Color.BLACK;
    public int mPreviewTextSize = 50;
    public @ColorInt
    int mPreviewBackgroudColor = Color.BLACK;
    public @ColorInt
    int mPreviewTextColor = Color.WHITE;
    public float mPreviewTransparentValue = (float) 0.4;
    public IndexFastScrollRecyclerView(Context context) {
        super(context);
        // FIX: initialize the scroller for programmatic construction too. The
        // original left mScroller null here, so every setter (e.g.
        // setIndexTextSize) threw a NullPointerException when the view was
        // created from code rather than inflated from XML.
        init(context, null);
    }
    public IndexFastScrollRecyclerView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs);
    }
    public IndexFastScrollRecyclerView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(context, attrs);
    }
    /**
     * Creates the index-bar section and applies any styled XML attributes.
     *
     * @param context the view's context
     * @param attrs   the XML attribute set; may be null (defaults are kept)
     */
    private void init(Context context, AttributeSet attrs) {
        mScroller = new IndexFastScrollRecyclerSection(context, this);
        if (attrs != null) {
            TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.IndexFastScrollRecyclerView, 0, 0);
            if (typedArray != null) {
                try {
                    setIndexTextSize = typedArray.getInt(R.styleable.IndexFastScrollRecyclerView_setIndexTextSize, setIndexTextSize);
                    mIndexbarWidth = typedArray.getFloat(R.styleable.IndexFastScrollRecyclerView_setIndexbarWidth, mIndexbarWidth);
                    mIndexbarMargin = typedArray.getFloat(R.styleable.IndexFastScrollRecyclerView_setIndexbarMargin, mIndexbarMargin);
                    mPreviewPadding = typedArray.getInt(R.styleable.IndexFastScrollRecyclerView_setPreviewPadding, mPreviewPadding);
                    mIndexBarCornerRadius = typedArray.getInt(R.styleable.IndexFastScrollRecyclerView_setIndexBarCornerRadius, mIndexBarCornerRadius);
                    mIndexBarTransparentValue = typedArray.getFloat(R.styleable.IndexFastScrollRecyclerView_setIndexBarTransparentValue, mIndexBarTransparentValue);
                    // String-valued color attributes (e.g. "#FF0000").
                    if (typedArray.hasValue(R.styleable.IndexFastScrollRecyclerView_setIndexBarColor)) {
                        mIndexbarBackgroudColor = Color.parseColor(typedArray.getString(R.styleable.IndexFastScrollRecyclerView_setIndexBarColor));
                    }
                    if (typedArray.hasValue(R.styleable.IndexFastScrollRecyclerView_setIndexBarTextColor)) {
                        mIndexbarTextColor = Color.parseColor(typedArray.getString(R.styleable.IndexFastScrollRecyclerView_setIndexBarTextColor));
                    }
                    if (typedArray.hasValue(R.styleable.IndexFastScrollRecyclerView_setIndexBarHighlightTextColor)) {
                        indexbarHighLightTextColor = Color.parseColor(typedArray.getString(R.styleable.IndexFastScrollRecyclerView_setIndexBarHighlightTextColor));
                    }
                    // Resource-valued color attributes (@color/...).
                    if (typedArray.hasValue(R.styleable.IndexFastScrollRecyclerView_setIndexBarColorRes)) {
                        mIndexbarBackgroudColor = typedArray.getColor(R.styleable.IndexFastScrollRecyclerView_setIndexBarColorRes, mIndexbarBackgroudColor);
                    }
                    if (typedArray.hasValue(R.styleable.IndexFastScrollRecyclerView_setIndexBarTextColorRes)) {
                        mIndexbarTextColor = typedArray.getColor(R.styleable.IndexFastScrollRecyclerView_setIndexBarTextColorRes, mIndexbarTextColor);
                    }
                    if (typedArray.hasValue(R.styleable.IndexFastScrollRecyclerView_setIndexBarHighlightTextColorRes)) {
                        // FIX: read the *Res attribute that was checked above; the
                        // original mistakenly read setIndexBarHighlightTextColor (the
                        // string-valued attribute) here, so the Res variant was ignored.
                        indexbarHighLightTextColor = typedArray.getColor(R.styleable.IndexFastScrollRecyclerView_setIndexBarHighlightTextColorRes, indexbarHighLightTextColor);
                    }
                    mPreviewTextSize = typedArray.getInt(R.styleable.IndexFastScrollRecyclerView_setPreviewTextSize, mPreviewTextSize);
                    mPreviewTransparentValue = typedArray.getFloat(R.styleable.IndexFastScrollRecyclerView_setPreviewTransparentValue, mPreviewTransparentValue);
                    if (typedArray.hasValue(R.styleable.IndexFastScrollRecyclerView_setPreviewColor)) {
                        mPreviewBackgroudColor = Color.parseColor(typedArray.getString(R.styleable.IndexFastScrollRecyclerView_setPreviewColor));
                    }
                    if (typedArray.hasValue(R.styleable.IndexFastScrollRecyclerView_setPreviewTextColor)) {
                        mPreviewTextColor = Color.parseColor(typedArray.getString(R.styleable.IndexFastScrollRecyclerView_setPreviewTextColor));
                    }
                    if (typedArray.hasValue(R.styleable.IndexFastScrollRecyclerView_setIndexBarStrokeWidth)) {
                        mIndexBarStrokeWidth = typedArray.getInt(R.styleable.IndexFastScrollRecyclerView_setIndexBarStrokeWidth, mIndexBarStrokeWidth);
                    }
                    if (typedArray.hasValue(R.styleable.IndexFastScrollRecyclerView_setIndexBarStrokeColor)) {
                        mSetIndexBarStrokeColor = Color.parseColor(typedArray.getString(R.styleable.IndexFastScrollRecyclerView_setIndexBarStrokeColor));
                    }
                } finally {
                    // Always recycle the TypedArray so it can be reused.
                    typedArray.recycle();
                }
            }
        }
    }
    @Override
    public void draw(Canvas canvas) {
        super.draw(canvas);
        // Overlay index bar
        if (mScroller != null)
            mScroller.draw(canvas);
    }
    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        if (mEnabled) {
            // Intercept ListView's touch event
            if (mScroller != null && mScroller.onTouchEvent(ev))
                return true;
            if (mGestureDetector == null) {
                mGestureDetector = new GestureDetector(getContext(), new GestureDetector.SimpleOnGestureListener() {
                    @Override
                    public boolean onFling(MotionEvent e1, MotionEvent e2,
                                           float velocityX, float velocityY) {
                        return super.onFling(e1, e2, velocityX, velocityY);
                    }
                });
            }
            mGestureDetector.onTouchEvent(ev);
        }
        return super.onTouchEvent(ev);
    }
    @Override
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        // Claim the gesture when it starts inside the index bar.
        if (mEnabled && mScroller != null && mScroller.contains(ev.getX(), ev.getY()))
            return true;
        return super.onInterceptTouchEvent(ev);
    }
    @Override
    public void setAdapter(Adapter adapter) {
        super.setAdapter(adapter);
        if (mScroller != null)
            mScroller.setAdapter(adapter);
    }
    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        if (mScroller != null)
            mScroller.onSizeChanged(w, h, oldw, oldh);
    }
    /**
     * @param value int to set the text size of the index bar
     */
    public void setIndexTextSize(int value) {
        mScroller.setIndexTextSize(value);
    }
    /**
     * @param value float to set the width of the index bar
     */
    public void setIndexbarWidth(float value) {
        mScroller.setIndexbarWidth(value);
    }
    /**
     * @param value float to set the margin of the index bar
     */
    public void setIndexbarMargin(float value) {
        mScroller.setIndexbarMargin(value);
    }
    /**
     * @param value int to set the preview padding
     */
    public void setPreviewPadding(int value) {
        mScroller.setPreviewPadding(value);
    }
    /**
     * @param value int to set the corner radius of the index bar
     */
    public void setIndexBarCornerRadius(int value) {
        mScroller.setIndexBarCornerRadius(value);
    }
    /**
     * @param value float to set the transparency value of the index bar
     */
    public void setIndexBarTransparentValue(float value) {
        mScroller.setIndexBarTransparentValue(value);
    }
    /**
     * @param typeface Typeface to set the typeface of the preview & the index bar
     */
    public void setTypeface(Typeface typeface) {
        mScroller.setTypeface(typeface);
    }
    /**
     * @param shown boolean to show or hide the index bar
     */
    public void setIndexBarVisibility(boolean shown) {
        mScroller.setIndexBarVisibility(shown);
        mEnabled = shown;
    }
    /**
     * @param shown boolean to show or hide the index bar stroke
     */
    public void setIndexBarStrokeVisibility(boolean shown) {
        mScroller.setIndexBarStrokeVisibility(shown);
    }
    /**
     * @param color The stroke color for the index bar, e.g. "#FF0000"
     */
    public void setIndexBarStrokeColor(String color) {
        mScroller.setIndexBarStrokeColor(Color.parseColor(color));
    }
    /**
     * @param value int to set the stroke width of the index bar
     */
    public void setIndexBarStrokeWidth(int value) {
        mScroller.setIndexBarStrokeWidth(value);
    }
    /**
     * @param shown boolean to show or hide the preview
     */
    public void setPreviewVisibility(boolean shown) {
        mScroller.setPreviewVisibility(shown);
    }
    /**
     * @param value int to set the text size of the preview box
     */
    public void setPreviewTextSize(int value) {
        mScroller.setPreviewTextSize(value);
    }
    /**
     * @param color The color resource for the preview box
     */
    public void setPreviewColor(@ColorRes int color) {
        int colorValue = getContext().getResources().getColor(color);
        mScroller.setPreviewColor(colorValue);
    }
    /**
     * @param color The color for the preview box, e.g. "#FF0000"
     */
    public void setPreviewColor(String color) {
        mScroller.setPreviewColor(Color.parseColor(color));
    }
    /**
     * @param color The text color resource for the preview box
     */
    public void setPreviewTextColor(@ColorRes int color) {
        int colorValue = getContext().getResources().getColor(color);
        mScroller.setPreviewTextColor(colorValue);
    }
    /**
     * @param value float to set the transparency value of the preview box
     */
    public void setPreviewTransparentValue(float value) {
        mScroller.setPreviewTransparentValue(value);
    }
    /**
     * @param color The text color for the preview box, e.g. "#FF0000"
     */
    public void setPreviewTextColor(String color) {
        mScroller.setPreviewTextColor(Color.parseColor(color));
    }
    /**
     * @param color The color for the index bar, e.g. "#FF0000"
     */
    public void setIndexBarColor(String color) {
        mScroller.setIndexBarColor(Color.parseColor(color));
    }
    /**
     * @param color The color resource for the index bar
     */
    public void setIndexBarColor(@ColorRes int color) {
        int colorValue = getContext().getResources().getColor(color);
        mScroller.setIndexBarColor(colorValue);
    }
    /**
     * @param color The text color for the index bar, e.g. "#FF0000"
     */
    public void setIndexBarTextColor(String color) {
        mScroller.setIndexBarTextColor(Color.parseColor(color));
    }
    /**
     * @param color The text color resource for the index bar
     */
    public void setIndexBarTextColor(@ColorRes int color) {
        int colorValue = getContext().getResources().getColor(color);
        mScroller.setIndexBarTextColor(colorValue);
    }
    /**
     * @param color The highlight text color for the index bar, e.g. "#FF0000"
     */
    public void setIndexbarHighLightTextColor(String color) {
        mScroller.setIndexbarHighLightTextColor(Color.parseColor(color));
    }
    /**
     * @param color The highlight text color resource for the index bar
     */
    public void setIndexbarHighLightTextColor(@ColorRes int color) {
        int colorValue = getContext().getResources().getColor(color);
        mScroller.setIndexbarHighLightTextColor(colorValue);
    }
    /**
     * @param shown boolean to enable or disable highlighting of the touched index letter
     */
    public void setIndexBarHighLightTextVisibility(boolean shown) {
        mScroller.setIndexBarHighLightTextVisibility(shown);
    }
    /**
     * Recomputes the section index after the adapter's data set has changed.
     */
    public void updateSections() {
        mScroller.updateSections();
    }
}
| |
package com.google.ads.googleads.v8.services;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
* <pre>
* Service to manage conversion value rule sets.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/ads/googleads/v8/services/conversion_value_rule_set_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class ConversionValueRuleSetServiceGrpc {
private ConversionValueRuleSetServiceGrpc() {}
public static final String SERVICE_NAME = "google.ads.googleads.v8.services.ConversionValueRuleSetService";
// Static method descriptors that strictly reflect the proto.
private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v8.services.GetConversionValueRuleSetRequest,
com.google.ads.googleads.v8.resources.ConversionValueRuleSet> getGetConversionValueRuleSetMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetConversionValueRuleSet",
      requestType = com.google.ads.googleads.v8.services.GetConversionValueRuleSetRequest.class,
      responseType = com.google.ads.googleads.v8.resources.ConversionValueRuleSet.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v8.services.GetConversionValueRuleSetRequest,
      com.google.ads.googleads.v8.resources.ConversionValueRuleSet> getGetConversionValueRuleSetMethod() {
    // Lazily build the unary GetConversionValueRuleSet descriptor on first use.
    // Standard generated double-checked locking: the volatile static field is
    // read into a local, and construction happens at most once under the
    // class lock.
    io.grpc.MethodDescriptor<com.google.ads.googleads.v8.services.GetConversionValueRuleSetRequest, com.google.ads.googleads.v8.resources.ConversionValueRuleSet> getGetConversionValueRuleSetMethod;
    if ((getGetConversionValueRuleSetMethod = ConversionValueRuleSetServiceGrpc.getGetConversionValueRuleSetMethod) == null) {
      synchronized (ConversionValueRuleSetServiceGrpc.class) {
        if ((getGetConversionValueRuleSetMethod = ConversionValueRuleSetServiceGrpc.getGetConversionValueRuleSetMethod) == null) {
          ConversionValueRuleSetServiceGrpc.getGetConversionValueRuleSetMethod = getGetConversionValueRuleSetMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v8.services.GetConversionValueRuleSetRequest, com.google.ads.googleads.v8.resources.ConversionValueRuleSet>newBuilder()
                  .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                  .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetConversionValueRuleSet"))
                  .setSampledToLocalTracing(true)
                  // Protobuf marshallers convert request/response messages on the wire.
                  .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                      com.google.ads.googleads.v8.services.GetConversionValueRuleSetRequest.getDefaultInstance()))
                  .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                      com.google.ads.googleads.v8.resources.ConversionValueRuleSet.getDefaultInstance()))
                  .setSchemaDescriptor(new ConversionValueRuleSetServiceMethodDescriptorSupplier("GetConversionValueRuleSet"))
                  .build();
        }
      }
    }
    return getGetConversionValueRuleSetMethod;
  }
private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsRequest,
com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsResponse> getMutateConversionValueRuleSetsMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "MutateConversionValueRuleSets",
      requestType = com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsRequest.class,
      responseType = com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsRequest,
      com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsResponse> getMutateConversionValueRuleSetsMethod() {
    // Lazily build the unary MutateConversionValueRuleSets descriptor on first
    // use; same generated double-checked-locking pattern as the getter above
    // (volatile static field + class lock ensures one-time construction).
    io.grpc.MethodDescriptor<com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsRequest, com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsResponse> getMutateConversionValueRuleSetsMethod;
    if ((getMutateConversionValueRuleSetsMethod = ConversionValueRuleSetServiceGrpc.getMutateConversionValueRuleSetsMethod) == null) {
      synchronized (ConversionValueRuleSetServiceGrpc.class) {
        if ((getMutateConversionValueRuleSetsMethod = ConversionValueRuleSetServiceGrpc.getMutateConversionValueRuleSetsMethod) == null) {
          ConversionValueRuleSetServiceGrpc.getMutateConversionValueRuleSetsMethod = getMutateConversionValueRuleSetsMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsRequest, com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsResponse>newBuilder()
                  .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                  .setFullMethodName(generateFullMethodName(SERVICE_NAME, "MutateConversionValueRuleSets"))
                  .setSampledToLocalTracing(true)
                  // Protobuf marshallers convert request/response messages on the wire.
                  .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                      com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsRequest.getDefaultInstance()))
                  .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                      com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsResponse.getDefaultInstance()))
                  .setSchemaDescriptor(new ConversionValueRuleSetServiceMethodDescriptorSupplier("MutateConversionValueRuleSets"))
                  .build();
        }
      }
    }
    return getMutateConversionValueRuleSetsMethod;
  }
/**
* Creates a new async stub that supports all call types for the service
*/
public static ConversionValueRuleSetServiceStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<ConversionValueRuleSetServiceStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<ConversionValueRuleSetServiceStub>() {
@java.lang.Override
public ConversionValueRuleSetServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ConversionValueRuleSetServiceStub(channel, callOptions);
}
};
return ConversionValueRuleSetServiceStub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static ConversionValueRuleSetServiceBlockingStub newBlockingStub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<ConversionValueRuleSetServiceBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<ConversionValueRuleSetServiceBlockingStub>() {
@java.lang.Override
public ConversionValueRuleSetServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ConversionValueRuleSetServiceBlockingStub(channel, callOptions);
}
};
return ConversionValueRuleSetServiceBlockingStub.newStub(factory, channel);
}
/**
* Creates a new ListenableFuture-style stub that supports unary calls on the service
*/
public static ConversionValueRuleSetServiceFutureStub newFutureStub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<ConversionValueRuleSetServiceFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<ConversionValueRuleSetServiceFutureStub>() {
@java.lang.Override
public ConversionValueRuleSetServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ConversionValueRuleSetServiceFutureStub(channel, callOptions);
}
};
return ConversionValueRuleSetServiceFutureStub.newStub(factory, channel);
}
  /**
   * <pre>
   * Service to manage conversion value rule sets.
   * </pre>
   * Server-side base class: override the RPC methods to implement the service.
   */
  public static abstract class ConversionValueRuleSetServiceImplBase implements io.grpc.BindableService {
    /**
     * <pre>
     * Returns the requested conversion value rule set.
     * </pre>
     */
    public void getConversionValueRuleSet(com.google.ads.googleads.v8.services.GetConversionValueRuleSetRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v8.resources.ConversionValueRuleSet> responseObserver) {
      // Responds with UNIMPLEMENTED until a subclass overrides this method.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetConversionValueRuleSetMethod(), responseObserver);
    }
    /**
     * <pre>
     * Creates, updates or removes conversion value rule sets. Operation statuses
     * are returned.
     * </pre>
     */
    public void mutateConversionValueRuleSets(com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsResponse> responseObserver) {
      // Responds with UNIMPLEMENTED until a subclass overrides this method.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getMutateConversionValueRuleSetsMethod(), responseObserver);
    }
    // Registers both unary RPC handlers with the gRPC runtime.
    @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
      return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
          .addMethod(
            getGetConversionValueRuleSetMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                com.google.ads.googleads.v8.services.GetConversionValueRuleSetRequest,
                com.google.ads.googleads.v8.resources.ConversionValueRuleSet>(
                  this, METHODID_GET_CONVERSION_VALUE_RULE_SET)))
          .addMethod(
            getMutateConversionValueRuleSetsMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsRequest,
                com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsResponse>(
                  this, METHODID_MUTATE_CONVERSION_VALUE_RULE_SETS)))
          .build();
    }
  }
  /**
   * <pre>
   * Service to manage conversion value rule sets.
   * </pre>
   * Asynchronous client stub: responses are delivered to a StreamObserver.
   */
  public static final class ConversionValueRuleSetServiceStub extends io.grpc.stub.AbstractAsyncStub<ConversionValueRuleSetServiceStub> {
    private ConversionValueRuleSetServiceStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected ConversionValueRuleSetServiceStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ConversionValueRuleSetServiceStub(channel, callOptions);
    }
    /**
     * <pre>
     * Returns the requested conversion value rule set.
     * </pre>
     */
    public void getConversionValueRuleSet(com.google.ads.googleads.v8.services.GetConversionValueRuleSetRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v8.resources.ConversionValueRuleSet> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetConversionValueRuleSetMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     * <pre>
     * Creates, updates or removes conversion value rule sets. Operation statuses
     * are returned.
     * </pre>
     */
    public void mutateConversionValueRuleSets(com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsResponse> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getMutateConversionValueRuleSetsMethod(), getCallOptions()), request, responseObserver);
    }
  }
  /**
   * <pre>
   * Service to manage conversion value rule sets.
   * </pre>
   * Blocking client stub: each call waits for and returns the response.
   */
  public static final class ConversionValueRuleSetServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<ConversionValueRuleSetServiceBlockingStub> {
    private ConversionValueRuleSetServiceBlockingStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected ConversionValueRuleSetServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ConversionValueRuleSetServiceBlockingStub(channel, callOptions);
    }
    /**
     * <pre>
     * Returns the requested conversion value rule set.
     * </pre>
     */
    public com.google.ads.googleads.v8.resources.ConversionValueRuleSet getConversionValueRuleSet(com.google.ads.googleads.v8.services.GetConversionValueRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetConversionValueRuleSetMethod(), getCallOptions(), request);
    }
    /**
     * <pre>
     * Creates, updates or removes conversion value rule sets. Operation statuses
     * are returned.
     * </pre>
     */
    public com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsResponse mutateConversionValueRuleSets(com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getMutateConversionValueRuleSetsMethod(), getCallOptions(), request);
    }
  }
  /**
   * <pre>
   * Service to manage conversion value rule sets.
   * </pre>
   * Future-style client stub: each unary call returns a ListenableFuture.
   */
  public static final class ConversionValueRuleSetServiceFutureStub extends io.grpc.stub.AbstractFutureStub<ConversionValueRuleSetServiceFutureStub> {
    private ConversionValueRuleSetServiceFutureStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected ConversionValueRuleSetServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ConversionValueRuleSetServiceFutureStub(channel, callOptions);
    }
    /**
     * <pre>
     * Returns the requested conversion value rule set.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v8.resources.ConversionValueRuleSet> getConversionValueRuleSet(
        com.google.ads.googleads.v8.services.GetConversionValueRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetConversionValueRuleSetMethod(), getCallOptions()), request);
    }
    /**
     * <pre>
     * Creates, updates or removes conversion value rule sets. Operation statuses
     * are returned.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsResponse> mutateConversionValueRuleSets(
        com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getMutateConversionValueRuleSetsMethod(), getCallOptions()), request);
    }
  }
  // Dispatch ids used by MethodHandlers to route incoming calls to the impl.
  private static final int METHODID_GET_CONVERSION_VALUE_RULE_SET = 0;
  private static final int METHODID_MUTATE_CONVERSION_VALUE_RULE_SETS = 1;
  // Bridges the generic server-call interfaces to the typed service methods,
  // selecting the target method by the integer id passed at construction.
  private static final class MethodHandlers<Req, Resp> implements
      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final ConversionValueRuleSetServiceImplBase serviceImpl;
    private final int methodId;
    MethodHandlers(ConversionValueRuleSetServiceImplBase serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }
    // Unary / server-streaming entry point: casts are safe because methodId
    // fixes the concrete request/response types registered in bindService().
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_GET_CONVERSION_VALUE_RULE_SET:
          serviceImpl.getConversionValueRuleSet((com.google.ads.googleads.v8.services.GetConversionValueRuleSetRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v8.resources.ConversionValueRuleSet>) responseObserver);
          break;
        case METHODID_MUTATE_CONVERSION_VALUE_RULE_SETS:
          serviceImpl.mutateConversionValueRuleSets((com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v8.services.MutateConversionValueRuleSetsResponse>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }
    // Client / bidi-streaming entry point: this service has no streaming
    // methods, so reaching here is a programming error.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
  // Shared base for the schema-descriptor suppliers below: resolves the proto
  // file and service descriptors for reflection support.
  private static abstract class ConversionValueRuleSetServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    ConversionValueRuleSetServiceBaseDescriptorSupplier() {}
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.ads.googleads.v8.services.ConversionValueRuleSetServiceProto.getDescriptor();
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("ConversionValueRuleSetService");
    }
  }
  // File-level schema descriptor attached to the ServiceDescriptor.
  private static final class ConversionValueRuleSetServiceFileDescriptorSupplier
      extends ConversionValueRuleSetServiceBaseDescriptorSupplier {
    ConversionValueRuleSetServiceFileDescriptorSupplier() {}
  }
  // Method-level schema descriptor attached to each MethodDescriptor.
  private static final class ConversionValueRuleSetServiceMethodDescriptorSupplier
      extends ConversionValueRuleSetServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final String methodName;
    ConversionValueRuleSetServiceMethodDescriptorSupplier(String methodName) {
      this.methodName = methodName;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  // Lazily-initialized service descriptor; volatile for safe publication via
  // the double-checked locking in getServiceDescriptor().
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (ConversionValueRuleSetServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          // Built once: registers both unary methods plus the proto schema.
          serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
              .setSchemaDescriptor(new ConversionValueRuleSetServiceFileDescriptorSupplier())
              .addMethod(getGetConversionValueRuleSetMethod())
              .addMethod(getMutateConversionValueRuleSetsMethod())
              .build();
        }
      }
    }
    return result;
  }
}
| |
package me.devsaki.hentoid.adapters;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.RecyclerView.ViewHolder;
import android.text.TextUtils;
import android.util.SparseBooleanArray;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.request.RequestOptions;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import me.devsaki.hentoid.R;
import me.devsaki.hentoid.database.HentoidDB;
import me.devsaki.hentoid.database.domains.Attribute;
import me.devsaki.hentoid.database.domains.Content;
import me.devsaki.hentoid.database.domains.ImageFile;
import me.devsaki.hentoid.enums.AttributeType;
import me.devsaki.hentoid.enums.StatusContent;
import me.devsaki.hentoid.listener.ItemClickListener;
import me.devsaki.hentoid.listener.ItemClickListener.ItemSelectListener;
import me.devsaki.hentoid.services.DownloadService;
import me.devsaki.hentoid.util.FileHelper;
import me.devsaki.hentoid.util.Helper;
import timber.log.Timber;
import static com.bumptech.glide.load.resource.drawable.DrawableTransitionOptions.withCrossFade;
/**
* Created by avluis on 04/23/2016.
* RecyclerView based Content Adapter
*/
public class ContentAdapter extends RecyclerView.Adapter<ViewHolder> {
private static final int VISIBLE_THRESHOLD = 10;
private static final int VIEW_TYPE_LOADING = 0;
private static final int VIEW_TYPE_ITEM = 1;
private final Context cxt;
private final SparseBooleanArray selectedItems;
private final ItemSelectListener listener;
private boolean isFooterEnabled = true;
private ContentsWipedListener contentsWipedListener;
private EndlessScrollListener endlessScrollListener;
private List<Content> contents = new ArrayList<>();
public ContentAdapter(Context cxt, final List<Content> contents, ItemSelectListener listener) {
this.cxt = cxt;
this.contents = contents;
this.listener = listener;
selectedItems = new SparseBooleanArray();
}
public void setEndlessScrollListener(EndlessScrollListener listener) {
this.endlessScrollListener = listener;
}
public void setContentsWipedListener(ContentsWipedListener listener) {
this.contentsWipedListener = listener;
}
private void toggleSelection(int pos) {
if (selectedItems.get(pos, false)) {
selectedItems.delete(pos);
Timber.d("Removed item: %s", pos);
} else {
selectedItems.put(pos, true);
Timber.d("Added item: %s", pos);
}
notifyItemChanged(pos);
}
public void clearSelections() {
selectedItems.clear();
notifyDataSetChanged();
}
private int getSelectedItemCount() {
return selectedItems.size();
}
private List<Integer> getSelectedItems() {
List<Integer> items = new ArrayList<>(selectedItems.size());
for (int i = 0; i < selectedItems.size(); i++) {
items.add(selectedItems.keyAt(i));
}
return items;
}
private boolean getSelectedItem(int item) {
for (int i = 0; i < selectedItems.size(); i++) {
if (selectedItems.keyAt(i) == item) {
return selectedItems.get(item);
}
}
return false;
}
public void setContentList(List<Content> contentList) {
this.contents = contentList;
updateContentList();
}
public void updateContentList() {
this.notifyDataSetChanged();
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
ViewHolder viewHolder;
if (viewType == VIEW_TYPE_LOADING) {
View view = LayoutInflater.from(parent.getContext()).inflate(
R.layout.progress, parent, false);
viewHolder = new ProgressViewHolder(view);
} else {
View view = LayoutInflater.from(parent.getContext()).inflate(
R.layout.item_download, parent, false);
viewHolder = new ContentHolder(view);
}
return viewHolder;
}
@Override
public void onBindViewHolder(final ViewHolder holder, final int pos) {
if (holder instanceof ProgressViewHolder) {
((ProgressViewHolder) holder).progressBar.setIndeterminate(true);
} else if (contents.size() > 0 && pos < contents.size()) {
final Content content = contents.get(pos);
updateLayoutVisibility((ContentHolder) holder, content, pos);
populateLayout((ContentHolder) holder, content, pos);
attachOnClickListeners((ContentHolder) holder, content, pos);
}
}
private void updateLayoutVisibility(ContentHolder holder, Content content, int pos) {
if (pos == getItemCount() - VISIBLE_THRESHOLD && endlessScrollListener != null) {
endlessScrollListener.onLoadMore();
}
if (endlessScrollListener == null) {
enableFooter(false);
}
if (getSelectedItems() != null) {
int itemPos = holder.getLayoutPosition();
boolean selected = getSelectedItem(itemPos);
if (getSelectedItem(itemPos)) {
holder.itemView.setSelected(selected);
} else {
holder.itemView.setSelected(false);
}
}
final RelativeLayout items = (RelativeLayout) holder.itemView.findViewById(R.id.item);
LinearLayout minimal = (LinearLayout) holder.itemView.findViewById(R.id.item_minimal);
if (holder.itemView.isSelected()) {
Timber.d("Position: %s %s is a selected item currently in view.", pos, content.getTitle());
if (getSelectedItemCount() >= 1) {
items.setVisibility(View.GONE);
minimal.setVisibility(View.VISIBLE);
}
} else {
items.setVisibility(View.VISIBLE);
minimal.setVisibility(View.GONE);
}
}
private void populateLayout(ContentHolder holder, final Content content, int pos) {
attachTitle(holder, content);
attachCover(holder, content);
attachSeries(holder, content);
attachArtist(holder, content);
attachTags(holder, content);
attachSite(holder, content, pos);
}
private void attachTitle(ContentHolder holder, Content content) {
if (content.getTitle() == null) {
holder.tvTitle.setText(R.string.work_untitled);
if (holder.itemView.isSelected()) {
holder.tvTitle2.setText(R.string.work_untitled);
}
} else {
holder.tvTitle.setText(content.getTitle());
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.LOLLIPOP_MR1) {
holder.tvTitle.setEllipsize(TextUtils.TruncateAt.MARQUEE);
holder.tvTitle.setSingleLine(true);
holder.tvTitle.setMarqueeRepeatLimit(5);
}
holder.tvTitle.setSelected(true);
if (holder.itemView.isSelected()) {
holder.tvTitle2.setText(content.getTitle());
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.LOLLIPOP_MR1) {
holder.tvTitle2.setEllipsize(TextUtils.TruncateAt.MARQUEE);
holder.tvTitle2.setSingleLine(true);
holder.tvTitle2.setMarqueeRepeatLimit(5);
}
holder.tvTitle2.setSelected(true);
}
}
}
private void attachCover(ContentHolder holder, Content content) {
// The following is needed due to RecyclerView recycling layouts and
// Glide not considering the layout invalid for the current image:
// https://github.com/bumptech/glide/issues/835#issuecomment-167438903
holder.ivCover.layout(0, 0, 0, 0);
holder.ivCover2.layout(0, 0, 0, 0);
RequestOptions myOptions = new RequestOptions()
.diskCacheStrategy(DiskCacheStrategy.ALL)
.fitCenter()
.placeholder(R.drawable.ic_placeholder)
.error(R.drawable.ic_placeholder);
Glide.with(cxt.getApplicationContext())
.load(FileHelper.getThumb(cxt, content))
.apply(myOptions)
.transition(withCrossFade())
.into(holder.ivCover);
if (holder.itemView.isSelected()) {
Glide.with(cxt.getApplicationContext())
.load(FileHelper.getThumb(cxt, content))
.apply(myOptions)
.transition(withCrossFade())
.into(holder.ivCover2);
}
}
private void attachSeries(ContentHolder holder, Content content) {
String templateSeries = cxt.getResources().getString(R.string.work_series);
String series = "";
List<Attribute> seriesAttributes = content.getAttributes().get(AttributeType.SERIE);
if (seriesAttributes == null) {
holder.tvSeries.setVisibility(View.GONE);
} else {
for (int i = 0; i < seriesAttributes.size(); i++) {
Attribute attribute = seriesAttributes.get(i);
series += attribute.getName();
if (i != seriesAttributes.size() - 1) {
series += ", ";
}
}
holder.tvSeries.setVisibility(View.VISIBLE);
}
holder.tvSeries.setText(Helper.fromHtml(templateSeries.replace("@series@", series)));
if (seriesAttributes == null) {
holder.tvSeries.setText(Helper.fromHtml(templateSeries.replace("@series@",
cxt.getResources().getString(R.string.work_untitled))));
holder.tvSeries.setVisibility(View.VISIBLE);
}
}
private void attachArtist(ContentHolder holder, Content content) {
String templateArtist = cxt.getResources().getString(R.string.work_artist);
String artists = "";
List<Attribute> artistAttributes = content.getAttributes().get(AttributeType.ARTIST);
if (artistAttributes == null) {
holder.tvArtist.setVisibility(View.GONE);
} else {
for (int i = 0; i < artistAttributes.size(); i++) {
Attribute attribute = artistAttributes.get(i);
artists += attribute.getName();
if (i != artistAttributes.size() - 1) {
artists += ", ";
}
}
holder.tvArtist.setVisibility(View.VISIBLE);
}
holder.tvArtist.setText(Helper.fromHtml(templateArtist.replace("@artist@", artists)));
if (artistAttributes == null) {
holder.tvArtist.setText(Helper.fromHtml(templateArtist.replace("@artist@",
cxt.getResources().getString(R.string.work_untitled))));
holder.tvArtist.setVisibility(View.VISIBLE);
}
}
private void attachTags(ContentHolder holder, Content content) {
String templateTags = cxt.getResources().getString(R.string.work_tags);
String tags = "";
List<Attribute> tagsAttributes = content.getAttributes().get(AttributeType.TAG);
if (tagsAttributes != null) {
for (int i = 0; i < tagsAttributes.size(); i++) {
Attribute attribute = tagsAttributes.get(i);
if (attribute.getName() != null) {
tags += templateTags.replace("@tag@", attribute.getName());
if (i != tagsAttributes.size() - 1) {
tags += ", ";
}
}
}
}
holder.tvTags.setText(Helper.fromHtml(tags));
}
private void attachSite(ContentHolder holder, final Content content, int pos) {
if (content.getSite() != null) {
int img = content.getSite().getIco();
holder.ivSite.setImageResource(img);
holder.ivSite.setOnClickListener(v -> {
if (getSelectedItemCount() >= 1) {
clearSelections();
listener.onItemClear(0);
}
Helper.viewContent(cxt, content);
});
} else {
holder.ivSite.setImageResource(R.drawable.ic_stat_hentoid);
}
if (content.getStatus() != null) {
StatusContent status = content.getStatus();
int bg;
switch (status) {
case DOWNLOADED:
bg = R.color.card_item_src_normal;
break;
case MIGRATED:
bg = R.color.card_item_src_migrated;
break;
default:
Timber.d("Position: %s %s - Status: %s", pos, content.getTitle(), status);
bg = R.color.card_item_src_other;
break;
}
holder.ivSite.setBackgroundColor(ContextCompat.getColor(cxt, bg));
if (status == StatusContent.ERROR) {
holder.ivError.setVisibility(View.VISIBLE);
holder.ivError.setOnClickListener(v -> {
if (getSelectedItemCount() >= 1) {
clearSelections();
listener.onItemClear(0);
}
downloadAgain(content);
});
} else {
holder.ivError.setVisibility(View.GONE);
}
} else {
holder.ivSite.setVisibility(View.GONE);
}
}
private void attachOnClickListeners(final ContentHolder holder, Content content, int pos) {
holder.itemView.setOnClickListener(new ItemClickListener(cxt, content, pos, listener) {
@Override
public void onClick(View v) {
if (getSelectedItems() != null) {
int itemPos = holder.getLayoutPosition();
boolean selected = getSelectedItem(itemPos);
boolean selectionMode = getSelectedItemCount() > 0;
if (selectionMode) {
Timber.d("In Selection Mode - ignore open requests.");
if (selected) {
Timber.d("Item already selected, remove it.");
toggleSelection(itemPos);
setSelected(false, getSelectedItemCount());
} else {
Timber.d("Item not selected, add it.");
toggleSelection(itemPos);
setSelected(true, getSelectedItemCount());
}
onLongClick(v);
} else {
Timber.d("Not in selection mode, opening item.");
clearSelections();
setSelected(false, 0);
super.onClick(v);
}
}
}
});
holder.itemView.setOnLongClickListener(new ItemClickListener(cxt, content, pos, listener) {
@Override
public boolean onLongClick(View v) {
if (getSelectedItems() != null) {
int itemPos = holder.getLayoutPosition();
boolean selected = getSelectedItem(itemPos);
if (selected) {
Timber.d("Item already selected, remove it.");
toggleSelection(itemPos);
setSelected(false, getSelectedItemCount());
} else {
Timber.d("Item not selected, add it.");
toggleSelection(itemPos);
setSelected(true, getSelectedItemCount());
}
super.onLongClick(v);
return true;
}
return false;
}
});
}
private void downloadAgain(final Content item) {
int images;
int imgErrors = 0;
images = item.getImageFiles().size();
for (ImageFile imgFile : item.getImageFiles()) {
if (imgFile.getStatus() == StatusContent.ERROR) {
imgErrors++;
}
}
String message = cxt.getString(R.string.download_again_dialog_message).replace(
"@error", imgErrors + "").replace("@total", images + "");
AlertDialog.Builder builder = new AlertDialog.Builder(cxt);
builder.setTitle(R.string.download_again_dialog_title)
.setMessage(message)
.setPositiveButton(android.R.string.yes,
(dialog, which) -> {
HentoidDB db = HentoidDB.getInstance(cxt);
item.setStatus(StatusContent.DOWNLOADING);
item.setDownloadDate(new Date().getTime());
db.updateContentStatus(item);
Intent intent = new Intent(Intent.ACTION_SYNC, null, cxt,
DownloadService.class);
cxt.startService(intent);
Helper.toast(cxt, R.string.add_to_queue);
removeItem(item);
notifyDataSetChanged();
})
.setNegativeButton(android.R.string.no, null)
.create().show();
}
private void shareContent(final Content item) {
String url = item.getGalleryUrl();
Intent intent = new Intent();
intent.setAction(Intent.ACTION_SEND);
intent.setData(Uri.parse(url));
intent.putExtra(Intent.EXTRA_SUBJECT, item.getTitle());
intent.putExtra(Intent.EXTRA_TEXT, url);
intent.setType("text/plain");
cxt.startActivity(Intent.createChooser(intent, cxt.getString(R.string.send_to)));
}
private void archiveContent(final Content item) {
Helper.toast(R.string.packaging_content);
FileHelper.archiveContent(cxt, item);
}
private void deleteContent(final Content item) {
AlertDialog.Builder builder = new AlertDialog.Builder(cxt);
builder.setMessage(R.string.ask_delete)
.setPositiveButton(android.R.string.yes,
(dialog, which) -> {
clearSelections();
deleteItem(item);
})
.setNegativeButton(android.R.string.no,
(dialog, which) -> {
clearSelections();
listener.onItemClear(0);
})
.create().show();
}
private void deleteContents(final List<Content> items) {
AlertDialog.Builder builder = new AlertDialog.Builder(cxt);
builder.setMessage(R.string.ask_delete_multiple)
.setPositiveButton(android.R.string.yes,
(dialog, which) -> {
clearSelections();
deleteItems(items);
})
.setNegativeButton(android.R.string.no,
(dialog, which) -> {
clearSelections();
listener.onItemClear(0);
})
.create().show();
}
@Override
public long getItemId(int position) {
return contents.get(position).getId();
}
@Override
public int getItemCount() {
return (isFooterEnabled) ? contents.size() + 1 : contents.size();
}
@Override
public int getItemViewType(int pos) {
return (isFooterEnabled && pos >= contents.size()) ? VIEW_TYPE_LOADING : VIEW_TYPE_ITEM;
}
public void enableFooter(boolean isEnabled) {
this.isFooterEnabled = isEnabled;
}
public void sharedSelectedItems() {
int itemCount = getSelectedItemCount();
if (itemCount > 0) {
if (itemCount == 1) {
Timber.d("Preparing to share selected item...");
List<Content> items;
items = processSelection();
if (!items.isEmpty()) {
shareContent(items.get(0));
} else {
listener.onItemClear(0);
Timber.d("Nothing to share!!");
}
} else {
// TODO: Implement multi-item share
Timber.d("How even?");
Helper.toast("Not yet implemented!!");
}
} else {
listener.onItemClear(0);
Timber.d("No items to share!!");
}
}
public void purgeSelectedItems() {
int itemCount = getSelectedItemCount();
if (itemCount > 0) {
if (itemCount == 1) {
Timber.d("Preparing to delete selected item...");
List<Content> items;
items = processSelection();
if (!items.isEmpty()) {
deleteContent(items.get(0));
} else {
listener.onItemClear(0);
Timber.d("Nothing to delete!!");
}
} else {
Timber.d("Preparing to delete selected items...");
List<Content> items;
items = processSelection();
if (!items.isEmpty()) {
deleteContents(items);
} else {
listener.onItemClear(0);
Timber.d("No items to delete!!");
}
}
} else {
listener.onItemClear(0);
Timber.d("No items to delete!!");
}
}
public void archiveSelectedItems() {
int itemCount = getSelectedItemCount();
if (itemCount > 0) {
if (itemCount == 1) {
Timber.d("Preparing to archive selected item...");
List<Content> items;
items = processSelection();
if (!items.isEmpty()) {
archiveContent(items.get(0));
} else {
listener.onItemClear(0);
Timber.d("Nothing to archive!!");
}
} else {
// TODO: Implement multi-item archival
Timber.d("How even?");
Helper.toast("Not yet implemented!!");
}
} else {
listener.onItemClear(0);
Timber.d("No items to archive!!");
}
}
private List<Content> processSelection() {
List<Content> selectionList = new ArrayList<>();
List<Integer> selection = getSelectedItems();
Timber.d("Selected items: %s", selection);
for (int i = 0; i < selection.size(); i++) {
selectionList.add(i, contents.get(selection.get(i)));
Timber.d("Added: %s to list.", contents.get(selection.get(i)).getTitle());
}
return selectionList;
}
private void removeItem(Content item) {
removeItem(item, true);
}
private void removeItem(Content item, boolean broadcast) {
int position = contents.indexOf(item);
Timber.d("Removing item: %s from adapter.", item.getTitle());
contents.remove(position);
notifyItemRemoved(position);
if (contents != null) {
if (contents.size() == 0) {
contentsWipedListener.onContentsWiped();
}
if (broadcast) {
listener.onItemClear(0);
}
}
}
private void deleteItem(final Content item) {
final HentoidDB db = HentoidDB.getInstance(cxt);
removeItem(item);
AsyncTask.execute(() -> {
FileHelper.removeContent(cxt, item);
db.deleteContent(item);
Timber.d("Removed item: %s from db and file system.", item.getTitle());
});
notifyDataSetChanged();
Helper.toast(cxt, cxt.getString(R.string.deleted).replace("@content", item.getTitle()));
}
private void deleteItems(final List<Content> items) {
final HentoidDB db = HentoidDB.getInstance(cxt);
for (int i = 0; i < items.size(); i++) {
removeItem(items.get(i), false);
}
AsyncTask.execute(() -> {
for (int i = 0; i < items.size(); i++) {
FileHelper.removeContent(cxt, items.get(i));
db.deleteContent(items.get(i));
Timber.d("Removed item: " + items.get(i).getTitle()
+ " from db and file system.");
}
});
listener.onItemClear(0);
notifyDataSetChanged();
Helper.toast(cxt, "Selected items have been deleted.");
}
/**
 * Callback used by the endless-scroll mechanism to request that the next
 * page of content be loaded.
 */
public interface EndlessScrollListener {
    void onLoadMore();
}
/**
 * Callback fired when the adapter's content list becomes empty
 * (e.g. after the last item has been removed or deleted).
 */
public interface ContentsWipedListener {
    void onContentsWiped();
}
/**
 * ViewHolder for the loading-progress footer row shown while the next
 * page of results is being fetched.
 */
private static class ProgressViewHolder extends RecyclerView.ViewHolder {
    // Spinner displayed while more content is loading.
    final ProgressBar progressBar;

    ProgressViewHolder(View itemView) {
        super(itemView);
        progressBar = (ProgressBar) itemView.findViewById(R.id.loadingProgress);
    }
}
}
| |
/*
* #%L
* =====================================================
* _____ _ ____ _ _ _ _
* |_ _|_ __ _ _ ___| |_ / __ \| | | | ___ | | | |
* | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| |
* | | | | | |_| \__ \ |_| | (_| | _ |\__ \| _ |
* |_| |_| \__,_|___/\__|\ \__,_|_| |_||___/|_| |_|
* \____/
*
* =====================================================
*
* Hochschule Hannover
* (University of Applied Sciences and Arts, Hannover)
* Faculty IV, Dept. of Computer Science
* Ricklinger Stadtweg 118, 30459 Hannover, Germany
*
* Email: trust@f4-i.fh-hannover.de
* Website: http://trust.f4.hs-hannover.de
*
* This file is part of ifmapcli (ex-meta), version 0.3.1, implemented by the Trust@HsH
* research group at the Hochschule Hannover.
* %%
* Copyright (C) 2010 - 2015 Trust@HsH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package de.hshannover.f4.trust.ifmapcli;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import org.w3c.dom.Document;
import util.DomHelpers;
import de.hshannover.f4.trust.ifmapcli.common.AbstractClient;
import de.hshannover.f4.trust.ifmapcli.common.ParserUtil;
import de.hshannover.f4.trust.ifmapcli.common.enums.IdType;
import de.hshannover.f4.trust.ifmapj.binding.IfmapStrings;
import de.hshannover.f4.trust.ifmapj.channel.SSRC;
import de.hshannover.f4.trust.ifmapj.exception.MarshalException;
import de.hshannover.f4.trust.ifmapj.identifier.Identifier;
import de.hshannover.f4.trust.ifmapj.identifier.Identifiers;
import de.hshannover.f4.trust.ifmapj.messages.MetadataLifetime;
import de.hshannover.f4.trust.ifmapj.messages.PublishDelete;
import de.hshannover.f4.trust.ifmapj.messages.PublishRequest;
import de.hshannover.f4.trust.ifmapj.messages.PublishUpdate;
import de.hshannover.f4.trust.ifmapj.messages.Requests;
import de.hshannover.f4.trust.ifmapj.metadata.Cardinality;
/**
* A simple tool that publishes or deletes extended-metadata. When metadata is
* published, the lifetime is set to be 'forever'.
*
* @author Marius Rohde
*
*/
public class ExMeta extends AbstractClient {
/**
*
* Method to initialize the parser operations
*
* @param args
* the arguments submitted to the cli module
*
*/
private static void initParser(String[] args) {
command = "ex-meta";
// ---parsing-------
ArgumentParser parser = createDefaultParser();
ParserUtil.addPublishOperation(parser);
ParserUtil.addIdentifierType(parser, IdType.ipv4, IdType.ipv6,
IdType.mac, IdType.dev, IdType.ar, IdType.id, IdType.id_aik,
IdType.id_dist, IdType.id_dns, IdType.id_email,
IdType.id_hiphit, IdType.id_kerberos, IdType.id_sip,
IdType.id_tel, IdType.id_user, IdType.exid);
ParserUtil.addIdentifierOrEx(parser);
ParserUtil.addSecIdentifierType(parser, IdType.ipv4, IdType.ipv6,
IdType.mac, IdType.dev, IdType.ar, IdType.id, IdType.id_aik,
IdType.id_dist, IdType.id_dns, IdType.id_email,
IdType.id_hiphit, IdType.id_kerberos, IdType.id_sip,
IdType.id_tel, IdType.id_user, IdType.exid);
ParserUtil.addSecIdentifier(parser);
ParserUtil.addElementName(parser);
ParserUtil.addCardinality(parser);
ParserUtil.addAttributeName(parser);
ParserUtil.addAttributeValue(parser);
ParserUtil.addMetaFileInSystemIn(parser);
ParserUtil.addNamespacePrefix(parser);
ParserUtil.addNamespaceUri(parser);
parseParameters(parser, args);
}
/**
* Method to start this module
*
*/
public static void main(String[] args) {
initParser(args);
printParameters(KEY_OPERATION, new String[] { KEY_IDENTIFIER_TYPE, KEY_IDENTIFIER, KEY_SEC_IDENTIFIER_TYPE,
KEY_SEC_IDENTIFIER, KEY_ELEMENT_NAME, KEY_CARDINALITY, KEY_ATTRIBUTE_NAME, KEY_ATTRIBUTE_VALUE,
KEY_NAMESPACE_PREFIX, KEY_NAMESPACE_URI, KEY_META_FILE_IN_SYSTEM_IN });
// ---get parameters and set Defaults if necessary-------
String nsPrefix = resource.getString(KEY_NAMESPACE_PREFIX);
String nsUri = resource.getString(KEY_NAMESPACE_PREFIX);
String attrName = resource.getString(KEY_ATTRIBUTE_NAME);
String attrValue = resource.getString(KEY_ATTRIBUTE_VALUE);
String elementName = resource.getString(KEY_ELEMENT_NAME);
if (nsPrefix == null) {
nsPrefix = IfmapStrings.STD_METADATA_PREFIX;
}
if (nsUri == null) {
nsUri = IfmapStrings.STD_METADATA_NS_URI;
}
// prepare the identifier
IdType identifierType1 = resource.get(KEY_IDENTIFIER_TYPE);
String identifierName1 = resource.getString(KEY_IDENTIFIER);
Identifier identifier1 = null;
if (identifierType1 == IdType.exid) {
try {
identifier1 = Identifiers
.createExtendedIdentity(new FileInputStream(
identifierName1));
} catch (MarshalException e) {
e.printStackTrace();
} catch (FileNotFoundException e) {
e.printStackTrace();
}
} else {
identifier1 = getIdentifier(identifierType1, identifierName1);
}
IdType identifierType2 = resource.get(KEY_SEC_IDENTIFIER_TYPE);
String identifierName2 = resource.getString(KEY_SEC_IDENTIFIER);
Identifier identifier2 = null;
if (identifierType2 == null && identifierName2 != null) {
throw new RuntimeException("no identifier type specified for given identifier: " + identifierName2);
} else if (identifierType2 != null && identifierName2 == null) {
throw new RuntimeException("no identifier specified for given identifier type: " + identifierType2);
} else if (identifierType2 != null && identifierName2 != null) {
if (identifierType2 == IdType.exid) {
try {
identifier2 = Identifiers
.createExtendedIdentity(new FileInputStream(
identifierName2));
} catch (MarshalException e) {
e.printStackTrace();
} catch (FileNotFoundException e) {
e.printStackTrace();
}
} else {
identifier2 = getIdentifier(identifierType2,
identifierName2);
}
}
// look for inputfile or stream for metadata if not look for given
// elementname
Document metadata = null;
File input = resource.get(KEY_META_FILE_IN_SYSTEM_IN);
if (input != null) {
metadata = readFileInSystemIn(input);
} else {
if (elementName.equals("")) {
throw new RuntimeException("no element name or no xmlfile specified for metadata");
}
}
// if nothing
if (metadata == null) {
if (resource.getString(KEY_CARDINALITY).equals("singleValue")) {
if (attrName.equals("") || attrValue.equals("")) {
metadata = mf.create(elementName, nsPrefix, nsUri, Cardinality.singleValue);
} else {
metadata = mf.create(elementName, nsPrefix, nsUri, Cardinality.singleValue, attrName, attrValue);
}
} else {
if (attrName.equals("") || attrValue.equals("")) {
metadata = mf.create(elementName, nsPrefix, nsUri, Cardinality.multiValue);
} else {
metadata = mf.create(elementName, nsPrefix, nsUri, Cardinality.multiValue, attrName, attrValue);
}
}
}
publishDeleteMetaData(identifier1, identifier2, metadata);
}
/**
*
* Helper method to read inputstream of file or system in
*
* @param input
* A File Object specifying System in or a File
*
* @return A XML Metadata Document
*/
private static Document readFileInSystemIn(File input) {
Document metadata = null;
if (input.getName().equals("-")) {
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
// Workaround for multiline system in
String line = "";
try {
line = br.readLine();
while (br.ready()) {
line += br.readLine();
}
} catch (IOException e) {
e.printStackTrace();
}
// ----------
try {
metadata = DomHelpers.toDocument(line, null);
} catch (MarshalException e) {
e.printStackTrace();
}
} else {
try {
metadata = DomHelpers.toDocument(new FileInputStream(input));
} catch (MarshalException e) {
e.printStackTrace();
} catch (FileNotFoundException e) {
e.printStackTrace();
}
}
return metadata;
}
/**
*
* helper Method to publish/delete the metadata
*
* @param identifier1
* the identifier1
* @param identifier2
* the optional identifier2 must be null if not exist
* @param metadata
* the metadata xml document
*
*/
private static void publishDeleteMetaData(Identifier identifier1, Identifier identifier2, Document metadata) {
String nsPrefix = metadata.getChildNodes().item(0).getPrefix();
String nsUri = metadata.getChildNodes().item(0).getNamespaceURI();
String elementname = metadata.getChildNodes().item(0).getLocalName();
try {
SSRC ssrc = createSSRC();
ssrc.newSession();
PublishRequest request;
if (isUpdate(KEY_OPERATION)) {
if (identifier2 == null) {
PublishUpdate publishUpdate = Requests.createPublishUpdate(identifier1, metadata,
MetadataLifetime.forever);
request = Requests.createPublishReq(publishUpdate);
} else {
PublishUpdate publishUpdate = Requests.createPublishUpdate(identifier1, identifier2, metadata,
MetadataLifetime.forever);
request = Requests.createPublishReq(publishUpdate);
}
} else {
PublishDelete publishDelete;
String filter = String.format(nsPrefix + ":" + elementname + "[@ifmap-publisher-id='%s']",
ssrc.getPublisherId());
if (identifier2 == null) {
publishDelete = Requests.createPublishDelete(identifier1, filter);
} else {
publishDelete = Requests.createPublishDelete(identifier1, identifier2, filter);
}
publishDelete.addNamespaceDeclaration(nsPrefix, nsUri);
request = Requests.createPublishReq(publishDelete);
}
ssrc.publish(request);
ssrc.endSession();
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
}
}
| |
/*******************************************************************************
* Copyright 2012 Apigee Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.usergrid.mq;
import static org.apache.commons.codec.binary.Base64.decodeBase64;
import static org.apache.commons.lang.StringUtils.isBlank;
import static org.apache.commons.lang.StringUtils.split;
import static org.usergrid.persistence.Schema.PROPERTY_TYPE;
import static org.usergrid.persistence.Schema.PROPERTY_UUID;
import static org.usergrid.utils.ClassUtils.cast;
import static org.usergrid.utils.ConversionUtils.uuid;
import static org.usergrid.utils.ListUtils.first;
import static org.usergrid.utils.ListUtils.firstBoolean;
import static org.usergrid.utils.ListUtils.firstInteger;
import static org.usergrid.utils.ListUtils.firstLong;
import static org.usergrid.utils.ListUtils.firstUuid;
import static org.usergrid.utils.ListUtils.isEmpty;
import static org.usergrid.utils.MapUtils.toMapList;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CommonTokenStream;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.usergrid.persistence.CounterResolution;
import org.usergrid.persistence.Entity;
import org.usergrid.persistence.Identifier;
import org.usergrid.persistence.Results;
import org.usergrid.persistence.Results.Level;
import org.usergrid.utils.JsonUtils;
public class Query {
private static final Logger logger = LoggerFactory.getLogger(Query.class);

// Page size used when callers do not set an explicit limit.
public static final int DEFAULT_LIMIT = 10;

// Entity type (collection) this query runs against.
protected String type;
// Sort clauses, in the order they were added.
protected List<SortPredicate> sortPredicates = new ArrayList<SortPredicate>();
// Filter (where) clauses, in the order they were added.
protected List<FilterPredicate> filterPredicates = new ArrayList<FilterPredicate>();
// UUID of the first result to return; may be derived lazily from 'cursor'.
protected UUID startResult;
// Opaque pagination cursor (base64 encoded).
protected String cursor;
// Raw limit; 0 means "not set" -- see getLimit(int).
protected int limit = 0;
protected boolean limitSet = false;
// Select subject -> output alias mapping; insertion order is preserved.
protected Map<String, String> selectSubjects = new LinkedHashMap<String, String>();
protected boolean mergeSelectResults = false;
// Detail level of the returned results; see isIdsOnly()/setIdsOnly().
protected Level level = Level.ALL_PROPERTIES;
// Connection type for connection queries.
protected String connection;
protected List<String> permissions;
protected boolean reversed;
protected boolean reversedSet = false;
// Counter-query time window and options.
protected Long startTime;
protected Long finishTime;
protected boolean pad;
protected CounterResolution resolution = CounterResolution.ALL;
// Optional identifier constraints parsed from query parameters.
protected List<Identifier> users;
protected List<Identifier> groups;
protected List<Identifier> identifiers;
protected List<String> categories;
protected List<CounterFilterPredicate> counterFilters;
/** Creates an empty query. */
public Query() {
}

/**
 * Creates a query against the given entity type.
 *
 * @param type the entity type to query
 */
public Query(String type) {
    this.type = type;
}

/**
 * Copy constructor. Scalar state is copied directly; each collection field
 * is copied into a new collection (the contained elements themselves are
 * shared, not cloned). A null argument yields an empty query.
 *
 * @param q the query to copy, may be null
 */
public Query(Query q) {
    if (q != null) {
        type = q.type;
        sortPredicates = q.sortPredicates != null ? new ArrayList<SortPredicate>(
                q.sortPredicates) : null;
        filterPredicates = q.filterPredicates != null ? new ArrayList<FilterPredicate>(
                q.filterPredicates) : null;
        startResult = q.startResult;
        cursor = q.cursor;
        limit = q.limit;
        limitSet = q.limitSet;
        selectSubjects = q.selectSubjects != null ? new LinkedHashMap<String, String>(
                q.selectSubjects) : null;
        mergeSelectResults = q.mergeSelectResults;
        level = q.level;
        connection = q.connection;
        permissions = q.permissions != null ? new ArrayList<String>(
                q.permissions) : null;
        reversed = q.reversed;
        reversedSet = q.reversedSet;
        startTime = q.startTime;
        finishTime = q.finishTime;
        resolution = q.resolution;
        pad = q.pad;
        users = q.users != null ? new ArrayList<Identifier>(q.users) : null;
        groups = q.groups != null ? new ArrayList<Identifier>(q.groups)
                : null;
        identifiers = q.identifiers != null ? new ArrayList<Identifier>(
                q.identifiers) : null;
        categories = q.categories != null ? new ArrayList<String>(
                q.categories) : null;
        counterFilters = q.counterFilters != null ? new ArrayList<CounterFilterPredicate>(
                q.counterFilters) : null;
    }
}
/**
 * Parses a query-language string into a Query.
 * <p>
 * Strings that do not start with a recognized verb (select/insert/update/
 * delete) are treated as shorthand: "order by ..." becomes
 * "select * order by ...", anything else becomes "select * where ...".
 *
 * @param ql the query string, may be null
 * @return the parsed query, or null if ql was null or could not be parsed
 */
public static Query fromQL(String ql) {
    if (ql == null) {
        return null;
    }
    ql = ql.trim();
    String qlt = ql.toLowerCase();
    if (!qlt.startsWith("select") && !qlt.startsWith("insert")
            && !qlt.startsWith("update") && !qlt.startsWith("delete")) {
        if (qlt.startsWith("order by")) {
            ql = "select * " + ql;
        } else {
            ql = "select * where " + ql;
        }
    }
    try {
        // ql was already trimmed above, so the second trim() was redundant.
        ANTLRStringStream in = new ANTLRStringStream(ql);
        QueryFilterLexer lexer = new QueryFilterLexer(in);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        QueryFilterParser parser = new QueryFilterParser(tokens);
        return parser.ql();
    } catch (Exception e) {
        // Parameterized SLF4J logging; the throwable as the last argument
        // keeps the stack trace.
        logger.error("Unable to parse \"{}\"", ql, e);
    }
    return null;
}
/**
 * Returns the given query, or a fresh empty Query when the argument is null.
 *
 * @param query an existing query, may be null
 * @return a non-null query
 */
public static Query newQueryIfNull(Query query) {
    return (query != null) ? query : new Query();
}
/**
 * Builds a query from a JSON object string whose fields are interpreted as
 * query parameters (same keys as fromQueryParams).
 *
 * @param json a JSON object string
 * @return the query, or null when the JSON does not parse to a map
 */
public static Query fromJsonString(String json) {
    Object o = JsonUtils.parse(json);
    if (o instanceof Map) {
        @SuppressWarnings({ "unchecked", "rawtypes" })
        Map<String, List<String>> params = cast(toMapList((Map) o));
        return fromQueryParams(params);
    }
    return null;
}
/**
 * Builds a query from HTTP-style query parameters. The "ql" parameter (if
 * present) is parsed first; every other recognized parameter is then layered
 * on top. The query object is created lazily -- a null return means no
 * recognized parameter was present at all.
 *
 * @param params parameter name -> list of values
 * @return the assembled query, or null when nothing applicable was found
 */
public static Query fromQueryParams(Map<String, List<String>> params) {
    String type = null;
    Query q = null;
    String ql = null;
    String connection = null;
    UUID start = null;
    String cursor = null;
    Integer limit = null;
    List<String> permissions = null;
    Boolean reversed = null;
    Long startTime = null;
    Long finishTime = null;
    Boolean pad = null;
    CounterResolution resolution = null;
    List<Identifier> users = null;
    List<Identifier> groups = null;
    List<Identifier> identifiers = null;
    List<String> categories = null;
    List<CounterFilterPredicate> counterFilters = null;
    List<String> l = null;

    // Pull out the simple scalar parameters; first(...) takes the first
    // value when a parameter was supplied more than once.
    ql = first(params.get("ql"));
    type = first(params.get("type"));
    reversed = firstBoolean(params.get("reversed"));
    connection = first(params.get("connection"));
    start = firstUuid(params.get("start"));
    cursor = first(params.get("cursor"));
    limit = firstInteger(params.get("limit"));
    permissions = params.get("permission");
    startTime = firstLong(params.get("start_time"));
    finishTime = firstLong(params.get("end_time"));
    l = params.get("resolution");
    if (!isEmpty(l)) {
        resolution = CounterResolution.fromString(l.get(0));
    }
    users = Identifier.fromList(params.get("user"));
    groups = Identifier.fromList(params.get("group"));
    categories = params.get("category");
    l = params.get("counter");
    if (!isEmpty(l)) {
        counterFilters = CounterFilterPredicate.fromList(l);
    }
    pad = firstBoolean(params.get("pad"));
    // Bare, valueless parameters are treated as identifiers
    // (e.g. ?<uuid> or ?<name>).
    for (Entry<String, List<String>> param : params.entrySet()) {
        if ((param.getValue() == null) || (param.getValue().size() == 0)) {
            Identifier identifier = Identifier.from(param.getKey());
            if (identifier != null) {
                if (identifiers == null) {
                    identifiers = new ArrayList<Identifier>();
                }
                identifiers.add(identifier);
            }
        }
    }
    // The ql string, when present, seeds the query; everything below only
    // instantiates a query when it actually has something to set.
    if (ql != null) {
        q = Query.fromQL(ql);
    }
    l = params.get("filter");
    if (!isEmpty(l)) {
        q = newQueryIfNull(q);
        for (String s : l) {
            q.addFilter(s);
        }
    }
    l = params.get("sort");
    if (!isEmpty(l)) {
        q = newQueryIfNull(q);
        for (String s : l) {
            q.addSort(s);
        }
    }
    if (type != null) {
        q = newQueryIfNull(q);
        q.setEntityType(type);
    }
    if (connection != null) {
        q = newQueryIfNull(q);
        q.setConnectionType(connection);
    }
    if (permissions != null) {
        q = newQueryIfNull(q);
        q.setPermissions(permissions);
    }
    if (start != null) {
        q = newQueryIfNull(q);
        q.setStartResult(start);
    }
    if (cursor != null) {
        q = newQueryIfNull(q);
        q.setCursor(cursor);
    }
    if (limit != null) {
        q = newQueryIfNull(q);
        q.setLimit(limit);
    }
    if (startTime != null) {
        q = newQueryIfNull(q);
        q.setStartTime(startTime);
    }
    if (finishTime != null) {
        q = newQueryIfNull(q);
        q.setFinishTime(finishTime);
    }
    if (resolution != null) {
        q = newQueryIfNull(q);
        q.setResolution(resolution);
    }
    if (categories != null) {
        q = newQueryIfNull(q);
        q.setCategories(categories);
    }
    if (counterFilters != null) {
        q = newQueryIfNull(q);
        q.setCounterFilters(counterFilters);
    }
    if (pad != null) {
        q = newQueryIfNull(q);
        q.setPad(pad);
    }
    if (users != null) {
        q = newQueryIfNull(q);
        q.setUsers(users);
    }
    if (groups != null) {
        q = newQueryIfNull(q);
        q.setGroups(groups);
    }
    if (identifiers != null) {
        q = newQueryIfNull(q);
        q.setIdentifiers(identifiers);
    }
    if (reversed != null) {
        q = newQueryIfNull(q);
        q.setReversed(reversed);
    }
    return q;
}
/** Query matching all entities whose property equals the given value. */
public static Query searchForProperty(String propertyName,
        Object propertyValue) {
    Query q = new Query();
    q.addEqualityFilter(propertyName, propertyValue);
    return q;
}

/** Like searchForProperty, but limited to a single result. */
public static Query findForProperty(String propertyName,
        Object propertyValue) {
    Query q = new Query();
    q.addEqualityFilter(propertyName, propertyValue);
    q.setLimit(1);
    return q;
}

/** Query selecting the entity with the given UUID. */
public static Query fromUUID(UUID uuid) {
    Query q = new Query();
    q.addIdentifier(Identifier.fromUUID(uuid));
    return q;
}

/** Query selecting the entity with the given name. */
public static Query fromName(String name) {
    Query q = new Query();
    q.addIdentifier(Identifier.fromName(name));
    return q;
}

/** Query selecting the entity with the given email address. */
public static Query fromEmail(String email) {
    Query q = new Query();
    q.addIdentifier(Identifier.fromEmail(email));
    return q;
}

/** Query selecting the entity matching an arbitrary identifier value. */
public static Query fromIdentifier(Object id) {
    Query q = new Query();
    q.addIdentifier(Identifier.from(id));
    return q;
}
/**
 * Returns true when the select list asks for the UUID only.
 * NOTE(review): as a side effect this also downgrades 'level' to Level.IDS
 * when it returns true -- getResultsLevel() relies on this.
 */
public boolean isIdsOnly() {
    if ((selectSubjects.size() == 1)
            && selectSubjects.containsKey(PROPERTY_UUID)) {
        level = Level.IDS;
        return true;
    }
    return false;
}

/**
 * Switches the query between "UUIDs only" and full-property mode, keeping
 * the select subjects and the results level in sync.
 */
public void setIdsOnly(boolean idsOnly) {
    if (idsOnly) {
        selectSubjects = new LinkedHashMap<String, String>();
        selectSubjects.put(PROPERTY_UUID, PROPERTY_UUID);
        level = Level.IDS;
    } else if (isIdsOnly()) {
        selectSubjects = new LinkedHashMap<String, String>();
        level = Level.ALL_PROPERTIES;
    }
}

/** @return the results detail level (after syncing with the select list). */
public Level getResultsLevel() {
    // isIdsOnly() has the side effect of setting level to IDS if applicable.
    isIdsOnly();
    return level;
}

/** Sets the results detail level, keeping the ids-only state in sync. */
public void setResultsLevel(Level level) {
    setIdsOnly(level == Level.IDS);
    this.level = level;
}

/** Fluent variant of setResultsLevel. */
public Query withResultsLevel(Level level) {
    setIdsOnly(level == Level.IDS);
    this.level = level;
    return this;
}
/** @return the entity type this query targets, or null. */
public String getEntityType() {
    return type;
}

public void setEntityType(String type) {
    this.type = type;
}

/** Fluent variant of setEntityType. */
public Query withEntityType(String type) {
    this.type = type;
    return this;
}

/** @return the connection type for connection queries, or null. */
public String getConnectionType() {
    return connection;
}

public void setConnectionType(String connection) {
    this.connection = connection;
}

/** Fluent variant of setConnectionType. */
public Query withConnectionType(String connection) {
    this.connection = connection;
    return this;
}

/** @return the permission strings, or null. */
public List<String> getPermissions() {
    return permissions;
}

public void setPermissions(List<String> permissions) {
    this.permissions = permissions;
}

/** Fluent variant of setPermissions. */
public Query withPermissions(List<String> permissions) {
    this.permissions = permissions;
    return this;
}
/**
 * Adds a select subject with no output alias.
 *
 * @param select the property to select; null and "*" are ignored
 * @return this query, for chaining
 */
public Query addSelect(String select) {
    return addSelect(select, null);
}

/**
 * Adds a select subject with an optional output alias. A non-empty alias
 * switches the query into merged-select-results mode; an empty or null
 * alias switches it back off.
 *
 * @param select the property to select; null and "*" are ignored
 * @param output the output alias, may be null
 * @return this query, for chaining
 */
public Query addSelect(String select, String output) {
    // Defensive null check: the query parser occasionally hands us nulls.
    if (select == null) {
        return this;
    }
    String subject = select.trim();
    if ("*".equals(subject)) {
        return this;
    }
    mergeSelectResults = StringUtils.isNotEmpty(output);
    selectSubjects.put(subject, (output == null) ? "" : output);
    return this;
}
/** @return true when at least one explicit select subject is present. */
public boolean hasSelectSubjects() {
    return !selectSubjects.isEmpty();
}

/** @return the set of select subjects (live view, not a copy). */
public Set<String> getSelectSubjects() {
    return selectSubjects.keySet();
}

/** @return the subject -> output-alias map (live, not a copy). */
public Map<String, String> getSelectAssignments() {
    return selectSubjects;
}

public void setMergeSelectResults(boolean mergeSelectResults) {
    this.mergeSelectResults = mergeSelectResults;
}

/** Fluent variant of setMergeSelectResults. */
public Query withMergeSelectResults(boolean mergeSelectResults) {
    this.mergeSelectResults = mergeSelectResults;
    return this;
}

public boolean isMergeSelectResults() {
    return mergeSelectResults;
}
/**
 * Adds a sort clause from its string form. Accepts a comma-separated list
 * ("a,b desc", handled recursively), an explicit direction after a space
 * ("name desc"), or a leading +/- prefix ("-created"). Defaults to
 * ascending order.
 *
 * @param propertyName the sort specification; blank input is ignored
 * @return this query, for chaining
 */
public Query addSort(String propertyName) {
    if (isBlank(propertyName)) {
        return this;
    }
    propertyName = propertyName.trim();
    // Comma-separated list: recurse on each element.
    if (propertyName.indexOf(',') >= 0) {
        String[] propertyNames = split(propertyName, ',');
        for (String s : propertyNames) {
            addSort(s);
        }
        return this;
    }
    SortDirection direction = SortDirection.ASCENDING;
    // "name desc" style takes precedence over the +/- prefix styles.
    if (propertyName.indexOf(' ') >= 0) {
        String[] parts = split(propertyName, ' ');
        if (parts.length > 1) {
            propertyName = parts[0];
            direction = SortDirection.find(parts[1]);
        }
    } else if (propertyName.startsWith("-")) {
        propertyName = propertyName.substring(1);
        direction = SortDirection.DESCENDING;
    } else if (propertyName.startsWith("+")) {
        propertyName = propertyName.substring(1);
        direction = SortDirection.ASCENDING;
    }
    return addSort(propertyName, direction);
}
/**
 * Adds a sort clause for the given property. A property may only be sorted
 * on once; duplicates are logged and discarded.
 *
 * @param propertyName the property to sort on; blank input is ignored
 * @param direction the sort direction
 * @return this query, for chaining
 */
public Query addSort(String propertyName, SortDirection direction) {
    if (isBlank(propertyName)) {
        return this;
    }
    propertyName = propertyName.trim();
    for (SortPredicate s : sortPredicates) {
        if (s.getPropertyName().equals(propertyName)) {
            // FIX: typo "discardng" corrected; parameterized SLF4J logging.
            logger.error("Attempted to set sort order for {} more than once, discarding...",
                    s.getPropertyName());
            return this;
        }
    }
    sortPredicates.add(new SortPredicate(propertyName, direction));
    return this;
}
/**
 * Adds an already-built sort clause. A property may only be sorted on once;
 * duplicates are logged and discarded. Null input is ignored.
 *
 * @param sort the sort predicate, may be null
 * @return this query, for chaining
 */
public Query addSort(SortPredicate sort) {
    if (sort == null) {
        return this;
    }
    for (SortPredicate s : sortPredicates) {
        if (s.getPropertyName().equals(sort.getPropertyName())) {
            // FIX: typo "discardng" corrected; parameterized SLF4J logging.
            logger.error("Attempted to set sort order for {} more than once, discarding...",
                    s.getPropertyName());
            return this;
        }
    }
    sortPredicates.add(sort);
    return this;
}
/** @return the list of sort clauses (live, not a copy). */
public List<SortPredicate> getSortPredicates() {
    return sortPredicates;
}

/** @return true when at least one sort clause is present. */
public boolean hasSortPredicates() {
    return !sortPredicates.isEmpty();
}

/** Convenience for addFilter(propertyName, FilterOperator.EQUAL, value). */
public Query addEqualityFilter(String propertyName, Object value) {
    return addFilter(propertyName, FilterOperator.EQUAL, value);
}
/**
 * Adds a filter clause. Equality filters on "type" and "connection" are
 * special-cased: they set the corresponding query field instead of adding
 * a predicate. Duplicate wildcard ("*") filters for the same property are
 * logged and discarded. Null arguments are ignored.
 *
 * @param propertyName the property to filter on
 * @param operator the comparison operator
 * @param value the comparison value
 * @return this query, for chaining
 */
public Query addFilter(String propertyName, FilterOperator operator,
        Object value) {
    if ((propertyName == null) || (operator == null) || (value == null)) {
        return this;
    }
    // value is known to be non-null here; the redundant "(value != null)"
    // checks from the original were dropped.
    if (PROPERTY_TYPE.equalsIgnoreCase(propertyName)) {
        if (operator == FilterOperator.EQUAL) {
            type = value.toString();
        }
    } else if ("connection".equalsIgnoreCase(propertyName)) {
        if (operator == FilterOperator.EQUAL) {
            connection = value.toString();
        }
    } else {
        for (FilterPredicate f : filterPredicates) {
            if (f.getPropertyName().equals(propertyName)
                    && f.getValue().equals(value) && "*".equals(value)) {
                // FIX: message read "wildcard wilder ... discardng";
                // parameterized SLF4J logging.
                logger.error("Attempted to set wildcard filter for {} more than once, discarding...",
                        f.getPropertyName());
                return this;
            }
        }
        filterPredicates.add(FilterPredicate.normalize(new FilterPredicate(
                propertyName, operator, value)));
    }
    return this;
}
/**
 * Adds a filter clause parsed from its string form. Equality filters on
 * "type" and "connection" set the corresponding query field instead of
 * adding a predicate; duplicate wildcard ("*") filters are logged and
 * discarded. Unparseable input is logged and ignored.
 * NOTE(review): unlike the 3-argument overload, the parsed predicate is
 * added without FilterPredicate.normalize() -- confirm whether valueOf()
 * already normalizes.
 *
 * @param filterStr the filter string, may be null
 * @return this query, for chaining
 */
public Query addFilter(String filterStr) {
    if (filterStr == null) {
        return this;
    }
    FilterPredicate filter = FilterPredicate.valueOf(filterStr);
    if ((filter != null) && (filter.propertyName != null)
            && (filter.operator != null) && (filter.value != null)) {
        if (PROPERTY_TYPE.equalsIgnoreCase(filter.propertyName)) {
            if (filter.operator == FilterOperator.EQUAL) {
                type = filter.value.toString();
            }
        } else if ("connection".equalsIgnoreCase(filter.propertyName)) {
            if (filter.operator == FilterOperator.EQUAL) {
                connection = filter.value.toString();
            }
        } else {
            for (FilterPredicate f : filterPredicates) {
                if (f.getPropertyName().equals(filter.getPropertyName())
                        && f.getValue().equals(filter.getValue())
                        && "*".equals(filter.getValue())) {
                    // FIX: message read "wildcard wilder ... discardng";
                    // parameterized SLF4J logging.
                    logger.error("Attempted to set wildcard filter for {} more than once, discarding...",
                            f.getPropertyName());
                    return this;
                }
            }
            filterPredicates.add(filter);
        }
    } else {
        logger.error("Unable to add filter to query: {}", filterStr);
    }
    return this;
}
/**
 * Adds an already-built filter predicate (normalized first). Equality
 * filters on "type" and "connection" set the corresponding query field
 * instead of adding a predicate. Incomplete predicates are silently ignored.
 * NOTE(review): unlike the other addFilter overloads, this one does not
 * reject duplicate wildcard filters -- confirm whether that is intentional.
 *
 * @param filter the predicate to add
 * @return this query, for chaining
 */
public Query addFilter(FilterPredicate filter) {
    filter = FilterPredicate.normalize(filter);
    if ((filter != null) && (filter.propertyName != null)
            && (filter.operator != null) && (filter.value != null)) {
        if (PROPERTY_TYPE.equalsIgnoreCase(filter.propertyName)) {
            if (filter.operator == FilterOperator.EQUAL) {
                type = filter.value.toString();
            }
        } else if ("connection".equalsIgnoreCase(filter.propertyName)) {
            if (filter.operator == FilterOperator.EQUAL) {
                connection = filter.value.toString();
            }
        } else {
            filterPredicates.add(filter);
        }
    }
    return this;
}
/** @return the list of filter clauses (live, not a copy). */
public List<FilterPredicate> getFilterPredicates() {
    return filterPredicates;
}

/** @return true when at least one filter clause is present. */
public boolean hasFilterPredicates() {
    return !filterPredicates.isEmpty();
}

/**
 * NOTE(review): subkeyProperties is currently ignored -- this behaves
 * exactly like hasFilterPredicates(). Confirm whether subkey exclusion
 * was ever implemented.
 */
public boolean hasFilterPredicatesExcludingSubkeys(
        Map<String, Object> subkeyProperties) {
    return !filterPredicates.isEmpty();
}

/**
 * Collects the equality filters into a property -> start-value map.
 *
 * @return the map, or null when there are no usable equality filters
 */
public Map<String, Object> getEqualityFilters() {
    Map<String, Object> map = new LinkedHashMap<String, Object>();
    for (FilterPredicate f : filterPredicates) {
        if (f.operator == FilterOperator.EQUAL) {
            Object val = f.getStartValue();
            if (val != null) {
                map.put(f.getPropertyName(), val);
            }
        }
    }
    return map.size() > 0 ? map : null;
}
/** @return true when an equality filter exists for the given property. */
public boolean hasFiltersForProperty(String name) {
    return hasFiltersForProperty(FilterOperator.EQUAL, name);
}

/**
 * @return true when a filter with the given operator (or any operator, if
 *         operator is null) exists for the given property
 */
public boolean hasFiltersForProperty(FilterOperator operator, String name) {
    return getFilterForProperty(operator, name) != null;
}
/**
 * Finds the first filter predicate on the given property (case-insensitive).
 * When operator is non-null, only predicates using that operator match.
 *
 * @param operator the operator to match, or null to match any
 * @param name the property name; null yields null
 * @return the first matching predicate, or null if none matches
 */
public FilterPredicate getFilterForProperty(FilterOperator operator,
        String name) {
    if (name == null) {
        return null;
    }
    for (FilterPredicate predicate : filterPredicates) {
        if (predicate.propertyName.equalsIgnoreCase(name)
                && ((operator == null) || (operator == predicate.operator))) {
            return predicate;
        }
    }
    return null;
}
/**
 * Removes every filter predicate on the given property (case-insensitive).
 * A null name is a no-op.
 *
 * @param name the property whose filters should be removed
 */
public void removeFiltersForProperty(String name) {
    if (name == null) {
        return;
    }
    Iterator<FilterPredicate> it = filterPredicates.iterator();
    while (it.hasNext()) {
        if (it.next().propertyName.equalsIgnoreCase(name)) {
            // Remove through the iterator to avoid ConcurrentModificationException.
            it.remove();
        }
    }
}
public void setStartResult(UUID startResult) {
    this.startResult = startResult;
}

/** Fluent variant of setStartResult. */
public Query withStartResult(UUID startResult) {
    this.startResult = startResult;
    return this;
}

/**
 * Returns the first-result UUID. When unset but a cursor is present, the
 * UUID is derived lazily from the cursor (16 decoded base64 bytes) and
 * cached in startResult.
 */
public UUID getStartResult() {
    if ((startResult == null) && (cursor != null)) {
        byte[] cursorBytes = decodeBase64(cursor);
        if ((cursorBytes != null) && (cursorBytes.length == 16)) {
            startResult = uuid(cursorBytes);
        }
    }
    return startResult;
}
public String getCursor() {
    return cursor;
}

/**
 * Sets the pagination cursor. A 22-character cursor that base64-decodes to
 * exactly 16 bytes is treated as an encoded UUID: it is converted into
 * startResult and the cursor itself is cleared.
 */
public void setCursor(String cursor) {
    if (cursor != null) {
        if (cursor.length() == 22) {
            byte[] cursorBytes = decodeBase64(cursor);
            if ((cursorBytes != null) && (cursorBytes.length == 16)) {
                startResult = uuid(cursorBytes);
                cursor = null;
            }
        }
    }
    this.cursor = cursor;
}

/** Fluent variant of setCursor. */
public Query withCursor(String cursor) {
    setCursor(cursor);
    return this;
}
/**
 * Returns the effective limit, falling back to DEFAULT_LIMIT when unset.
 */
public int getLimit() {
    return getLimit(DEFAULT_LIMIT);
}

/**
 * Returns the effective limit. An explicitly set positive limit wins;
 * otherwise the supplied default is used, and a non-positive default falls
 * back to DEFAULT_LIMIT.
 *
 * @param defaultLimit the fallback limit
 * @return a positive limit value
 */
public int getLimit(int defaultLimit) {
    if (limit > 0) {
        return limit;
    }
    return (defaultLimit > 0) ? defaultLimit : DEFAULT_LIMIT;
}

/** Sets the limit and records that it was set explicitly. */
public void setLimit(int limit) {
    limitSet = true;
    this.limit = limit;
}

/** Fluent variant of setLimit. */
public Query withLimit(int limit) {
    setLimit(limit);
    return this;
}

/** @return true when setLimit/withLimit has been called. */
public boolean isLimitSet() {
    return limitSet;
}
public boolean isReversed() {
    return reversed;
}

/** Sets the sort reversal flag and records that it was set explicitly. */
public void setReversed(boolean reversed) {
    reversedSet = true;
    this.reversed = reversed;
}

/** @return true when setReversed has been called. */
public boolean isReversedSet() {
    return reversedSet;
}

/** @return counter-query window start (ms), or null. */
public Long getStartTime() {
    return startTime;
}

public void setStartTime(Long startTime) {
    this.startTime = startTime;
}

/** @return counter-query window end (ms), or null. */
public Long getFinishTime() {
    return finishTime;
}

public void setFinishTime(Long finishTime) {
    this.finishTime = finishTime;
}

/** @return whether counter results should be padded with empty intervals. */
public boolean isPad() {
    return pad;
}

public void setPad(boolean pad) {
    this.pad = pad;
}

public void setResolution(CounterResolution resolution) {
    this.resolution = resolution;
}

/** @return the counter aggregation resolution. */
public CounterResolution getResolution() {
    return resolution;
}
/** @return user identifier constraints, or null. */
public List<Identifier> getUsers() {
    return users;
}

/** Adds a user identifier, lazily creating the list. */
public void addUser(Identifier user) {
    if (users == null) {
        users = new ArrayList<Identifier>();
    }
    users.add(user);
}

public void setUsers(List<Identifier> users) {
    this.users = users;
}

/** @return group identifier constraints, or null. */
public List<Identifier> getGroups() {
    return groups;
}

/** Adds a group identifier, lazily creating the list. */
public void addGroup(Identifier group) {
    if (groups == null) {
        groups = new ArrayList<Identifier>();
    }
    groups.add(group);
}

public void setGroups(List<Identifier> groups) {
    this.groups = groups;
}

/** @return general identifier constraints, or null. */
public List<Identifier> getIdentifiers() {
    return identifiers;
}

/** Adds a general identifier, lazily creating the list. */
public void addIdentifier(Identifier identifier) {
    if (identifiers == null) {
        identifiers = new ArrayList<Identifier>();
    }
    identifiers.add(identifier);
}

public void setIdentifiers(List<Identifier> identifiers) {
    this.identifiers = identifiers;
}
/**
 * Returns true when the query has no filter predicates and every identifier
 * is a UUID identifier. An empty or missing identifier list yields false.
 * NOTE(review): "Identifers" is misspelled but kept -- renaming would break
 * external callers.
 */
public boolean containsUuidIdentifersOnly() {
    if (hasFilterPredicates()) {
        return false;
    }
    if ((identifiers == null) || identifiers.isEmpty()) {
        return false;
    }
    for (Identifier identifier : identifiers) {
        if (!identifier.isUUID()) {
            return false;
        }
    }
    return true;
}

/** @return true for a pure single-UUID lookup query. */
public boolean containsSingleUuidIdentifier() {
    return containsUuidIdentifersOnly() && (identifiers.size() == 1);
}
/**
 * Extracts the UUIDs from the identifier constraints.
 *
 * @return the UUIDs (possibly empty if none are UUID identifiers), or null
 *         when there are no identifiers at all
 */
public List<UUID> getUuidIdentifiers() {
    if ((identifiers == null) || identifiers.isEmpty()) {
        return null;
    }
    List<UUID> ids = new ArrayList<UUID>();
    for (Identifier identifier : identifiers) {
        if (identifier.isUUID()) {
            ids.add(identifier.getUUID());
        }
    }
    return ids;
}

/**
 * @return the UUID of the single UUID identifier, or null when the query is
 *         not a pure single-UUID lookup
 */
public UUID getSingleUuidIdentifier() {
    if (!containsSingleUuidIdentifier()) {
        return null;
    }
    return (identifiers.get(0).getUUID());
}
/**
 * Returns true when the query has no filter predicates and every identifier
 * is a name identifier. An empty or missing identifier list yields false.
 */
public boolean containsNameIdentifiersOnly() {
    if (hasFilterPredicates()) {
        return false;
    }
    if ((identifiers == null) || identifiers.isEmpty()) {
        return false;
    }
    for (Identifier identifier : identifiers) {
        if (!identifier.isName()) {
            return false;
        }
    }
    return true;
}

/** @return true for a pure single-name lookup query. */
public boolean containsSingleNameIdentifier() {
    return containsNameIdentifiersOnly() && (identifiers.size() == 1);
}
/**
 * Collects the name of every name-typed identifier.
 * Returns null (not an empty list) when there are no identifiers at all.
 */
public List<String> getNameIdentifiers() {
    if ((identifiers == null) || identifiers.isEmpty()) {
        return null;
    }
    List<String> collected = new ArrayList<String>();
    for (Identifier candidate : identifiers) {
        if (candidate.isName()) {
            collected.add(candidate.getName());
        }
    }
    return collected;
}
/**
 * Returns the lone name identifier as a string, or null when the query is not
 * a single-name query (see containsSingleNameIdentifier()).
 */
public String getSingleNameIdentifier() {
    if (!containsSingleNameIdentifier()) {
        return null;
    }
    // NOTE(review): uses toString() while getNameIdentifiers() uses getName() —
    // presumably equivalent for name-typed identifiers, but confirm against
    // Identifier.toString() before relying on exact formatting.
    return (identifiers.get(0).toString());
}
/**
 * True when this query has no filter predicates and consists of at least one
 * identifier, all of which are email addresses. Mirrors
 * containsNameIdentifiersOnly() / containsUuidIdentifersOnly().
 */
public boolean containsEmailIdentifiersOnly() {
    if (hasFilterPredicates()) {
        return false;
    }
    if ((identifiers == null) || identifiers.isEmpty()) {
        return false;
    }
    for (Identifier identifier : identifiers) {
        // BUG FIX: the predicate was inverted ("if (identifier.isEmail()) return false"),
        // so any email identifier made this method return false and a list of
        // non-email identifiers returned true. A non-email identifier must reject.
        if (!identifier.isEmail()) {
            return false;
        }
    }
    return true;
}
/** True when the query consists of exactly one email identifier and no filter predicates. */
public boolean containsSingleEmailIdentifier() {
    if (!containsEmailIdentifiersOnly()) {
        return false;
    }
    return identifiers.size() == 1;
}
/**
 * Collects the address of every email-typed identifier.
 * Returns null (not an empty list) when there are no identifiers at all.
 */
public List<String> getEmailIdentifiers() {
    if ((identifiers == null) || identifiers.isEmpty()) {
        return null;
    }
    List<String> collected = new ArrayList<String>();
    for (Identifier candidate : identifiers) {
        if (candidate.isEmail()) {
            collected.add(candidate.getEmail());
        }
    }
    return collected;
}
/**
 * Returns the lone email identifier as a string, or null when the query is not
 * a single-email query (see containsSingleEmailIdentifier()).
 */
public String getSingleEmailIdentifier() {
    if (!containsSingleEmailIdentifier()) {
        return null;
    }
    // NOTE(review): uses toString() while getEmailIdentifiers() uses getEmail() —
    // presumably equivalent for email-typed identifiers; confirm against
    // Identifier.toString() before relying on exact formatting.
    return (identifiers.get(0).toString());
}
/**
 * True when this query has no filter predicates and consists of at least one
 * identifier, each of which is either a name or an email address.
 */
public boolean containsNameOrEmailIdentifiersOnly() {
    if (hasFilterPredicates() || (identifiers == null) || identifiers.isEmpty()) {
        return false;
    }
    for (Identifier candidate : identifiers) {
        boolean acceptable = candidate.isEmail() || candidate.isName();
        if (!acceptable) {
            return false;
        }
    }
    return true;
}
/** True when the query consists of exactly one name-or-email identifier and no filter predicates. */
public boolean containsSingleNameOrEmailIdentifier() {
    if (!containsNameOrEmailIdentifiersOnly()) {
        return false;
    }
    return identifiers.size() == 1;
}
/**
 * Collects, in order, the email address of each email identifier and the name
 * of each name identifier; other identifier types are skipped.
 * Returns null (not an empty list) when there are no identifiers at all.
 */
public List<String> getNameAndEmailIdentifiers() {
    if ((identifiers == null) || identifiers.isEmpty()) {
        return null;
    }
    List<String> collected = new ArrayList<String>();
    for (Identifier candidate : identifiers) {
        if (candidate.isEmail()) {
            collected.add(candidate.getEmail());
        } else if (candidate.isName()) {
            collected.add(candidate.getName());
        }
    }
    return collected;
}
/**
 * Returns the lone name-or-email identifier as a string, or null when the query
 * is not a single name-or-email query.
 */
public String getSingleNameOrEmailIdentifier() {
    if (!containsSingleNameOrEmailIdentifier()) {
        return null;
    }
    // NOTE(review): relies on Identifier.toString() rendering the underlying
    // name/email; confirm against the Identifier class before changing.
    return (identifiers.get(0).toString());
}
/** Returns the counter categories attached to this query, or null when none were added. */
public List<String> getCategories() {
    return categories;
}
/** Appends a counter category, lazily creating the backing list on first use. */
public void addCategory(String category) {
    List<String> target = categories;
    if (target == null) {
        target = new ArrayList<String>();
        categories = target;
    }
    target.add(category);
}
/** Replaces the category list (the list is stored as-is, not copied). */
public void setCategories(List<String> categories) {
    this.categories = categories;
}
/** Returns the counter filter predicates attached to this query, or null when none were added. */
public List<CounterFilterPredicate> getCounterFilters() {
    return counterFilters;
}
/**
 * Parses a "name:user:group:category" counter expression and appends the
 * resulting predicate. Unparseable expressions are ignored silently.
 */
public void addCounterFilter(String counter) {
    CounterFilterPredicate predicate = CounterFilterPredicate.fromString(counter);
    if (predicate == null) {
        return;
    }
    List<CounterFilterPredicate> target = counterFilters;
    if (target == null) {
        target = new ArrayList<CounterFilterPredicate>();
        counterFilters = target;
    }
    target.add(predicate);
}
/** Replaces the counter filter list (the list is stored as-is, not copied). */
public void setCounterFilters(List<CounterFilterPredicate> counterFilters) {
    this.counterFilters = counterFilters;
}
/**
 * Renders the query in a SQL-like form: "select <subjects|*|type> [where <predicates>]".
 * Returns "" when there are neither select subjects nor filter predicates
 * (even if a type is set — NOTE(review): confirm that is intentional).
 * Sort predicates are not included in the rendering.
 */
@Override
public String toString() {
    if (selectSubjects.isEmpty() && filterPredicates.isEmpty()) {
        return "";
    }
    StringBuilder s = new StringBuilder("select ");
    // A set type takes precedence over explicit select subjects.
    if (type == null) {
        if (selectSubjects.isEmpty()) {
            s.append("*");
        } else {
            if (mergeSelectResults) {
                // Merged form: "{ alias : subject, ... }".
                s.append("{ ");
                boolean first = true;
                for (Map.Entry<String, String> select : selectSubjects
                        .entrySet()) {
                    if (!first) {
                        s.append(", ");
                    }
                    s.append(select.getValue() + " : " + select.getKey());
                    first = false;
                }
                s.append(" }");
            } else {
                // Plain form: comma-separated subject list.
                boolean first = true;
                for (String select : selectSubjects.keySet()) {
                    if (!first) {
                        s.append(", ");
                    }
                    s.append(select);
                    first = false;
                }
            }
        }
    } else {
        s.append(type);
    }
    // Predicates are joined with "and"; other boolean combinations are not representable.
    if (!filterPredicates.isEmpty()) {
        s.append(" where ");
        boolean first = true;
        for (FilterPredicate f : filterPredicates) {
            if (!first) {
                s.append(" and ");
            }
            s.append(f.toString());
            first = false;
        }
    }
    return s.toString();
}
public static enum FilterOperator {
LESS_THAN("<", "lt"), LESS_THAN_OR_EQUAL("<=", "lte"), GREATER_THAN(
">", "gt"), GREATER_THAN_OR_EQUAL(">=", "gte"), EQUAL("=", "eq"), NOT_EQUAL(
"!=", "ne"), IN("in", null), CONTAINS("contains", null), WITHIN(
"within", null);
private final String shortName;
private final String textName;
FilterOperator(String shortName, String textName) {
this.shortName = shortName;
this.textName = textName;
}
static Map<String, FilterOperator> nameMap = new ConcurrentHashMap<String, FilterOperator>();
static {
for (FilterOperator op : EnumSet.allOf(FilterOperator.class)) {
if (op.shortName != null) {
nameMap.put(op.shortName, op);
}
if (op.textName != null) {
nameMap.put(op.textName, op);
}
}
}
public static FilterOperator find(String s) {
if (s == null) {
return null;
}
return nameMap.get(s);
}
@Override
public String toString() {
return shortName;
}
}
/** Sort direction for a {@link SortPredicate}; parsing is lenient and defaults to ascending. */
public static enum SortDirection {
    ASCENDING, DESCENDING;

    /**
     * Lenient parser: "desc…" (any case) or "-" means descending; "asc…", "+",
     * null, or anything unrecognized means ascending.
     */
    public static SortDirection find(String s) {
        if (s == null) {
            return ASCENDING;
        }
        String lower = s.toLowerCase();
        if (lower.startsWith("des") || lower.equals("-")) {
            return DESCENDING;
        }
        return ASCENDING;
    }
}
/**
 * An immutable "order by" term: a trimmed property name plus a sort direction
 * (ascending when none is given).
 */
public static final class SortPredicate implements Serializable {
    private static final long serialVersionUID = 1L;
    private final String propertyName;
    private final Query.SortDirection direction;

    /**
     * @param propertyName property to sort on (required; trimmed)
     * @param direction sort direction; null defaults to ascending
     * @throws NullPointerException if propertyName is null
     */
    public SortPredicate(String propertyName, Query.SortDirection direction) {
        if (propertyName == null) {
            throw new NullPointerException("Property name was null");
        }
        this.propertyName = propertyName.trim();
        this.direction = (direction == null) ? SortDirection.ASCENDING : direction;
    }

    /** Convenience constructor that parses the direction leniently. */
    public SortPredicate(String propertyName, String direction) {
        this(propertyName, SortDirection.find(direction));
    }

    public String getPropertyName() {
        return propertyName;
    }

    public Query.SortDirection getDirection() {
        return direction;
    }

    /** Converts this sort term into a wildcard equality filter on the same property. */
    public FilterPredicate toFilter() {
        return new FilterPredicate(propertyName, FilterOperator.EQUAL, "*");
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if ((o == null) || (getClass() != o.getClass())) {
            return false;
        }
        SortPredicate other = (SortPredicate) o;
        return (direction == other.direction)
                && propertyName.equals(other.propertyName);
    }

    @Override
    public int hashCode() {
        return (31 * propertyName.hashCode()) + direction.hashCode();
    }

    @Override
    public String toString() {
        String suffix = (direction == Query.SortDirection.DESCENDING) ? " DESC" : "";
        return propertyName + suffix;
    }
}
/**
 * A single "property operator value" condition of a query. Immutable apart from
 * the optional pagination cursor. String operands are parsed leniently into
 * String/Boolean/UUID/Long/Float (see parseValue()); IN/WITHIN operands are
 * normalized to Lists.
 */
public static final class FilterPredicate implements Serializable {
    private static final long serialVersionUID = 1L;
    private final String propertyName;
    private final Query.FilterOperator operator;
    // Either a single scalar or, for multi-operand operators, a List of operands.
    private final Object value;
    // Pagination cursor associated with this predicate; settable after construction.
    private String cursor;

    /**
     * Builds a predicate from an already-typed value. For IN/WITHIN, an Iterable
     * (non-Collection) value is copied into a List; all other values are stored as-is.
     *
     * @throws NullPointerException if propertyName or operator is null
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public FilterPredicate(String propertyName,
            Query.FilterOperator operator, Object value) {
        if (propertyName == null) {
            throw new NullPointerException("Property name was null");
        }
        if (operator == null) {
            throw new NullPointerException("Operator was null");
        }
        if ((operator == Query.FilterOperator.IN)
                || (operator == Query.FilterOperator.WITHIN)) {
            if ((!(value instanceof Collection))
                    && (value instanceof Iterable)) {
                // Snapshot the Iterable so the predicate owns a stable List.
                List newValue = new ArrayList();
                for (Iterator i$ = ((Iterable) value).iterator(); i$
                        .hasNext();) {
                    Object val = i$.next();
                    newValue.add(val);
                }
                value = newValue;
            }
            // DataTypeUtils.checkSupportedValue(propertyName, value, true,
            // true);
        } else {
            // DataTypeUtils.checkSupportedValue(propertyName, value, false,
            // false);
        }
        this.propertyName = propertyName;
        this.operator = operator;
        this.value = value;
    }

    /**
     * Builds a predicate from string operands using default (heuristic) value
     * typing. Up to three operands; trailing nulls collapse the value to a
     * shorter list or a single scalar.
     */
    public FilterPredicate(String propertyName, String operator,
            String value, String secondValue, String thirdValue) {
        this.propertyName = propertyName;
        this.operator = FilterOperator.find(operator);
        Object first_obj = parseValue(value, 0);
        Object second_obj = parseValue(secondValue, 0);
        Object third_obj = parseValue(thirdValue, 0);
        if (second_obj != null) {
            if (third_obj != null) {
                this.value = Arrays
                        .asList(first_obj, second_obj, third_obj);
            } else {
                this.value = Arrays.asList(first_obj, second_obj);
            }
        } else {
            this.value = first_obj;
        }
    }

    /**
     * Same as the five-argument constructor, but each operand carries a value
     * type hint. NOTE(review): parseValue() currently ignores the hint — TODO confirm.
     */
    public FilterPredicate(String propertyName, String operator,
            String value, int valueType, String secondValue,
            int secondValueType, String thirdValue, int thirdValueType) {
        this.propertyName = propertyName;
        this.operator = FilterOperator.find(operator);
        Object first_obj = parseValue(value, valueType);
        Object second_obj = parseValue(secondValue, secondValueType);
        Object third_obj = parseValue(thirdValue, thirdValueType);
        if (second_obj != null) {
            if (third_obj != null) {
                this.value = Arrays
                        .asList(first_obj, second_obj, third_obj);
            } else {
                this.value = Arrays.asList(first_obj, second_obj);
            }
        } else {
            this.value = first_obj;
        }
    }

    /**
     * Heuristically types a string operand: quoted → String (quotes stripped),
     * "true"/"false" → Boolean, 36 chars parseable as UUID → UUID, then Long,
     * then Float; anything else → null. The valueType hint is unused here.
     * NOTE(review): the quote branch assumes a matching trailing quote — an
     * operand like 'abc with no closing quote silently loses its last character.
     */
    private static Object parseValue(String val, int valueType) {
        if (val == null) {
            return null;
        }
        if (val.startsWith("'") && (val.length() > 1)) {
            return val.substring(1, val.length() - 1);
        }
        if (val.equalsIgnoreCase("true") || val.equalsIgnoreCase("false")) {
            return Boolean.valueOf(val);
        }
        if (val.length() == 36) {
            try {
                return UUID.fromString(val);
            } catch (IllegalArgumentException e) {
            }
        }
        try {
            return Long.valueOf(val);
        } catch (NumberFormatException e) {
        }
        try {
            return Float.valueOf(val);
        } catch (NumberFormatException e) {
        }
        return null;
    }

    /**
     * Parses a filter expression (e.g. "name = 'fred'") via the generated ANTLR
     * lexer/parser and normalizes it. Returns null (after logging) on any parse
     * failure or null input.
     */
    public static FilterPredicate valueOf(String str) {
        if (str == null) {
            return null;
        }
        try {
            ANTLRStringStream in = new ANTLRStringStream(str.trim());
            QueryFilterLexer lexer = new QueryFilterLexer(in);
            CommonTokenStream tokens = new CommonTokenStream(lexer);
            QueryFilterParser parser = new QueryFilterParser(tokens);
            FilterPredicate filter = parser.filter();
            return normalize(filter);
        } catch (Exception e) {
            logger.error("Unable to parse \"" + str + "\"", e);
        }
        return null;
    }

    /**
     * Rewrites operator-specific sugar onto indexed sub-properties:
     * CONTAINS x → EQUAL on "<prop>.keywords", WITHIN → WITHIN on
     * "<prop>.coordinates". Other predicates pass through unchanged.
     */
    public static FilterPredicate normalize(FilterPredicate p) {
        if (p == null) {
            return null;
        }
        if (p.operator == FilterOperator.CONTAINS) {
            String propertyName = appendSuffix(p.propertyName, "keywords");
            return new FilterPredicate(propertyName, FilterOperator.EQUAL,
                    p.value);
        } else if (p.operator == FilterOperator.WITHIN) {
            String propertyName = appendSuffix(p.propertyName,
                    "coordinates");
            return new FilterPredicate(propertyName, FilterOperator.WITHIN,
                    p.value);
        }
        return p;
    }

    // Appends ".suffix" to a property path unless already present; an empty
    // property name becomes the bare suffix.
    private static String appendSuffix(String str, String suffix) {
        if (StringUtils.isNotEmpty(str)) {
            if (!str.endsWith("." + suffix)) {
                str += "." + suffix;
            }
        } else {
            str = suffix;
        }
        return str;
    }

    public String getPropertyName() {
        return propertyName;
    }

    public Query.FilterOperator getOperator() {
        return operator;
    }

    public Object getValue() {
        return value;
    }

    /**
     * Lower bound of the range this predicate implies: the first list element
     * for multi-operand values, the value itself for >, >=, =, else null.
     */
    @SuppressWarnings("unchecked")
    public Object getStartValue() {
        if (value instanceof List) {
            List<Object> l = (List<Object>) value;
            return l.get(0);
        }
        if ((operator == FilterOperator.GREATER_THAN)
                || (operator == FilterOperator.GREATER_THAN_OR_EQUAL)
                || (operator == FilterOperator.EQUAL)) {
            return value;
        } else {
            return null;
        }
    }

    /**
     * Upper bound of the range this predicate implies: the second list element
     * for multi-operand values (null when absent), the value itself for <, <=, =,
     * else null. EQUAL contributes to both bounds.
     */
    @SuppressWarnings("unchecked")
    public Object getFinishValue() {
        if (value instanceof List) {
            List<Object> l = (List<Object>) value;
            if (l.size() > 1) {
                return l.get(1);
            }
            return null;
        }
        if ((operator == FilterOperator.LESS_THAN)
                || (operator == FilterOperator.LESS_THAN_OR_EQUAL)
                || (operator == FilterOperator.EQUAL)) {
            return value;
        } else {
            return null;
        }
    }

    public void setCursor(String cursor) {
        this.cursor = cursor;
    }

    public String getCursor() {
        return cursor;
    }

    // Note: cursor intentionally excluded from hashCode/equals — identity is
    // (propertyName, operator, value) only.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = (prime * result)
                + ((operator == null) ? 0 : operator.hashCode());
        result = (prime * result)
                + ((propertyName == null) ? 0 : propertyName.hashCode());
        result = (prime * result)
                + ((value == null) ? 0 : value.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        FilterPredicate other = (FilterPredicate) obj;
        if (operator != other.operator) {
            return false;
        }
        if (propertyName == null) {
            if (other.propertyName != null) {
                return false;
            }
        } else if (!propertyName.equals(other.propertyName)) {
            return false;
        }
        if (value == null) {
            if (other.value != null) {
                return false;
            }
        } else if (!value.equals(other.value)) {
            return false;
        }
        return true;
    }

    /** Renders as "property op value", quoting String values; null renders as ''. */
    @Override
    public String toString() {
        String valueStr = "\'\'";
        if (value != null) {
            if (value instanceof String) {
                valueStr = "\'" + value + "\'";
            } else {
                valueStr = value.toString();
            }
        }
        return propertyName + " " + operator.toString() + " " + valueStr;
    }
}
/**
 * An immutable filter over counters, optionally constraining counter name,
 * user, group, queue, and category. Parsed from colon-separated expressions
 * of the form "name:user:group:category" where "*" wildcards a position.
 */
public static final class CounterFilterPredicate implements Serializable {
    private static final long serialVersionUID = 1L;
    private final String name;
    private final Identifier user;
    private final Identifier group;
    private final String queue;
    private final String category;

    public CounterFilterPredicate(String name, Identifier user,
            Identifier group, String queue, String category) {
        this.name = name;
        this.user = user;
        this.group = group;
        this.queue = queue;
        this.category = category;
    }

    public Identifier getUser() {
        return user;
    }

    public Identifier getGroup() {
        return group;
    }

    public String getQueue() {
        return queue;
    }

    public String getCategory() {
        return category;
    }

    public String getName() {
        return name;
    }

    /**
     * Parses "name:user:group:category"; "*" (or absence) leaves a position
     * unconstrained. Returns null when every position is unconstrained.
     * The queue position is never populated by this parser.
     */
    public static CounterFilterPredicate fromString(String s) {
        Identifier user = null;
        Identifier group = null;
        String category = null;
        String name = null;
        String[] l = split(s, ':');
        if ((l.length > 0) && !"*".equals(l[0])) {
            name = l[0];
        }
        if ((l.length > 1) && !"*".equals(l[1])) {
            user = Identifier.from(l[1]);
        }
        if ((l.length > 2) && !"*".equals(l[2])) {
            // BUG FIX: previously read l[3] here, which parsed the category
            // token as the group and threw ArrayIndexOutOfBoundsException for
            // three-token expressions.
            group = Identifier.from(l[2]);
        }
        if ((l.length > 3) && !"*".equals(l[3])) {
            category = l[3];
        }
        if ((user == null) && (group == null) && (category == null)
                && (name == null)) {
            return null;
        }
        return new CounterFilterPredicate(name, user, group, null, category);
    }

    /**
     * Parses a list of expressions, dropping unparseable entries.
     * Returns null (not an empty list) for null/empty input or when nothing parses.
     */
    public static List<CounterFilterPredicate> fromList(List<String> l) {
        if ((l == null) || (l.size() == 0)) {
            return null;
        }
        List<CounterFilterPredicate> counterFilters = new ArrayList<CounterFilterPredicate>();
        for (String s : l) {
            CounterFilterPredicate filter = CounterFilterPredicate
                    .fromString(s);
            if (filter != null) {
                counterFilters.add(filter);
            }
        }
        if (counterFilters.size() == 0) {
            return null;
        }
        return counterFilters;
    }
}
/**
 * Projects the entities of a result set through this query's select subjects.
 * Without select subjects the entities are returned unchanged. With
 * mergeSelectResults each entity becomes an alias→value map; otherwise a list
 * of values in subject order. Missing values render as ""; rows where every
 * subject is missing are dropped. Returns null when nothing survives.
 */
public List<Object> getSelectionResults(Results rs) {
    List<Entity> entities = rs.getEntities();
    if (entities == null) {
        return null;
    }
    if (!hasSelectSubjects()) {
        return cast(entities);
    }
    List<Object> rows = new ArrayList<Object>();
    for (Entity entity : entities) {
        if (isMergeSelectResults()) {
            boolean nonEmpty = false;
            Map<String, Object> row = new LinkedHashMap<String, Object>();
            for (Map.Entry<String, String> assignment : getSelectAssignments().entrySet()) {
                Object extracted = JsonUtils.select(entity, assignment.getKey(), false);
                if (extracted == null) {
                    extracted = "";
                } else {
                    nonEmpty = true;
                }
                row.put(assignment.getValue(), extracted);
            }
            if (nonEmpty) {
                rows.add(row);
            }
        } else {
            boolean nonEmpty = false;
            List<Object> row = new ArrayList<Object>();
            for (String subject : getSelectSubjects()) {
                Object extracted = JsonUtils.select(entity, subject);
                if (extracted == null) {
                    extracted = "";
                } else {
                    nonEmpty = true;
                }
                row.add(extracted);
            }
            if (nonEmpty) {
                rows.add(row);
            }
        }
    }
    return rows.isEmpty() ? null : rows;
}
/** Returns the first projected row from getSelectionResults(), or null when there is none. */
public Object getSelectionResult(Results rs) {
    List<Object> all = getSelectionResults(rs);
    if ((all == null) || all.isEmpty()) {
        return null;
    }
    return all.get(0);
}
}
| |
package com.badlogic.gdx.lang;
import com.badlogic.gdx.concurrent.ThreadLocalInstance;
import com.badlogic.gdx.function.Consumer;
import java.lang.reflect.Array;
/**
* Utility class to box typed values. Can be used to capture non-final
* variables for lambda functions.
*
* <pre>
* {@code
* int getSum(Iterable<Integer> container) {
* final Box.Integer sum = new Box.Integer(0);
* container.forEach(element -> sum.set(sum.get() + element));
* return sum.get();
* }
* }
* </pre>
*
* For boxed primitive types there is a small, thread-local storage which holds
* one per-thread instance of each sub-class.
*
* <pre>
* {@code
* int getSum(Iterable<Integer> container) {
* final Box.Integer sum = Box.borrowInteger();
* sum.set(0);
* container.forEach(element -> sum.set(sum.get() + element));
* int result = sum.get();
* Box.releaseInteger();
* return result;
* }
* }
* </pre>
*
* There's also a version which uses try-with-resources internally.
*
* <pre>
* {@code
* int getSum(Iterable<Integer> container) {
* return Box.withInteger(sum -> {
* sum.set(0);
* container.forEach(element -> sum.set(sum.get() + element));
* });
* }
* }
* </pre>
*
*/
public final class Box {
    /** Mutable box around a primitive {@code boolean}. */
    public static final class Boolean {
        private boolean value;

        // No-arg constructor invoked reflectively by BorrowChecker (clazz.newInstance()).
        @SuppressWarnings("unused")
        Boolean() {
            this.value = false;
        }

        public Boolean(boolean value) {
            this.value = value;
        }

        public boolean get() {
            return value;
        }

        public Boolean set(boolean value) {
            this.value = value;
            return this;
        }
    }

    /** Mutable box around a primitive {@code int}. */
    public static final class Integer {
        private int value;

        // No-arg constructor invoked reflectively by BorrowChecker (clazz.newInstance()).
        @SuppressWarnings("unused")
        Integer() {
            this.value = 0;
        }

        public Integer(int value) {
            this.value = value;
        }

        public int get() {
            return value;
        }

        /** Returns the current value, then increments it (post-increment semantics). */
        public int getAndIncrement() {
            return value++;
        }

        public Integer set(int value) {
            this.value = value;
            return this;
        }

        public Integer add(int value) {
            this.value += value;
            return this;
        }
    }

    /** Mutable box around a primitive {@code float}. */
    public static final class Float {
        private float value;

        // No-arg constructor invoked reflectively by BorrowChecker (clazz.newInstance()).
        @SuppressWarnings("unused")
        Float() {
            this.value = 0.0f;
        }

        public Float(float value) {
            this.value = value;
        }

        public float get() {
            return value;
        }

        public Float set(float value) {
            this.value = value;
            return this;
        }
    }

    /** Mutable box around an object reference of type {@code R}. */
    public static final class Reference<R> {
        private R value;

        public Reference(R value) {
            this.value = value;
        }

        public R get() {
            return value;
        }

        public boolean isNull() {
            return value == null;
        }

        public Reference<R> set(R value) {
            this.value = value;
            return this;
        }
    }

    /**
     * Per-thread pool of box instances. borrow()/close() follow a stack
     * discipline: each borrow() hands out the next cached instance, close()
     * releases the most recently borrowed one. At most {@code cacheSize}
     * borrows may be nested per thread; exceeding that throws.
     * Not thread-safe by itself — instances are confined via ThreadLocal.
     */
    private static class BorrowChecker<B> implements AutoCloseable {
        private final B[] references;
        private int locks = 0;
        private static final int cacheSize = 4;

        @SuppressWarnings("unchecked")
        private BorrowChecker(Class<B> clazz) {
            try {
                references = (B[]) Array.newInstance(clazz, cacheSize);
                for (int i = 0; i < cacheSize; i++) {
                    references[i] = clazz.newInstance();
                }
            } catch (InstantiationException | IllegalAccessException e) {
                throw new RuntimeException(e);
            }
        }

        BorrowChecker<B> borrow() {
            if (locks >= cacheSize) {
                throw new RuntimeException("Too many nested borrows!");
            }
            locks++;
            return this;
        }

        /** The most recently borrowed instance. */
        B reference() {
            return references[locks - 1];
        }

        @Override
        public void close() {
            locks--;
        }
    }

    private static final ThreadLocal<BorrowChecker<Boolean>> tlsBoolean =
            new ThreadLocalInstance<>(() -> new BorrowChecker<>(Boolean.class));
    private static final ThreadLocal<BorrowChecker<Integer>> tlsInteger =
            new ThreadLocalInstance<>(() -> new BorrowChecker<>(Integer.class));
    private static final ThreadLocal<BorrowChecker<Float>> tlsFloat =
            new ThreadLocalInstance<>(() -> new BorrowChecker<>(Float.class));

    /** Borrows this thread's next cached Boolean box; pair with {@link #releaseBoolean()}. */
    public static Boolean borrowBoolean() {
        return tlsBoolean.get().borrow().reference();
    }

    /** Releases the most recently borrowed Boolean box and returns its value. */
    public static boolean releaseBoolean() {
        BorrowChecker<Boolean> value = tlsBoolean.get();
        boolean result = value.reference().get();
        value.close();
        return result;
    }

    /** Runs the consumer with a borrowed Boolean box and returns the resulting value. */
    public static boolean withBoolean(Consumer<Boolean> consumer) {
        try (BorrowChecker<Boolean> value = tlsBoolean.get().borrow()) {
            consumer.accept(value.reference());
            return value.reference().get();
        }
    }

    /** Borrows this thread's next cached Integer box; pair with {@link #releaseInteger()}. */
    public static Integer borrowInteger() {
        return tlsInteger.get().borrow().reference();
    }

    /** Releases the most recently borrowed Integer box and returns its value. */
    public static int releaseInteger() {
        BorrowChecker<Integer> value = tlsInteger.get();
        int result = value.reference().get();
        value.close();
        return result;
    }

    /** Runs the consumer with a borrowed Integer box and returns the resulting value. */
    public static int withInteger(Consumer<Integer> consumer) {
        try (BorrowChecker<Integer> value = tlsInteger.get().borrow()) {
            consumer.accept(value.reference());
            return value.reference().get();
        }
    }

    /**
     * Borrows this thread's next cached Float box; pair with {@link #releaseFloat()}.
     * Added for API parity with the Boolean/Integer borrow/release pairs.
     */
    public static Float borrowFloat() {
        return tlsFloat.get().borrow().reference();
    }

    /** Releases the most recently borrowed Float box and returns its value. */
    public static float releaseFloat() {
        BorrowChecker<Float> value = tlsFloat.get();
        float result = value.reference().get();
        value.close();
        return result;
    }

    /** Runs the consumer with a borrowed Float box and returns the resulting value. */
    public static float withFloat(Consumer<Float> consumer) {
        try (BorrowChecker<Float> value = tlsFloat.get().borrow()) {
            consumer.accept(value.reference());
            return value.reference().get();
        }
    }
}
| |
/**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.jube.replicator;
import java.util.List;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.PreDestroy;
import javax.inject.Inject;
import javax.inject.Singleton;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import io.fabric8.groups.Group;
import io.fabric8.groups.GroupListener;
import io.fabric8.groups.internal.ZooKeeperGroup;
import io.fabric8.jube.KubernetesModel;
import io.fabric8.jube.apimaster.ApiMasterKubernetesModel;
import io.fabric8.jube.apimaster.ApiMasterService;
import io.fabric8.jube.local.NodeHelper;
import io.fabric8.jube.process.ProcessManager;
import io.fabric8.kubernetes.api.KubernetesHelper;
import io.fabric8.kubernetes.api.model.ReplicationControllerState;
import io.fabric8.kubernetes.api.model.PodState;
import io.fabric8.kubernetes.api.model.Container;
import io.fabric8.kubernetes.api.model.ContainerStatus;
import io.fabric8.kubernetes.api.model.Pod;
import io.fabric8.kubernetes.api.model.PodTemplate;
import io.fabric8.kubernetes.api.model.ReplicationController;
import io.fabric8.utils.Closeables;
import io.fabric8.utils.Filter;
import io.fabric8.utils.Filters;
import io.fabric8.utils.Objects;
import io.hawt.util.Strings;
import org.apache.curator.framework.CuratorFramework;
import org.apache.deltaspike.core.api.config.ConfigProperty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Monitors the status of the current replication controllers and pods and chooses to start new pods if there are not enough replicas
*/
@Singleton
public class Replicator {
private static final transient Logger LOG = LoggerFactory.getLogger(Replicator.class);
private static final String KUBERNETES_REPLICATOR = "/kubernetes/replicator";
private final CuratorFramework curator;
private final ApiMasterKubernetesModel model;
private final ProcessManager processManager;
private final long pollTime;
private final Timer timer = new Timer();
private final GroupListener<ReplicatorNode> groupListener;
private ZooKeeperGroup<ReplicatorNode> group;
private AtomicBoolean timerEnabled = new AtomicBoolean(false);
private AtomicBoolean master = new AtomicBoolean(false);
@Inject
public Replicator(CuratorFramework curator,
ApiMasterKubernetesModel model,
ProcessManager processManager,
@ConfigProperty(name = "REPLICATOR_POLL_TIME", defaultValue = "2000")
long pollTime) {
this.curator = curator;
this.model = model;
this.processManager = processManager;
this.pollTime = pollTime;
System.out.println("Starting the replicator with poll time: " + pollTime);
group = new ZooKeeperGroup<ReplicatorNode>(curator, KUBERNETES_REPLICATOR, ReplicatorNode.class);
groupListener = new GroupListener<ReplicatorNode>() {
@Override
public void groupEvent(Group<ReplicatorNode> group, GroupEvent event) {
onGroupEvent(group, event);
}
};
group.add(groupListener);
group.update(createState());
group.start();
enableTimer();
}
@PreDestroy
public void destroy() {
disableTimer();
group.remove(groupListener);
Closeables.closeQuietly(group);
group = null;
disableTimer();
}
public boolean isMaster() {
return group.isMaster() && master.get();
}
public void enableMaster() {
if (master.compareAndSet(false, true)) {
enableTimer();
LOG.info("Replicator is the master");
System.out.println("====== Replicator is the master");
group.update(createState());
}
}
protected void disableMaster() {
if (master.compareAndSet(true, false)) {
LOG.info("Replicator is not the master");
System.out.println("====== Replicator is NOT the master");
group.update(createState());
disableTimer();
}
}
protected void onGroupEvent(Group<ReplicatorNode> group, GroupListener.GroupEvent event) {
switch (event) {
case CONNECTED:
case CHANGED:
if (isValid()) {
try {
if (group.isMaster()) {
enableMaster();
} else {
disableMaster();
}
} catch (IllegalStateException e) {
// Ignore
}
} else {
LOG.info("Not valid with master: " + group.isMaster()
+ " curator: " + curator);
}
break;
case DISCONNECTED:
default:
}
}
protected boolean isValid() {
return true;
}
protected void autoScale() throws Exception {
if (!isMaster()) {
return;
}
ImmutableSet<Map.Entry<String, ReplicationController>> entries = model.getReplicationControllerMap().entrySet();
for (Map.Entry<String, ReplicationController> entry : entries) {
String rcID = entry.getKey();
ReplicationController replicationController = entry.getValue();
PodState podTemplatePodState = NodeHelper.getPodTemplateDesiredState(replicationController);
if (podTemplatePodState == null) {
LOG.warn("Cannot instantiate replication controller: " + replicationController.getId() + " due to missing PodTemplate.PodState!");
continue;
}
int replicaCount = 0;
ReplicationControllerState desiredState = replicationController.getDesiredState();
if (desiredState != null) {
Integer replicas = desiredState.getReplicas();
if (replicas != null && replicas > 0) {
replicaCount = replicas;
}
}
ReplicationControllerState currentState = NodeHelper.getOrCreateCurrentState(replicationController);
Map<String, String> replicaSelector = desiredState.getReplicaSelector();
ImmutableList<Pod> allPods = model.getPods(replicaSelector);
List<Pod> pods = Filters.filter(allPods, podHasNotTerminated());
int currentSize = pods.size();
Integer currentSizeInt = new Integer(currentSize);
if (!Objects.equal(currentSizeInt, currentState.getReplicas())) {
currentState.setReplicas(currentSizeInt);
model.updateReplicationController(rcID, replicationController);
}
int createCount = replicaCount - currentSize;
if (createCount > 0) {
pods = createMissingContainers(replicationController, podTemplatePodState, desiredState, createCount, pods);
} else if (createCount < 0) {
int deleteCount = Math.abs(createCount);
pods = deleteContainers(pods, deleteCount);
}
}
}
/**
* Returns a filter of all terminated pods
*/
public static Filter<Pod> podHasNotTerminated() {
return new Filter<Pod>() {
@Override
public String toString() {
return "PodHasNotTerminatedFilter";
}
@Override
public boolean matches(Pod pod) {
PodState currentState = pod.getCurrentState();
if (currentState != null) {
String status = currentState.getStatus();
if (status != null) {
String lower = status.toLowerCase();
if (lower.startsWith("error") || lower.startsWith("fail") || lower.startsWith("term")) {
return false;
}
}
}
return true;
}
};
}
private ImmutableList<Pod> deleteContainers(List<Pod> pods, int deleteCount) throws Exception {
List<Pod> list = Lists.newArrayList(pods);
for (int i = 0, size = list.size(); i < deleteCount && i < size; i++) {
Pod removePod = list.remove(size - i - 1);
String id = removePod.getId();
model.deleteRemotePod(removePod);
}
return ImmutableList.copyOf(list);
}
protected ImmutableList<Pod> createMissingContainers(ReplicationController replicationController, PodState podTemplateDesiredState,
ReplicationControllerState desiredState, int createCount, List<Pod> pods) throws Exception {
// TODO this is a hack ;) needs replacing with the real host we're creating on
String host = ApiMasterService.getHostName();
List<Pod> list = Lists.newArrayList(pods);
for (int i = 0; i < createCount; i++) {
Pod pod = new Pod();
pod.setKind(NodeHelper.KIND_POD);
createNewId(replicationController, pod);
list.add(pod);
List<Container> containers = KubernetesHelper.getContainers(podTemplateDesiredState);
for (Container container : containers) {
String containerName = pod.getId() + "-" + container.getName();
ContainerStatus containerInfo = NodeHelper.getOrCreateContainerInfo(pod, containerName);
PodState currentState = pod.getCurrentState();
Objects.notNull(currentState, "currentState");
currentState.setHost(host);
String image = container.getImage();
if (Strings.isBlank(image)) {
LOG.warn("Missing image for " + containerName + " so cannot create it!");
continue;
}
NodeHelper.addOrUpdateDesiredContainer(pod, containerName, container);
}
PodTemplate podTemplate = desiredState.getPodTemplate();
if (podTemplate != null) {
pod.setLabels(podTemplate.getLabels());
}
// TODO should we update the pod now we've updated it?
List<Container> desiredContainers = NodeHelper.getOrCreatePodDesiredContainers(pod);
model.remoteCreatePod(pod);
}
return ImmutableList.copyOf(list);
}
protected String createNewId(ReplicationController replicationController, Pod pod) {
String id = replicationController.getId();
if (Strings.isNotBlank(id)) {
id += "-";
int idx = 1;
while (true) {
String anId = id + (idx++);
if (model.updatePodIfNotExist(anId, pod)) {
pod.setId(anId);
return null;
}
}
}
id = model.createID(NodeHelper.KIND_POD);
pod.setId(id);
return null;
}
protected void enableTimer() {
if (timerEnabled.compareAndSet(false, true)) {
TimerTask timerTask = new TimerTask() {
@Override
public void run() {
LOG.debug("Replicator Timer");
try {
autoScale();
} catch (Exception e) {
System.out.println("Caught: " + e);
e.printStackTrace();
LOG.warn("Caught: " + e, e);
}
}
};
timer.schedule(timerTask, this.pollTime, this.pollTime);
}
}
protected void disableTimer() {
System.out.println("disabling the Replicator timer!");
timer.cancel();
timerEnabled.set(false);
}
private ReplicatorNode createState() {
ReplicatorNode state = new ReplicatorNode();
return state;
}
public long getPollTime() {
return pollTime;
}
public KubernetesModel getModel() {
return model;
}
}
| |
/*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.haulmont.cuba.gui.components;
import com.haulmont.chile.core.model.MetaPropertyPath;
import com.haulmont.cuba.gui.components.filter.FilterDelegate;
import com.haulmont.cuba.gui.data.CollectionDatasource;
import com.haulmont.cuba.gui.model.CollectionLoader;
import com.haulmont.cuba.gui.model.DataLoader;
import com.haulmont.cuba.security.entity.FilterEntity;
import java.util.List;
/**
 * Generic filter UI component. Lets the user build, save and apply filter
 * conditions against either a {@link CollectionLoader} (new data API) or a
 * legacy {@link CollectionDatasource}.
 */
public interface Filter extends HasMargin, Component.BelongToFrame, HasNamedComponents,
        Component.HasXmlDescriptor, HasSettings, Component.HasCaption, HasContextHelp,
        Component.HasIcon, Collapsable, Component.Focusable {

    /** Canonical component name used for registration in the UI components registry. */
    String NAME = "filter";

    /**
     * Action registered with parent frame to apply filter.
     */
    String APPLY_ACTION_ID = "applyFilter";

    /**
     * Action registered with parent frame to open filter select menu.
     */
    String SELECT_ACTION_ID = "selectFilter";

    /**
     * Listener notified when the currently selected {@link FilterEntity} changes.
     */
    interface FilterEntityChangeListener {
        /**
         * @param filterEntity the newly selected filter entity
         */
        void filterEntityChanged(FilterEntity filterEntity);
    }

    /**
     * Callback invoked before the filter is applied; can veto the operation.
     */
    interface BeforeFilterAppliedHandler {
        /**
         * @return false to cancel applying the filter, true to proceed
         */
        boolean beforeFilterApplied();
    }

    /**
     * Callback invoked after the filter has been applied.
     */
    interface AfterFilterAppliedHandler {
        void afterFilterApplied();
    }

    /**
     * @return the handler invoked before the filter is applied, or null if none is set
     */
    BeforeFilterAppliedHandler getBeforeFilterAppliedHandler();

    /**
     * Sets the handler that will be invoked before the filter is applied. If the {@link
     * BeforeFilterAppliedHandler#beforeFilterApplied()} returns false, then the filter won't be applied
     *
     * @param beforeFilterAppliedHandler the handler to invoke before applying
     */
    void setBeforeFilterAppliedHandler(BeforeFilterAppliedHandler beforeFilterAppliedHandler);

    /**
     * @return the handler invoked after the filter is applied, or null if none is set
     */
    AfterFilterAppliedHandler getAfterFilterAppliedHandler();

    /**
     * Sets the handler that will be invoked after the filter has been applied.
     *
     * @param afterFilterAppliedHandler the handler to invoke after applying
     */
    void setAfterFilterAppliedHandler(AfterFilterAppliedHandler afterFilterAppliedHandler);

    /**
     * Returns {@link DataLoader} which the filter is applied to.
     */
    CollectionLoader getDataLoader();

    /**
     * Sets {@link DataLoader} which the filter is applied to.
     */
    void setDataLoader(CollectionLoader loader);

    /**
     * @return the legacy datasource which the filter is applied to, or null if a loader is used instead
     */
    CollectionDatasource getDatasource();

    /**
     * Sets the legacy datasource which the filter is applied to.
     *
     * @param datasource the datasource to filter
     */
    void setDatasource(CollectionDatasource datasource);

    /**
     * Sets the entity describing the currently selected (saved) filter.
     *
     * @param filterEntity the filter definition entity
     */
    void setFilterEntity(FilterEntity filterEntity);

    /**
     * Applies the filter. Before the filter is applied, conditions correctness is checked. If invalid conditions are
     * found (i.e. empty required conditions) then the filter will not be applied.
     *
     * @param options an options object
     * @return true if the filter was applied, false otherwise
     */
    boolean apply(FilterOptions options);

    /**
     * Applies the filter. Before the filter is applied, conditions correctness is checked. If invalid conditions are
     * found (i.e. empty required conditions) then the filter will not be applied.
     *
     * @param notifyInvalidConditions whether a notification about invalid conditions values should be displayed
     * @return true if the filter was applied, false otherwise
     */
    boolean apply(boolean notifyInvalidConditions);

    /**
     * Sets rows count restriction. Particularly useful when maxResults field is hidden. 0 in case of no limits.
     *
     * @param maxResults restriction on number of rows
     */
    void setMaxResults(int maxResults);

    /**
     * @return the current rows count restriction, 0 meaning no limit
     */
    int getMaxResults();

    /**
     * Sets filter mode switch visibility
     */
    void setModeSwitchVisible(boolean modeSwitchVisible);

    /**
     * Changes the filter mode and repaints the filter layout
     */
    void switchFilterMode(FilterDelegate.FilterMode filterMode);

    /**
     * Whether to show field for rows count restriction. <p>Automatically set to false for {@code
     * HierarchicalDatasource}.
     */
    void setUseMaxResults(boolean useMaxResults);

    /**
     * @return true if the rows count restriction field is shown
     */
    boolean getUseMaxResults();

    /**
     * Whether to use a text field for entering a max results value. LookupField is used by default.
     *
     * @param textMaxResults true if use TextField
     */
    void setTextMaxResults(boolean textMaxResults);

    /**
     * @return true if a TextField is used for the max results value
     */
    boolean getTextMaxResults();

    /**
     * Sets the component associated with the filter.
     */
    void setApplyTo(Component component);

    /**
     * @return the component associated with the filter, or null if none
     */
    Component getApplyTo();

    /**
     * Defines when the filter will be applied. If the attribute value is false, the filter (default or empty) will be
     * applied when the screen is opened. It means that the datasource will be refreshed and linked components (e.g.
     * Table) will display data. If the value is true, the filter will be applied only after the Search button is
     * clicked.
     */
    void setManualApplyRequired(Boolean manualApplyRequired);

    /**
     * @return whether manual apply is required; may be null if not explicitly configured
     */
    Boolean getManualApplyRequired();

    /**
     * Sets whether the user may edit filter conditions.
     *
     * @param editable true to allow editing
     */
    void setEditable(boolean editable);

    /**
     * @return true if filter conditions are editable by the user
     */
    boolean isEditable();

    /**
     * Enables or disables folder-related actions of the filter.
     *
     * @param enabled true to enable folder actions
     */
    void setFolderActionsEnabled(boolean enabled);

    /**
     * @return true if folder-related actions are enabled
     */
    boolean isFolderActionsEnabled();

    /**
     * Sets the value to the filter parameter component. Do not use this method in init() method of screen controller,
     * because filter is not initialized by that time. The proper place to use the method is ready() method of the
     * controller.
     *
     * @param paramName parameter name. It can be found at runtime in the filter editor window. Right click at the
     *                  necessary condition and select 'Show component name' item in the popup menu. Component name
     *                  there will be like 'component$genericFilter.email12482'. {@code paramName} parameter in this
     *                  method requires only the last part of this string, i.e. you should pass 'email12482'
     * @param value     parameter value
     */
    void setParamValue(String paramName, Object value);

    /**
     * Gets the value of the filter parameter component. Do not use this method in init() method of screen controller,
     * because filter is not initialized by that time. The proper place to use the method is ready() method of the
     * controller.
     *
     * @param paramName parameter name. It can be found at runtime in the filter editor window. Right click at the
     *                  necessary condition and select 'Show component name' item in the popup menu. Component name
     *                  there will be like 'component$genericFilter.email12482'. {@code paramName} parameter in this
     *                  method requires only the last part of this string, i.e. you should pass 'email12482'
     * @return parameter value
     */
    Object getParamValue(String paramName);

    /**
     * Registers a listener notified when the selected filter entity changes.
     *
     * @param listener the listener to add
     */
    void addFilterEntityChangeListener(FilterEntityChangeListener listener);

    /**
     * @return all registered filter entity change listeners
     */
    List<FilterEntityChangeListener> getFilterEntityChangeListeners();

    /**
     * Number of conditions to be displayed in one row
     */
    void setColumnsCount(int columnsCount);

    /**
     * @return number of conditions displayed in one row
     */
    int getColumnsCount();

    /**
     * Sets whether border is visible.
     *
     * @param visible <code>true</code> to show the border, <code>false</code> to hide it
     */
    void setBorderVisible(boolean visible);

    /**
     * Determines whether or not border is visible.
     *
     * @return <code>true</code> if the border is visible
     */
    boolean isBorderVisible();

    /**
     * Enables to setup which properties should be available for filtering.
     *
     * @param predicate properties filter predicate
     */
    void setPropertiesFilterPredicate(PropertiesFilterPredicate predicate);

    /**
     * @return properties filter predicate
     */
    PropertiesFilterPredicate getPropertiesFilterPredicate();

    /**
     * A predicate that tests whether a property with the given path should be available for filtering.
     */
    @FunctionalInterface
    interface PropertiesFilterPredicate {
        /**
         * @param metaPropertyPath {@link MetaPropertyPath} instance
         * @return true if property with given {@code metaPropertyPath} should be available for filtering or false otherwise
         */
        boolean test(MetaPropertyPath metaPropertyPath);
    }

    /**
     * Options object controlling how {@link Filter#apply(FilterOptions)} behaves.
     * Use {@link #create()} and the fluent setters to build an instance.
     */
    class FilterOptions {
        // whether a notification about invalid condition values should be displayed
        protected boolean notifyInvalidConditions;
        // whether data should be (re)loaded after the filter is applied; on by default
        protected boolean loadData = true;

        /**
         * @return a new options object with default settings
         */
        public static FilterOptions create() {
            return new FilterOptions();
        }

        /**
         * @return whether a notification about invalid conditions values should be displayed
         */
        public boolean isNotifyInvalidConditions() {
            return notifyInvalidConditions;
        }

        /**
         * Sets whether a notification about invalid conditions values should be displayed.
         *
         * @param notifyInvalidConditions whether a notification about invalid conditions values should be displayed
         * @return this object
         */
        public FilterOptions setNotifyInvalidConditions(boolean notifyInvalidConditions) {
            this.notifyInvalidConditions = notifyInvalidConditions;
            return this;
        }

        /**
         * @return whether data needs to be loaded after filter is applied
         */
        public boolean isLoadData() {
            return loadData;
        }

        /**
         * Sets whether data needs to be loaded after filter is applied.
         *
         * @param loadData whether data needs to be loaded after filter is applied
         * @return this object
         */
        public FilterOptions setLoadData(boolean loadData) {
            this.loadData = loadData;
            return this;
        }
    }
}
| |
/*
* Copyright (c) 1998, 2008, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.tools.doclets.internal.toolkit.util;
import com.sun.javadoc.*;
import java.util.*;
/**
* Map all class uses for a given class.
*
* This code is not part of an API.
* It is implementation that is subject to change.
* Do not use it as an API
*
* @since 1.2
* @author Robert G. Field
*/
public class ClassUseMapper {

    // Precomputed class/interface hierarchy used to walk subclasses and subinterfaces.
    private final ClassTree classtree;

    /**
     * Mapping of ClassDocs to set of PackageDoc used by that class.
     * Entries may be null.
     */
    public Map<String,Set<PackageDoc>> classToPackage = new HashMap<String,Set<PackageDoc>>();

    /**
     * Mapping of Annotations to set of PackageDoc that use the annotation.
     */
    public Map<String,List<PackageDoc>> classToPackageAnnotations = new HashMap<String,List<PackageDoc>>();

    /**
     * Mapping of ClassDocs to set of ClassDoc used by that class.
     * Entries may be null.
     */
    public Map<String,Set<ClassDoc>> classToClass = new HashMap<String,Set<ClassDoc>>();

    /**
     * Mapping of ClassDocs to list of ClassDoc which are direct or
     * indirect subclasses of that class.
     * Entries may be null.
     */
    public Map<String,List<ClassDoc>> classToSubclass = new HashMap<String,List<ClassDoc>>();

    /**
     * Mapping of ClassDocs to list of ClassDoc which are direct or
     * indirect subinterfaces of that interface.
     * Entries may be null.
     */
    public Map<String,List<ClassDoc>> classToSubinterface = new HashMap<String,List<ClassDoc>>();

    /**
     * Mapping of ClassDocs to list of ClassDoc which implement
     * this interface.
     * Entries may be null.
     */
    public Map<String,List<ClassDoc>> classToImplementingClass = new HashMap<String,List<ClassDoc>>();

    /**
     * Mapping of ClassDocs to list of FieldDoc declared as that class.
     * Entries may be null.
     */
    public Map<String,List<FieldDoc>> classToField = new HashMap<String,List<FieldDoc>>();

    /**
     * Mapping of ClassDocs to list of MethodDoc returning that class.
     * Entries may be null.
     */
    public Map<String,List<MethodDoc>> classToMethodReturn = new HashMap<String,List<MethodDoc>>();

    /**
     * Mapping of ClassDocs to list of MethodDoc having that class
     * as an arg.
     * Entries may be null.
     */
    public Map<String,List<ExecutableMemberDoc>> classToMethodArgs = new HashMap<String,List<ExecutableMemberDoc>>();

    /**
     * Mapping of ClassDocs to list of MethodDoc which throws that class.
     * Entries may be null.
     */
    public Map<String,List<ExecutableMemberDoc>> classToMethodThrows = new HashMap<String,List<ExecutableMemberDoc>>();

    /**
     * Mapping of ClassDocs to list of ConstructorDoc having that class
     * as an arg.
     * Entries may be null.
     */
    public Map<String,List<ExecutableMemberDoc>> classToConstructorArgs = new HashMap<String,List<ExecutableMemberDoc>>();

    /**
     * Mapping of ClassDocs to list of ConstructorDoc which throws that class.
     * Entries may be null.
     */
    public Map<String,List<ExecutableMemberDoc>> classToConstructorThrows = new HashMap<String,List<ExecutableMemberDoc>>();

    /**
     * The mapping of AnnotationTypeDocs to constructors that use them.
     */
    public Map<String,List<ConstructorDoc>> classToConstructorAnnotations = new HashMap<String,List<ConstructorDoc>>();

    /**
     * The mapping of AnnotationTypeDocs to Constructor parameters that use them.
     */
    public Map<String,List<ExecutableMemberDoc>> classToConstructorParamAnnotation = new HashMap<String,List<ExecutableMemberDoc>>();

    /**
     * The mapping of ClassDocs to Constructor arguments that use them as type parameters.
     */
    public Map<String,List<ExecutableMemberDoc>> classToConstructorDocArgTypeParam = new HashMap<String,List<ExecutableMemberDoc>>();

    /**
     * The mapping of ClassDocs to ClassDocs that use them as type parameters.
     */
    public Map<String,List<ClassDoc>> classToClassTypeParam = new HashMap<String,List<ClassDoc>>();

    /**
     * The mapping of AnnotationTypeDocs to ClassDocs that use them.
     */
    public Map<String,List<ClassDoc>> classToClassAnnotations = new HashMap<String,List<ClassDoc>>();

    /**
     * The mapping of ClassDocs to ExecutableMemberDocs that use them as type parameters.
     */
    public Map<String,List<MethodDoc>> classToExecMemberDocTypeParam = new HashMap<String,List<MethodDoc>>();

    /**
     * The mapping of ClassDocs to ExecutableMemberDocs arguments that use them as type parameters.
     */
    public Map<String,List<ExecutableMemberDoc>> classToExecMemberDocArgTypeParam = new HashMap<String,List<ExecutableMemberDoc>>();

    /**
     * The mapping of AnnotationTypeDocs to ExecutableMemberDocs that use them.
     */
    public Map<String,List<MethodDoc>> classToExecMemberDocAnnotations = new HashMap<String,List<MethodDoc>>();

    /**
     * The mapping of ClassDocs to ExecutableMemberDocs that have return type
     * with type parameters of that class.
     */
    public Map<String,List<MethodDoc>> classToExecMemberDocReturnTypeParam = new HashMap<String,List<MethodDoc>>();

    /**
     * The mapping of AnnotationTypeDocs to MethodDoc parameters that use them.
     */
    public Map<String,List<ExecutableMemberDoc>> classToExecMemberDocParamAnnotation = new HashMap<String,List<ExecutableMemberDoc>>();

    /**
     * The mapping of ClassDocs to FieldDocs that use them as type parameters.
     */
    public Map<String,List<FieldDoc>> classToFieldDocTypeParam = new HashMap<String,List<FieldDoc>>();

    /**
     * The mapping of AnnotationTypeDocs to FieldDocs that use them.
     */
    public Map<String,List<FieldDoc>> annotationToFieldDoc = new HashMap<String,List<FieldDoc>>();

    /**
     * Eagerly builds every "use" map for all classes known to the given root.
     * All public map fields above are fully populated when this constructor returns.
     *
     * @param root      root of the javadoc run, providing all documented classes
     * @param classtree precomputed class/interface hierarchy
     */
    public ClassUseMapper(RootDoc root, ClassTree classtree) {
        this.classtree = classtree;
        // Map subclassing, subinterfacing implementing, ...
        for (Iterator<ClassDoc> it = classtree.baseclasses().iterator(); it.hasNext();) {
            subclasses(it.next());
        }
        for (Iterator<ClassDoc> it = classtree.baseinterfaces().iterator(); it.hasNext();) {
            // does subinterfacing as side-effect
            implementingClasses(it.next());
        }
        // Map methods, fields, constructors using a class.
        ClassDoc[] classes = root.classes();
        for (int i = 0; i < classes.length; i++) {
            PackageDoc pkg = classes[i].containingPackage();
            // annotations applied on the containing package
            mapAnnotations(classToPackageAnnotations, pkg, pkg);
            ClassDoc cd = classes[i];
            mapTypeParameters(classToClassTypeParam, cd, cd);
            mapAnnotations(classToClassAnnotations, cd, cd);
            FieldDoc[] fields = cd.fields();
            for (int j = 0; j < fields.length; j++) {
                FieldDoc fd = fields[j];
                mapTypeParameters(classToFieldDocTypeParam, fd, fd);
                mapAnnotations(annotationToFieldDoc, fd, fd);
                // primitive-typed fields have no ClassDoc to map
                if (! fd.type().isPrimitive()) {
                    add(classToField, fd.type().asClassDoc(), fd);
                }
            }
            ConstructorDoc[] cons = cd.constructors();
            for (int j = 0; j < cons.length; j++) {
                mapAnnotations(classToConstructorAnnotations, cons[j], cons[j]);
                mapExecutable(cons[j]);
            }
            MethodDoc[] meths = cd.methods();
            for (int j = 0; j < meths.length; j++) {
                MethodDoc md = meths[j];
                mapExecutable(md);
                mapTypeParameters(classToExecMemberDocTypeParam, md, md);
                mapAnnotations(classToExecMemberDocAnnotations, md, md);
                // skip primitive returns and bare type variables (no concrete class to map)
                if (! (md.returnType().isPrimitive() || md.returnType() instanceof TypeVariable)) {
                    mapTypeParameters(classToExecMemberDocReturnTypeParam,
                        md.returnType(), md);
                    add(classToMethodReturn, md.returnType().asClassDoc(), md);
                }
            }
        }
    }

    /**
     * Return all subclasses of a class AND fill-in classToSubclass map.
     */
    private Collection<ClassDoc> subclasses(ClassDoc cd) {
        // classToSubclass doubles as a memoization cache for the recursion
        Collection<ClassDoc> ret = classToSubclass.get(cd.qualifiedName());
        if (ret == null) {
            ret = new TreeSet<ClassDoc>();
            List<ClassDoc> subs = classtree.subclasses(cd);
            if (subs != null) {
                ret.addAll(subs);
                for (Iterator<ClassDoc> it = subs.iterator(); it.hasNext();) {
                    ret.addAll(subclasses(it.next()));
                }
            }
            addAll(classToSubclass, cd, ret);
        }
        return ret;
    }

    /**
     * Return all subinterfaces of an interface AND fill-in classToSubinterface map.
     */
    private Collection<ClassDoc> subinterfaces(ClassDoc cd) {
        // classToSubinterface doubles as a memoization cache for the recursion
        Collection<ClassDoc> ret = classToSubinterface.get(cd.qualifiedName());
        if (ret == null) {
            ret = new TreeSet<ClassDoc>();
            List<ClassDoc> subs = classtree.subinterfaces(cd);
            if (subs != null) {
                ret.addAll(subs);
                for (Iterator<ClassDoc> it = subs.iterator(); it.hasNext();) {
                    ret.addAll(subinterfaces(it.next()));
                }
            }
            addAll(classToSubinterface, cd, ret);
        }
        return ret;
    }

    /**
     * Return all implementing classes of an interface (including
     * all subclasses of implementing classes and all classes
     * implementing subinterfaces) AND fill-in both classToImplementingClass
     * and classToSubinterface maps.
     */
    private Collection<ClassDoc> implementingClasses(ClassDoc cd) {
        Collection<ClassDoc> ret = classToImplementingClass.get(cd.qualifiedName());
        if (ret == null) {
            ret = new TreeSet<ClassDoc>();
            List<ClassDoc> impl = classtree.implementingclasses(cd);
            if (impl != null) {
                ret.addAll(impl);
                // subclasses of an implementing class also implement the interface
                for (Iterator<ClassDoc> it = impl.iterator(); it.hasNext();) {
                    ret.addAll(subclasses(it.next()));
                }
            }
            // classes implementing a subinterface implement this interface too
            for (Iterator<ClassDoc> it = subinterfaces(cd).iterator(); it.hasNext();) {
                ret.addAll(implementingClasses(it.next()));
            }
            addAll(classToImplementingClass, cd, ret);
        }
        return ret;
    }

    /**
     * Determine classes used by a method or constructor, so they can be
     * inverse mapped.
     */
    private void mapExecutable(ExecutableMemberDoc em) {
        Parameter[] params = em.parameters();
        boolean isConstructor = em.isConstructor();
        // parameter types already recorded for this member, to avoid duplicates
        List<Type> classArgs = new ArrayList<Type>();
        for (int k = 0; k < params.length; k++) {
            Type pcd = params[k].type();
            // primitives don't get mapped, also avoid dups
            if ((! params[k].type().isPrimitive()) &&
                 ! classArgs.contains(pcd) &&
                 ! (pcd instanceof TypeVariable)) {
                add(isConstructor? classToConstructorArgs :classToMethodArgs,
                        pcd.asClassDoc(), em);
                classArgs.add(pcd);
                mapTypeParameters(isConstructor?
                   classToConstructorDocArgTypeParam : classToExecMemberDocArgTypeParam,
                   pcd, em);
            }
            // parameter annotations are mapped even for primitive/duplicate parameter types
            mapAnnotations(
                isConstructor ?
                    classToConstructorParamAnnotation :
                    classToExecMemberDocParamAnnotation,
                params[k], em);
        }
        ClassDoc[] thr = em.thrownExceptions();
        for (int k = 0; k < thr.length; k++) {
            add(isConstructor? classToConstructorThrows : classToMethodThrows,
                    thr[k], em);
        }
    }

    /**
     * Returns the list stored under the class's qualified name in the given map,
     * creating and registering an empty list on first access.
     *
     * @param map the map to look in (and possibly update)
     * @param cd  the class whose qualified name is the key
     * @return the (possibly freshly created) list for that class
     */
    private <T> List<T> refList(Map<String,List<T>> map, ClassDoc cd) {
        List<T> list = map.get(cd.qualifiedName());
        if (list == null) {
            List<T> l = new ArrayList<T>();
            list = l;
            map.put(cd.qualifiedName(), list);
        }
        return list;
    }

    /**
     * Returns the set of using packages for the given class from classToPackage,
     * creating and registering an empty set on first access.
     */
    private Set<PackageDoc> packageSet(ClassDoc cd) {
        Set<PackageDoc> pkgSet = classToPackage.get(cd.qualifiedName());
        if (pkgSet == null) {
            pkgSet = new TreeSet<PackageDoc>();
            classToPackage.put(cd.qualifiedName(), pkgSet);
        }
        return pkgSet;
    }

    /**
     * Returns the set of using classes for the given class from classToClass,
     * creating and registering an empty set on first access.
     */
    private Set<ClassDoc> classSet(ClassDoc cd) {
        Set<ClassDoc> clsSet = classToClass.get(cd.qualifiedName());
        if (clsSet == null) {
            Set<ClassDoc> s = new TreeSet<ClassDoc>();
            clsSet = s;
            classToClass.put(cd.qualifiedName(), clsSet);
        }
        return clsSet;
    }

    /**
     * Records a single use of class {@code cd} by program element {@code ref},
     * updating the given map plus the package-level and class-level use maps.
     *
     * @param map the specific use map to record into
     * @param cd  the used class
     * @param ref the program element using it
     */
    private <T extends ProgramElementDoc> void add(Map<String,List<T>> map, ClassDoc cd, T ref) {
        // add to specified map
        refList(map, cd).add(ref);
        // add ref's package to package map and class map
        packageSet(cd).add(ref.containingPackage());
        classSet(cd).add(ref instanceof MemberDoc?
                         ((MemberDoc)ref).containingClass() :
                         (ClassDoc)ref);
    }

    /**
     * Records uses of class {@code cd} by every class in {@code refs},
     * updating the given map plus the package-level and class-level use maps.
     * A null {@code refs} is a no-op.
     */
    private void addAll(Map<String,List<ClassDoc>> map, ClassDoc cd, Collection<ClassDoc> refs) {
        if (refs == null) {
            return;
        }
        // add to specified map
        refList(map, cd).addAll(refs);
        Set<PackageDoc> pkgSet = packageSet(cd);
        Set<ClassDoc> clsSet = classSet(cd);
        // add ref's package to package map and class map
        for (Iterator<ClassDoc> it = refs.iterator(); it.hasNext();) {
            ClassDoc cls = it.next();
            pkgSet.add(cls.containingPackage());
            clsSet.add(cls);
        }
    }

    /**
     * Map the ClassDocs to the ProgramElementDocs that use them as
     * type parameters.
     *
     * @param map the map the insert the information into.
     * @param doc the doc whose type parameters are being checked.
     * @param holder the holder that owns the type parameters.
     */
    private <T extends ProgramElementDoc> void mapTypeParameters(Map<String,List<T>> map, Object doc,
            T holder) {
        TypeVariable[] typeVariables;
        if (doc instanceof ClassDoc) {
            typeVariables = ((ClassDoc) doc).typeParameters();
        } else if (doc instanceof WildcardType) {
            // wildcards contribute through both their extends- and super-bounds
            Type[] extendsBounds = ((WildcardType) doc).extendsBounds();
            for (int k = 0; k < extendsBounds.length; k++) {
                addTypeParameterToMap(map, extendsBounds[k], holder);
            }
            Type[] superBounds = ((WildcardType) doc).superBounds();
            for (int k = 0; k < superBounds.length; k++) {
                addTypeParameterToMap(map, superBounds[k], holder);
            }
            return;
        } else if (doc instanceof ParameterizedType) {
            Type[] typeArguments = ((ParameterizedType) doc).typeArguments();
            for (int k = 0; k < typeArguments.length; k++) {
                addTypeParameterToMap(map, typeArguments[k], holder);
            }
            return;
        } else if (doc instanceof ExecutableMemberDoc) {
            typeVariables = ((ExecutableMemberDoc) doc).typeParameters();
        } else if (doc instanceof FieldDoc) {
            // for a field, recurse on its declared type
            Type fieldType = ((FieldDoc) doc).type();
            mapTypeParameters(map, fieldType, holder);
            return;
        } else {
            // other kinds of docs carry no type parameters
            return;
        }
        // record every bound of every declared type variable
        for (int i = 0; i < typeVariables.length; i++) {
            Type[] bounds = typeVariables[i].bounds();
            for (int j = 0; j < bounds.length; j++) {
                addTypeParameterToMap(map, bounds[j], holder);
            }
        }
    }

    /**
     * Map the AnnotationTypeDocs to the ProgramElementDocs, packages or
     * parameters that use them.
     *
     * @param map the map the insert the information into.
     * @param doc the doc whose annotations are being checked.
     * @param holder the holder that owns the annotations.
     */
    private <T extends ProgramElementDoc> void mapAnnotations(Map<String,List<T>> map, Object doc,
            T holder) {
        AnnotationDesc[] annotations;
        boolean isPackage = false;
        if (doc instanceof ProgramElementDoc) {
            annotations = ((ProgramElementDoc) doc).annotations();
        } else if (doc instanceof PackageDoc) {
            annotations = ((PackageDoc) doc).annotations();
            isPackage = true;
        } else if (doc instanceof Parameter) {
            annotations = ((Parameter) doc).annotations();
        } else {
            // unexpected doc kind: abort the doclet run
            throw new DocletAbortException();
        }
        for (int i = 0; i < annotations.length; i++) {
            AnnotationTypeDoc annotationDoc = annotations[i].annotationType();
            if (isPackage)
                // packages only go in the specific map, not the class/package use maps
                refList(map, annotationDoc).add(holder);
            else
                add(map, annotationDoc, holder);
        }
    }

    /**
     * Map the AnnotationTypeDocs to the PackageDocs that use them.
     *
     * @param map the map the insert the information into.
     * @param doc the package whose annotations are being checked.
     * @param holder the holder that owns the annotations.
     */
    private <T extends PackageDoc> void mapAnnotations(Map<String,List<T>> map, PackageDoc doc,
            T holder) {
        AnnotationDesc[] annotations;
        annotations = doc.annotations();
        for (int i = 0; i < annotations.length; i++) {
            AnnotationTypeDoc annotationDoc = annotations[i].annotationType();
            refList(map, annotationDoc).add(holder);
        }
    }

    /**
     * Records {@code type} as a type parameter used by {@code holder}, then
     * recurses into {@code type}'s own type parameters.
     *
     * @param map the map to insert the information into.
     * @param type the type to record.
     * @param holder the program element that owns the type parameter.
     */
    private <T extends ProgramElementDoc> void addTypeParameterToMap(Map<String,List<T>> map, Type type,
            T holder) {
        if (type instanceof ClassDoc) {
            add(map, (ClassDoc) type, holder);
        } else if (type instanceof ParameterizedType) {
            add(map, ((ParameterizedType) type).asClassDoc(), holder);
        }
        mapTypeParameters(map, type, holder);
    }
}
| |
/*
* Copyright 2002-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.scheduling.concurrent;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.lang.Nullable;
/**
* JavaBean that allows for configuring a {@link java.util.concurrent.ThreadPoolExecutor}
* in bean style (through its "corePoolSize", "maxPoolSize", "keepAliveSeconds",
* "queueCapacity" properties) and exposing it as a bean reference of its native
* {@link java.util.concurrent.ExecutorService} type.
*
* <p>The default configuration is a core pool size of 1, with unlimited max pool size
* and unlimited queue capacity. This is roughly equivalent to
* {@link java.util.concurrent.Executors#newSingleThreadExecutor()}, sharing a single
* thread for all tasks. Setting {@link #setQueueCapacity "queueCapacity"} to 0 mimics
* {@link java.util.concurrent.Executors#newCachedThreadPool()}, with immediate scaling
* of threads in the pool to a potentially very high number. Consider also setting a
* {@link #setMaxPoolSize "maxPoolSize"} at that point, as well as possibly a higher
* {@link #setCorePoolSize "corePoolSize"} (see also the
* {@link #setAllowCoreThreadTimeOut "allowCoreThreadTimeOut"} mode of scaling).
*
* <p>For an alternative, you may set up a {@link ThreadPoolExecutor} instance directly
* using constructor injection, or use a factory method definition that points to the
* {@link java.util.concurrent.Executors} class.
* <b>This is strongly recommended in particular for common {@code @Bean} methods in
* configuration classes, where this {@code FactoryBean} variant would force you to
* return the {@code FactoryBean} type instead of the actual {@code Executor} type.</b>
*
* <p>If you need a timing-based {@link java.util.concurrent.ScheduledExecutorService}
* instead, consider {@link ScheduledExecutorFactoryBean}.
* @author Juergen Hoeller
* @since 3.0
* @see java.util.concurrent.ExecutorService
* @see java.util.concurrent.Executors
* @see java.util.concurrent.ThreadPoolExecutor
*/
@SuppressWarnings("serial")
public class ThreadPoolExecutorFactoryBean extends ExecutorConfigurationSupport
        implements FactoryBean<ExecutorService> {

    // Pool sizing defaults: single core thread, unbounded growth, 60s keep-alive.
    private int corePoolSize = 1;

    private int maxPoolSize = Integer.MAX_VALUE;

    private int keepAliveSeconds = 60;

    private boolean allowCoreThreadTimeOut = false;

    private boolean prestartAllCoreThreads = false;

    // Effectively unbounded queue by default.
    private int queueCapacity = Integer.MAX_VALUE;

    private boolean exposeUnconfigurableExecutor = false;

    // The executor handed out via getObject(); possibly an unconfigurable wrapper.
    @Nullable
    private ExecutorService exposedExecutor;

    /**
     * Configure the core pool size of the underlying ThreadPoolExecutor.
     * <p>Defaults to 1.
     */
    public void setCorePoolSize(int corePoolSize) {
        this.corePoolSize = corePoolSize;
    }

    /**
     * Configure the maximum pool size of the underlying ThreadPoolExecutor.
     * <p>Defaults to {@code Integer.MAX_VALUE}.
     */
    public void setMaxPoolSize(int maxPoolSize) {
        this.maxPoolSize = maxPoolSize;
    }

    /**
     * Configure the keep-alive time (in seconds) of the underlying ThreadPoolExecutor.
     * <p>Defaults to 60.
     */
    public void setKeepAliveSeconds(int keepAliveSeconds) {
        this.keepAliveSeconds = keepAliveSeconds;
    }

    /**
     * Specify whether to allow core threads to time out. This enables dynamic
     * growing and shrinking even in combination with a non-zero queue (since
     * the max pool size will only grow once the queue is full).
     * <p>Default is "false".
     * @see java.util.concurrent.ThreadPoolExecutor#allowCoreThreadTimeOut(boolean)
     */
    public void setAllowCoreThreadTimeOut(boolean allowCoreThreadTimeOut) {
        this.allowCoreThreadTimeOut = allowCoreThreadTimeOut;
    }

    /**
     * Specify whether to start all core threads, causing them to idly wait for work.
     * <p>Default is "false".
     * @since 5.3.14
     * @see java.util.concurrent.ThreadPoolExecutor#prestartAllCoreThreads
     */
    public void setPrestartAllCoreThreads(boolean prestartAllCoreThreads) {
        this.prestartAllCoreThreads = prestartAllCoreThreads;
    }

    /**
     * Configure the capacity of the executor's work queue.
     * <p>Defaults to {@code Integer.MAX_VALUE}. A positive value produces a
     * {@link java.util.concurrent.LinkedBlockingQueue}; zero or a negative value
     * produces a {@link java.util.concurrent.SynchronousQueue} (direct handoff).
     */
    public void setQueueCapacity(int queueCapacity) {
        this.queueCapacity = queueCapacity;
    }

    /**
     * Specify whether this FactoryBean should expose an unconfigurable
     * decorator for the created executor.
     * <p>Default is "false", exposing the raw executor as bean reference.
     * Switch this flag to "true" to strictly prevent clients from
     * modifying the executor's configuration.
     * @see java.util.concurrent.Executors#unconfigurableExecutorService
     */
    public void setExposeUnconfigurableExecutor(boolean exposeUnconfigurableExecutor) {
        this.exposeUnconfigurableExecutor = exposeUnconfigurableExecutor;
    }

    /**
     * Builds the ThreadPoolExecutor from the configured properties, applies the
     * core-thread options, and records the executor reference exposed via
     * {@link #getObject()} (optionally wrapped as unconfigurable).
     */
    @Override
    protected ExecutorService initializeExecutor(
            ThreadFactory threadFactory, RejectedExecutionHandler rejectedExecutionHandler) {

        BlockingQueue<Runnable> taskQueue = createQueue(this.queueCapacity);
        ThreadPoolExecutor threadPool = createExecutor(this.corePoolSize, this.maxPoolSize,
                this.keepAliveSeconds, taskQueue, threadFactory, rejectedExecutionHandler);

        if (this.allowCoreThreadTimeOut) {
            threadPool.allowCoreThreadTimeOut(true);
        }
        if (this.prestartAllCoreThreads) {
            threadPool.prestartAllCoreThreads();
        }

        // Decide what getObject() hands out; the raw executor is still returned
        // here so that lifecycle management operates on the real instance.
        this.exposedExecutor = (this.exposeUnconfigurableExecutor
                ? Executors.unconfigurableExecutorService(threadPool)
                : threadPool);

        return threadPool;
    }

    /**
     * Create a new instance of {@link ThreadPoolExecutor} or a subclass thereof.
     * <p>The default implementation creates a standard {@link ThreadPoolExecutor}.
     * Can be overridden to provide custom {@link ThreadPoolExecutor} subclasses.
     * @param corePoolSize the specified core pool size
     * @param maxPoolSize the specified maximum pool size
     * @param keepAliveSeconds the specified keep-alive time in seconds
     * @param queue the BlockingQueue to use
     * @param threadFactory the ThreadFactory to use
     * @param rejectedExecutionHandler the RejectedExecutionHandler to use
     * @return a new ThreadPoolExecutor instance
     * @see #afterPropertiesSet()
     */
    protected ThreadPoolExecutor createExecutor(
            int corePoolSize, int maxPoolSize, int keepAliveSeconds, BlockingQueue<Runnable> queue,
            ThreadFactory threadFactory, RejectedExecutionHandler rejectedExecutionHandler) {

        return new ThreadPoolExecutor(corePoolSize, maxPoolSize,
                keepAliveSeconds, TimeUnit.SECONDS, queue, threadFactory, rejectedExecutionHandler);
    }

    /**
     * Create the BlockingQueue to use for the ThreadPoolExecutor.
     * <p>A positive capacity yields a LinkedBlockingQueue; anything else
     * yields a SynchronousQueue (direct handoff, no buffering).
     * @param queueCapacity the specified queue capacity
     * @return the BlockingQueue instance
     * @see java.util.concurrent.LinkedBlockingQueue
     * @see java.util.concurrent.SynchronousQueue
     */
    protected BlockingQueue<Runnable> createQueue(int queueCapacity) {
        return (queueCapacity > 0
                ? new LinkedBlockingQueue<>(queueCapacity)
                : new SynchronousQueue<>());
    }

    /**
     * Returns the executor created by {@link #initializeExecutor}, or
     * {@code null} if not yet initialized.
     */
    @Override
    @Nullable
    public ExecutorService getObject() {
        return this.exposedExecutor;
    }

    @Override
    public Class<? extends ExecutorService> getObjectType() {
        ExecutorService handle = this.exposedExecutor;
        if (handle != null) {
            return handle.getClass();
        }
        return ExecutorService.class;
    }

    @Override
    public boolean isSingleton() {
        return true;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.process.traversal.util;
import org.apache.tinkerpop.gremlin.process.traversal.Step;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalEngine;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalSideEffects;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies;
import org.apache.tinkerpop.gremlin.process.traversal.Traverser;
import org.apache.tinkerpop.gremlin.process.traversal.engine.StandardTraversalEngine;
import org.apache.tinkerpop.gremlin.process.traversal.step.TraversalParent;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.EmptyStep;
import org.apache.tinkerpop.gremlin.process.traversal.traverser.TraverserRequirement;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.util.StringFactory;
import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Default {@link Traversal.Admin} implementation: an ordered chain of {@link Step}s that is
 * compiled once (strategies applied, then locked) and thereafter iterated. Traversers emitted
 * by the end step may carry a bulk &gt; 1; {@link #next()} unrolls such bulks into individual
 * results via the {@code lastEnd}/{@code lastEndCount} cache.
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
public class DefaultTraversal<S, E> implements Traversal.Admin<S, E> {

    // Bulk-unrolling cache: when the end step returns a traverser with bulk n > 1,
    // the object is stored here and re-emitted (n - 1) more times by next().
    private E lastEnd = null;
    private long lastEndCount = 0l;
    // Snapshot of getEndStep() taken when the traversal is compiled/locked.
    private Step<?, E> finalEndStep = EmptyStep.instance();
    private final StepPosition stepPosition = new StepPosition();

    protected transient Graph graph;
    protected List<Step> steps = new ArrayList<>();
    // steps will be repeatedly retrieved from this traversal so wrap them once in an immutable list that can be reused
    protected List<Step> unmodifiableSteps = Collections.unmodifiableList(steps);
    protected TraversalParent traversalParent = (TraversalParent) EmptyStep.instance();
    protected TraversalSideEffects sideEffects = new DefaultTraversalSideEffects();
    protected TraversalStrategies strategies;
    protected TraversalEngine traversalEngine = StandardTraversalEngine.instance(); // necessary for strategies that need the engine in OLAP message passing (not so bueno)
    // Once locked (after applyStrategies), the step chain is immutable.
    protected boolean locked = false;
    protected Set<TraverserRequirement> traverserRequirements = new HashSet<>();

    public DefaultTraversal() {
        this.graph = null;
        // necessary for anonymous traversals without a graph start (rethink how this works in the future)
        this.setStrategies(TraversalStrategies.GlobalCache.getStrategies(EmptyGraph.class));
    }

    public DefaultTraversal(final Graph graph) {
        this.graph = graph;
        this.setStrategies(TraversalStrategies.GlobalCache.getStrategies(this.graph.getClass()));
    }

    @Override
    public Traversal.Admin<S, E> asAdmin() {
        return this;
    }

    /**
     * Compiles the traversal: re-ids the steps, applies the registered strategies to this
     * traversal and (recursively) to all global/local child traversals, then locks it.
     *
     * @throws IllegalStateException if the traversal has already been locked
     */
    @Override
    public void applyStrategies() throws IllegalStateException {
        if (this.locked) throw Traversal.Exceptions.traversalIsLocked();
        TraversalHelper.reIdSteps(this.stepPosition, this);
        this.strategies.applyStrategies(this);
        boolean hasGraph = null != this.graph;
        for (final Step<?, ?> step : this.getSteps()) {
            if (step instanceof TraversalParent) {
                // Global children run on this traversal's engine; local children always
                // run on the standard (OLTP) engine.
                for (final Traversal.Admin<?, ?> globalChild : ((TraversalParent) step).getGlobalChildren()) {
                    globalChild.setStrategies(this.strategies);
                    globalChild.setEngine(this.traversalEngine);
                    if (hasGraph) globalChild.setGraph(this.graph);
                    globalChild.applyStrategies();
                }
                for (final Traversal.Admin<?, ?> localChild : ((TraversalParent) step).getLocalChildren()) {
                    localChild.setStrategies(this.strategies);
                    localChild.setEngine(StandardTraversalEngine.instance());
                    if (hasGraph) localChild.setGraph(this.graph);
                    localChild.applyStrategies();
                }
            }
        }
        this.finalEndStep = this.getEndStep();
        this.locked = true;
    }

    @Override
    public TraversalEngine getEngine() {
        return this.traversalEngine;
    }

    @Override
    public void setEngine(final TraversalEngine engine) {
        this.traversalEngine = engine;
    }

    /**
     * Aggregates the requirements of every step plus any explicitly added requirements,
     * then adds/removes engine- and side-effect-derived requirements.
     */
    @Override
    public Set<TraverserRequirement> getTraverserRequirements() {
        Set<TraverserRequirement> requirements = new HashSet<>();
        for (Step step : this.getSteps()) {
            requirements.addAll(step.getRequirements());
        }
        requirements.addAll(this.traverserRequirements);
        if (this.getSideEffects().keys().size() > 0)
            requirements.add(TraverserRequirement.SIDE_EFFECTS);
        if (null != this.getSideEffects().getSackInitialValue())
            requirements.add(TraverserRequirement.SACK);
        if (this.getEngine().isComputer())
            requirements.add(TraverserRequirement.BULK);
        // ONE_BULK supersedes BULK: traversers are never merged in that mode.
        if (requirements.contains(TraverserRequirement.ONE_BULK))
            requirements.remove(TraverserRequirement.BULK);
        return requirements;
    }

    @Override
    public void addTraverserRequirement(final TraverserRequirement traverserRequirement) {
        this.traverserRequirements.add(traverserRequirement);
    }

    @Override
    public List<Step> getSteps() {
        // Pre-wrapped immutable view; see field comment.
        return unmodifiableSteps;
    }

    @Override
    public boolean hasNext() {
        if (!this.locked) this.applyStrategies();
        // True while a bulk is still being unrolled or the end step has more traversers.
        return this.lastEndCount > 0l || this.finalEndStep.hasNext();
    }

    @Override
    public E next() {
        if (!this.locked) this.applyStrategies();
        if (this.lastEndCount > 0l) {
            // Still unrolling a previously seen bulked traverser.
            this.lastEndCount--;
            return this.lastEnd;
        } else {
            final Traverser<E> next = this.finalEndStep.next();
            final long nextBulk = next.bulk();
            if (nextBulk == 1) {
                return next.get();
            } else {
                // Cache the object and emit it (nextBulk - 1) more times on later calls.
                this.lastEndCount = nextBulk - 1;
                this.lastEnd = next.get();
                return this.lastEnd;
            }
        }
    }

    @Override
    public void reset() {
        this.steps.forEach(Step::reset);
        this.lastEndCount = 0l;
        // Fix: also drop the cached end object so a reset traversal does not keep a
        // stale reference to the last traversed element (it can never be read again
        // once lastEndCount is 0, but it was previously retained indefinitely).
        this.lastEnd = null;
    }

    @Override
    public void addStart(final Traverser<S> start) {
        if (!this.locked) this.applyStrategies();
        if (!this.steps.isEmpty()) this.steps.get(0).addStart(start);
    }

    @Override
    public void addStarts(final Iterator<Traverser<S>> starts) {
        if (!this.locked) this.applyStrategies();
        if (!this.steps.isEmpty()) this.steps.get(0).addStarts(starts);
    }

    @Override
    public String toString() {
        return StringFactory.traversalString(this);
    }

    @Override
    public Step<S, ?> getStartStep() {
        return this.steps.isEmpty() ? EmptyStep.instance() : this.steps.get(0);
    }

    @Override
    public Step<?, E> getEndStep() {
        return this.steps.isEmpty() ? EmptyStep.instance() : this.steps.get(this.steps.size() - 1);
    }

    /**
     * Deep-copies the step chain (re-linking previous/next pointers), side effects and
     * strategies; the clone starts with an empty bulk-unrolling cache.
     */
    @Override
    public DefaultTraversal<S, E> clone() {
        try {
            final DefaultTraversal<S, E> clone = (DefaultTraversal<S, E>) super.clone();
            clone.steps = new ArrayList<>();
            clone.unmodifiableSteps = Collections.unmodifiableList(clone.steps);
            clone.sideEffects = this.sideEffects.clone();
            clone.strategies = this.strategies.clone();
            clone.lastEnd = null;
            clone.lastEndCount = 0l;
            for (final Step<?, ?> step : this.steps) {
                final Step<?, ?> clonedStep = step.clone();
                clonedStep.setTraversal(clone);
                final Step previousStep = clone.steps.isEmpty() ? EmptyStep.instance() : clone.steps.get(clone.steps.size() - 1);
                clonedStep.setPreviousStep(previousStep);
                previousStep.setNextStep(clonedStep);
                clone.steps.add(clonedStep);
            }
            clone.finalEndStep = clone.getEndStep();
            return clone;
        } catch (final CloneNotSupportedException e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
    }

    @Override
    public boolean isLocked() {
        return this.locked;
    }

    @Override
    public void setSideEffects(final TraversalSideEffects sideEffects) {
        this.sideEffects = sideEffects;
    }

    @Override
    public TraversalSideEffects getSideEffects() {
        return this.sideEffects;
    }

    @Override
    public void setStrategies(final TraversalStrategies strategies) {
        // Defensive clone: external mutations to the supplied strategies must not
        // affect this traversal.
        this.strategies = strategies.clone();
    }

    @Override
    public TraversalStrategies getStrategies() {
        return this.strategies;
    }

    /**
     * Inserts a step at {@code index} and re-links the previous/next pointers of its
     * neighbors (EmptyStep marks a missing neighbor).
     *
     * @throws IllegalStateException if the traversal is locked
     */
    @Override
    public <S2, E2> Traversal.Admin<S2, E2> addStep(final int index, final Step<?, ?> step) throws IllegalStateException {
        if (this.locked) throw Exceptions.traversalIsLocked();
        step.setId(this.stepPosition.nextXId());
        this.steps.add(index, step);
        final Step previousStep = this.steps.size() > 0 && index != 0 ? steps.get(index - 1) : null;
        final Step nextStep = this.steps.size() > index + 1 ? steps.get(index + 1) : null;
        step.setPreviousStep(null != previousStep ? previousStep : EmptyStep.instance());
        step.setNextStep(null != nextStep ? nextStep : EmptyStep.instance());
        if (null != previousStep) previousStep.setNextStep(step);
        if (null != nextStep) nextStep.setPreviousStep(step);
        step.setTraversal(this);
        return (Traversal.Admin<S2, E2>) this;
    }

    /**
     * Removes the step at {@code index} and stitches its former neighbors together.
     *
     * @throws IllegalStateException if the traversal is locked
     */
    @Override
    public <S2, E2> Traversal.Admin<S2, E2> removeStep(final int index) throws IllegalStateException {
        if (this.locked) throw Exceptions.traversalIsLocked();
        final Step previousStep = this.steps.size() > 0 && index != 0 ? steps.get(index - 1) : null;
        final Step nextStep = this.steps.size() > index + 1 ? steps.get(index + 1) : null;
        //this.steps.get(index).setTraversal(EmptyTraversal.instance());
        this.steps.remove(index);
        if (null != previousStep) previousStep.setNextStep(null == nextStep ? EmptyStep.instance() : nextStep);
        if (null != nextStep) nextStep.setPreviousStep(null == previousStep ? EmptyStep.instance() : previousStep);
        return (Traversal.Admin<S2, E2>) this;
    }

    @Override
    public void setParent(final TraversalParent step) {
        this.traversalParent = step;
    }

    @Override
    public TraversalParent getParent() {
        return this.traversalParent;
    }

    @Override
    public Optional<Graph> getGraph() {
        return Optional.ofNullable(this.graph);
    }

    @Override
    public void setGraph(final Graph graph) {
        this.graph = graph;
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: netty/SubscribeReq.proto
package org.hope6537.netty.codec.protobuf;
public final class SubscribeReqProto {
private SubscribeReqProto() {
    // Private constructor: this generated outer class is a pure namespace holder.
}
public static void registerAllExtensions(
        com.google.protobuf.ExtensionRegistry registry) {
    // netty/SubscribeReq.proto declares no extensions, so there is nothing to register.
}
/**
 * Accessor contract shared by the immutable {@code netty.SubscribeReq} message and its
 * {@code Builder}. Generated by protoc — regenerate from netty/SubscribeReq.proto rather
 * than hand-editing.
 */
public interface SubscribeReqOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

    // required int32 subReqID = 1;
    /**
     * <code>required int32 subReqID = 1;</code>
     */
    boolean hasSubReqID();
    /**
     * <code>required int32 subReqID = 1;</code>
     */
    int getSubReqID();

    // required string userName = 2;
    /**
     * <code>required string userName = 2;</code>
     */
    boolean hasUserName();
    /**
     * <code>required string userName = 2;</code>
     */
    String getUserName();
    /**
     * <code>required string userName = 2;</code>
     */
    com.google.protobuf.ByteString
    getUserNameBytes();

    // required string productName = 3;
    /**
     * <code>required string productName = 3;</code>
     */
    boolean hasProductName();
    /**
     * <code>required string productName = 3;</code>
     */
    String getProductName();
    /**
     * <code>required string productName = 3;</code>
     */
    com.google.protobuf.ByteString
    getProductNameBytes();

    // repeated string address = 4;
    /**
     * <code>repeated string address = 4;</code>
     */
    java.util.List<String>
    getAddressList();
    /**
     * <code>repeated string address = 4;</code>
     */
    int getAddressCount();
    /**
     * <code>repeated string address = 4;</code>
     */
    String getAddress(int index);
    /**
     * <code>repeated string address = 4;</code>
     */
    com.google.protobuf.ByteString
    getAddressBytes(int index);
}
/**
* Protobuf type {@code netty.SubscribeReq}
*/
public static final class SubscribeReq extends
com.google.protobuf.GeneratedMessage
implements SubscribeReqOrBuilder {
// Use SubscribeReq.newBuilder() to construct.
private SubscribeReq(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private SubscribeReq(boolean noInit) {
this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
private static final SubscribeReq defaultInstance;
public static SubscribeReq getDefaultInstance() {
return defaultInstance;
}
public SubscribeReq getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SubscribeReq(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
subReqID_ = input.readInt32();
break;
}
case 18: {
bitField0_ |= 0x00000002;
userName_ = input.readBytes();
break;
}
case 26: {
bitField0_ |= 0x00000004;
productName_ = input.readBytes();
break;
}
case 34: {
if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
address_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000008;
}
address_.add(input.readBytes());
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
address_ = new com.google.protobuf.UnmodifiableLazyStringList(address_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.hope6537.netty.codec.protobuf.SubscribeReqProto.internal_static_netty_SubscribeReq_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.hope6537.netty.codec.protobuf.SubscribeReqProto.internal_static_netty_SubscribeReq_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq.class, org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq.Builder.class);
}
public static com.google.protobuf.Parser<SubscribeReq> PARSER =
new com.google.protobuf.AbstractParser<SubscribeReq>() {
public SubscribeReq parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SubscribeReq(input, extensionRegistry);
}
};
@Override
public com.google.protobuf.Parser<SubscribeReq> getParserForType() {
return PARSER;
}
private int bitField0_;
// required int32 subReqID = 1;
public static final int SUBREQID_FIELD_NUMBER = 1;
private int subReqID_;
/**
* <code>required int32 subReqID = 1;</code>
*/
public boolean hasSubReqID() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int32 subReqID = 1;</code>
*/
public int getSubReqID() {
return subReqID_;
}
// required string userName = 2;
public static final int USERNAME_FIELD_NUMBER = 2;
private Object userName_;
/**
* <code>required string userName = 2;</code>
*/
public boolean hasUserName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string userName = 2;</code>
*/
public String getUserName() {
Object ref = userName_;
if (ref instanceof String) {
return (String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
userName_ = s;
}
return s;
}
}
/**
* <code>required string userName = 2;</code>
*/
public com.google.protobuf.ByteString
getUserNameBytes() {
Object ref = userName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
userName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required string productName = 3;
public static final int PRODUCTNAME_FIELD_NUMBER = 3;
private Object productName_;
/**
* <code>required string productName = 3;</code>
*/
public boolean hasProductName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required string productName = 3;</code>
*/
public String getProductName() {
Object ref = productName_;
if (ref instanceof String) {
return (String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
productName_ = s;
}
return s;
}
}
/**
* <code>required string productName = 3;</code>
*/
public com.google.protobuf.ByteString
getProductNameBytes() {
Object ref = productName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
productName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// repeated string address = 4;
public static final int ADDRESS_FIELD_NUMBER = 4;
private com.google.protobuf.LazyStringList address_;
/**
* <code>repeated string address = 4;</code>
*/
public java.util.List<String>
getAddressList() {
return address_;
}
/**
* <code>repeated string address = 4;</code>
*/
public int getAddressCount() {
return address_.size();
}
/**
* <code>repeated string address = 4;</code>
*/
public String getAddress(int index) {
return address_.get(index);
}
/**
* <code>repeated string address = 4;</code>
*/
public com.google.protobuf.ByteString
getAddressBytes(int index) {
return address_.getByteString(index);
}
private void initFields() {
subReqID_ = 0;
userName_ = "";
productName_ = "";
address_ = com.google.protobuf.LazyStringArrayList.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasSubReqID()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasUserName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasProductName()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt32(1, subReqID_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getUserNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, getProductNameBytes());
}
for (int i = 0; i < address_.size(); i++) {
output.writeBytes(4, address_.getByteString(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(1, subReqID_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getUserNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getProductNameBytes());
}
{
int dataSize = 0;
for (int i = 0; i < address_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(address_.getByteString(i));
}
size += dataSize;
size += 1 * getAddressList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@Override
protected Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() {
return Builder.create();
}
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder(org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return newBuilder(this);
}
@Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code netty.SubscribeReq}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReqOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.hope6537.netty.codec.protobuf.SubscribeReqProto.internal_static_netty_SubscribeReq_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.hope6537.netty.codec.protobuf.SubscribeReqProto.internal_static_netty_SubscribeReq_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq.class, org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq.Builder.class);
}
// Construct using org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
subReqID_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
userName_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
productName_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
address_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.hope6537.netty.codec.protobuf.SubscribeReqProto.internal_static_netty_SubscribeReq_descriptor;
}
public org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq getDefaultInstanceForType() {
return org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq.getDefaultInstance();
}
public org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq build() {
org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq buildPartial() {
org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq result = new org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.subReqID_ = subReqID_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.userName_ = userName_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.productName_ = productName_;
if (((bitField0_ & 0x00000008) == 0x00000008)) {
address_ = new com.google.protobuf.UnmodifiableLazyStringList(
address_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.address_ = address_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq) {
return mergeFrom((org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq other) {
if (other == org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq.getDefaultInstance())
return this;
if (other.hasSubReqID()) {
setSubReqID(other.getSubReqID());
}
if (other.hasUserName()) {
bitField0_ |= 0x00000002;
userName_ = other.userName_;
onChanged();
}
if (other.hasProductName()) {
bitField0_ |= 0x00000004;
productName_ = other.productName_;
onChanged();
}
if (!other.address_.isEmpty()) {
if (address_.isEmpty()) {
address_ = other.address_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureAddressIsMutable();
address_.addAll(other.address_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasSubReqID()) {
return false;
}
if (!hasUserName()) {
return false;
}
if (!hasProductName()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.hope6537.netty.codec.protobuf.SubscribeReqProto.SubscribeReq) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required int32 subReqID = 1;
private int subReqID_;
/**
* <code>required int32 subReqID = 1;</code>
*/
public boolean hasSubReqID() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int32 subReqID = 1;</code>
*/
public int getSubReqID() {
return subReqID_;
}
/**
* <code>required int32 subReqID = 1;</code>
*/
public Builder setSubReqID(int value) {
bitField0_ |= 0x00000001;
subReqID_ = value;
onChanged();
return this;
}
/**
* <code>required int32 subReqID = 1;</code>
*/
public Builder clearSubReqID() {
bitField0_ = (bitField0_ & ~0x00000001);
subReqID_ = 0;
onChanged();
return this;
}
// required string userName = 2;
private Object userName_ = "";
/**
* <code>required string userName = 2;</code>
*/
public boolean hasUserName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string userName = 2;</code>
*/
public String getUserName() {
Object ref = userName_;
if (!(ref instanceof String)) {
String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
userName_ = s;
return s;
} else {
return (String) ref;
}
}
/**
* <code>required string userName = 2;</code>
*/
public com.google.protobuf.ByteString
getUserNameBytes() {
Object ref = userName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
userName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string userName = 2;</code>
*/
public Builder setUserName(
String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
userName_ = value;
onChanged();
return this;
}
/**
* <code>required string userName = 2;</code>
*/
public Builder clearUserName() {
bitField0_ = (bitField0_ & ~0x00000002);
userName_ = getDefaultInstance().getUserName();
onChanged();
return this;
}
/**
* <code>required string userName = 2;</code>
*/
public Builder setUserNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
userName_ = value;
onChanged();
return this;
}
// required string productName = 3;
private Object productName_ = "";
/**
* <code>required string productName = 3;</code>
*/
public boolean hasProductName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required string productName = 3;</code>
*/
public String getProductName() {
Object ref = productName_;
if (!(ref instanceof String)) {
String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
productName_ = s;
return s;
} else {
return (String) ref;
}
}
/**
* <code>required string productName = 3;</code>
*/
public com.google.protobuf.ByteString
getProductNameBytes() {
Object ref = productName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
productName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string productName = 3;</code>
*/
public Builder setProductName(
String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
productName_ = value;
onChanged();
return this;
}
/**
* <code>required string productName = 3;</code>
*/
public Builder clearProductName() {
bitField0_ = (bitField0_ & ~0x00000004);
productName_ = getDefaultInstance().getProductName();
onChanged();
return this;
}
/**
* <code>required string productName = 3;</code>
*/
public Builder setProductNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
productName_ = value;
onChanged();
return this;
}
// repeated string address = 4;
private com.google.protobuf.LazyStringList address_ = com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureAddressIsMutable() {
if (!((bitField0_ & 0x00000008) == 0x00000008)) {
address_ = new com.google.protobuf.LazyStringArrayList(address_);
bitField0_ |= 0x00000008;
}
}
/**
* <code>repeated string address = 4;</code>
*/
public java.util.List<String>
getAddressList() {
return java.util.Collections.unmodifiableList(address_);
}
/**
* <code>repeated string address = 4;</code>
*/
public int getAddressCount() {
return address_.size();
}
/**
* <code>repeated string address = 4;</code>
*/
public String getAddress(int index) {
return address_.get(index);
}
/**
* <code>repeated string address = 4;</code>
*/
public com.google.protobuf.ByteString
getAddressBytes(int index) {
return address_.getByteString(index);
}
/**
* <code>repeated string address = 4;</code>
*/
public Builder setAddress(
int index, String value) {
if (value == null) {
throw new NullPointerException();
}
ensureAddressIsMutable();
address_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated string address = 4;</code>
*/
public Builder addAddress(
String value) {
if (value == null) {
throw new NullPointerException();
}
ensureAddressIsMutable();
address_.add(value);
onChanged();
return this;
}
/**
* <code>repeated string address = 4;</code>
*/
public Builder addAllAddress(
Iterable<String> values) {
ensureAddressIsMutable();
super.addAll(values, address_);
onChanged();
return this;
}
/**
* <code>repeated string address = 4;</code>
*/
public Builder clearAddress() {
address_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
      /**
       * <code>repeated string address = 4;</code>
       *
       * Appends a raw ByteString element; note the bytes are stored as-is
       * (no UTF-8 validation at this point).
       */
      public Builder addAddressBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureAddressIsMutable();
        address_.add(value);
        onChanged();
        return this;
      }
// @@protoc_insertion_point(builder_scope:netty.SubscribeReq)
}
    // Eagerly builds the shared default (prototype) instance used by
    // getDefaultInstance(); the boolean ctor arg marks it as the no-fields default.
    static {
      defaultInstance = new SubscribeReq(true);
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:netty.SubscribeReq)
}
  // Message descriptor and reflection accessor table for netty.SubscribeReq;
  // both are populated by the static descriptor-assigner block below.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_netty_SubscribeReq_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_netty_SubscribeReq_fieldAccessorTable;
  /** Returns the file-level descriptor for netty/SubscribeReq.proto. */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  // Parses the serialized FileDescriptorProto (the compiled contents of
  // netty/SubscribeReq.proto) and wires up the per-message descriptor and
  // reflection accessor table. Do not edit the escaped string by hand.
  static {
    String[] descriptorData = {
      "\n\030netty/SubscribeReq.proto\022\005netty\"X\n\014Sub" +
          "scribeReq\022\020\n\010subReqID\030\001 \002(\005\022\020\n\010userName\030" +
          "\002 \002(\t\022\023\n\013productName\030\003 \002(\t\022\017\n\007address\030\004 " +
          "\003(\tB2\n\035org.hope6537.io.codec.protobufB\021Su" +
          "bscribeReqProto"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
          // Callback invoked once the file descriptor is built; index 0 is the
          // single message type (SubscribeReq) declared in the .proto.
          public com.google.protobuf.ExtensionRegistry assignDescriptors(
              com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            internal_static_netty_SubscribeReq_descriptor =
                getDescriptor().getMessageTypes().get(0);
            internal_static_netty_SubscribeReq_fieldAccessorTable = new
                com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_netty_SubscribeReq_descriptor,
                new String[]{"SubReqID", "UserName", "ProductName", "Address",});
            // No extensions declared in this file.
            return null;
          }
        };
    com.google.protobuf.Descriptors.FileDescriptor
        .internalBuildGeneratedFileFrom(descriptorData,
            new com.google.protobuf.Descriptors.FileDescriptor[]{
            }, assigner);
  }
// @@protoc_insertion_point(outer_class_scope)
}
| |
package com.netflix.astyanax.thrift;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.cassandra.thrift.ColumnParent;
import org.apache.cassandra.thrift.KeyRange;
import org.apache.cassandra.thrift.KeySlice;
import org.apache.cassandra.thrift.SlicePredicate;
import org.apache.cassandra.thrift.SliceRange;
import org.apache.cassandra.thrift.Cassandra.Client;
import org.apache.cassandra.utils.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
import com.netflix.astyanax.CassandraOperationType;
import com.netflix.astyanax.ExceptionCallback;
import com.netflix.astyanax.RowCallback;
import com.netflix.astyanax.connectionpool.ConnectionContext;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.TokenRange;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.impl.OperationResultImpl;
import com.netflix.astyanax.model.ByteBufferRange;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnSlice;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.partitioner.Partitioner;
import com.netflix.astyanax.query.AllRowsQuery;
import com.netflix.astyanax.query.CheckpointManager;
import com.netflix.astyanax.shallows.EmptyCheckpointManager;
import com.netflix.astyanax.thrift.model.ThriftRowsSliceImpl;
/**
 * Thrift implementation of {@link AllRowsQuery}: enumerates every row of a
 * column family by walking the token ring and paging with
 * {@code get_range_slices}, {@code blockSize} rows per call. Rows are consumed
 * either lazily via {@link #execute()} (iterator-style) or pushed concurrently
 * to a {@link RowCallback} via {@link #executeWithCallback(RowCallback)}.
 *
 * <p>NOTE(review): this class is mutable and not thread-safe to configure;
 * configure fully before executing.
 */
public class ThriftAllRowsQueryImpl<K, C> implements AllRowsQuery<K, C> {
    private final static Logger LOG = LoggerFactory.getLogger(ThriftAllRowsQueryImpl.class);
    private final ThriftColumnFamilyQueryImpl<K,C> query;
    // Defaults to "all columns"; narrowed by the withColumnSlice/withColumnRange setters.
    protected SlicePredicate predicate = new SlicePredicate().setSlice_range(ThriftUtils.createAllInclusiveSliceRange());
    // No-op unless the caller installs a real manager via setCheckpointManager().
    protected CheckpointManager checkpointManager = new EmptyCheckpointManager();
    protected ColumnFamily<K, C> columnFamily;
    private ExceptionCallback exceptionCallback;
    // Rows requested per get_range_slices round trip.
    private int blockSize = 100;
    // When true, each block after the first re-requests the previous block's last
    // row (dropped again client-side) so no row on a block boundary is skipped.
    private boolean repeatLastToken = true;
    // Requested parallelism for executeWithCallback(); null means one task per ring range.
    private Integer nThreads;
    private String startToken ;
    private String endToken ;
    // Tri-state: null means derive behavior from the predicate (see shouldIgnoreEmptyRows()).
    private Boolean includeEmptyRows;
    public ThriftAllRowsQueryImpl(ThriftColumnFamilyQueryImpl<K, C> query) {
        this.columnFamily = query.columnFamily;
        this.query = query;
    }
    /**
     * Fetches the next block of key slices for {@code range} through the
     * connection pool with failover, looping to retry on connection errors.
     * If an {@link ExceptionCallback} is set it decides whether to retry
     * (returns true) or to end iteration (an empty list is returned);
     * without one, the error is rethrown wrapped in a RuntimeException.
     */
    protected List<org.apache.cassandra.thrift.KeySlice> getNextBlock(final KeyRange range) {
        ThriftKeyspaceImpl keyspace = query.keyspace;
        while (true) {
            try {
                return keyspace.connectionPool.executeWithFailover(
                        new AbstractKeyspaceOperationImpl<List<org.apache.cassandra.thrift.KeySlice>>(
                                keyspace.tracerFactory.newTracer(CassandraOperationType.GET_ROWS_RANGE, columnFamily),
                                query.pinnedHost, keyspace.getKeyspaceName()) {
                            @Override
                            public List<org.apache.cassandra.thrift.KeySlice> internalExecute(Client client, ConnectionContext context)
                                    throws Exception {
                                List<KeySlice> slice = client.get_range_slices(
                                        new ColumnParent().setColumn_family(columnFamily.getName()), predicate,
                                        range, ThriftConverter.ToThriftConsistencyLevel(query.consistencyLevel));
                                return slice;
                            }
                            @Override
                            public ByteBuffer getRowKey() {
                                // start_key is the raw ByteBuffer field; getStart_key() is its byte[] accessor.
                                if (range.getStart_key() != null)
                                    return range.start_key;
                                return null;
                            }
                        }, query.retry).getResult();
            }
            catch (ConnectionException e) {
                // Let exception callback handle this exception. If it
                // returns false then
                // we return an empty result which the iterator's
                // hasNext() to return false.
                // If no exception handler is provided then simply
                // return an empty set as if the
                // there is no more data
                if (this.getExceptionCallback() == null) {
                    throw new RuntimeException(e);
                }
                else {
                    if (!this.getExceptionCallback().onException(e)) {
                        return new ArrayList<org.apache.cassandra.thrift.KeySlice>();
                    }
                }
            }
        }
    }
    /**
     * Returns a lazily-evaluated Rows view; actual fetching happens as the
     * returned rows are iterated (via getNextBlock).
     */
    @Override
    public OperationResult<Rows<K, C>> execute() throws ConnectionException {
        return new OperationResultImpl<Rows<K, C>>(Host.NO_HOST,
                new ThriftAllRowsImpl<K, C>(query.keyspace.getPartitioner(), this, columnFamily), 0);
    }
    @Override
    public ListenableFuture<OperationResult<Rows<K, C>>> executeAsync() throws ConnectionException {
        throw new UnsupportedOperationException("executeAsync not supported here. Use execute()");
    }
    /**
     * Default policy: drop rows that come back with no columns (range ghosts /
     * fully tombstoned rows), UNLESS the caller explicitly set
     * includeEmptyRows, or the predicate asks for zero columns (a key-only
     * scan, where "empty" rows are the expected payload).
     */
    private boolean shouldIgnoreEmptyRows() {
        if (getIncludeEmptyRows() == null) {
            if (getPredicate().isSetSlice_range() && getPredicate().getSlice_range().getCount() == 0) {
                return false;
            }
        }
        else {
            return !getIncludeEmptyRows();
        }
        return true;
    }
    /**
     * Streams all rows to {@code callback}, one task per token range, each task
     * resubmitting itself until its range is exhausted or an error is recorded.
     * Checkpoints are tracked per range start token so an interrupted scan can
     * resume. Blocks until every range completes; rethrows the first recorded
     * ConnectionException.
     */
    @Override
    public void executeWithCallback(final RowCallback<K, C> callback) throws ConnectionException {
        final ThriftKeyspaceImpl keyspace = query.keyspace;
        final Partitioner partitioner = keyspace.getPartitioner();
        // First error recorded by any worker; stops all resubmission.
        final AtomicReference<ConnectionException> error = new AtomicReference<ConnectionException>();
        final boolean bIgnoreTombstones = shouldIgnoreEmptyRows();
        List<Pair<String, String>> ranges;
        if (this.getConcurrencyLevel() != null) {
            // Explicit concurrency: split [startToken, endToken] into nThreads
            // sub-ranges, resuming each from its checkpoint when one exists.
            ranges = Lists.newArrayList();
            int nThreads = this.getConcurrencyLevel();
            List<TokenRange> tokens = partitioner.splitTokenRange(
                    startToken == null ? partitioner.getMinToken() : startToken,
                    endToken == null ? partitioner.getMaxToken() : endToken,
                    nThreads);
            for (TokenRange range : tokens) {
                try {
                    String currentToken = checkpointManager.getCheckpoint(range.getStartToken());
                    if (currentToken == null) {
                        currentToken = range.getStartToken();
                    }
                    else if (currentToken.equals(range.getEndToken())) {
                        // Range already completed in a previous run.
                        continue;
                    }
                    ranges.add(Pair.create(currentToken, range.getEndToken()));
                } catch (Exception e) {
                    throw ThriftConverter.ToConnectionPoolException(e);
                }
            }
        }
        else {
            // No explicit concurrency: one range per ring segment.
            ranges = Lists.transform(keyspace.describeRing(true), new Function<TokenRange, Pair<String, String>> () {
                @Override
                public Pair<String, String> apply(TokenRange input) {
                    return Pair.create(input.getStartToken(), input.getEndToken());
                }
            });
        }
        final CountDownLatch doneSignal = new CountDownLatch(ranges.size());
        for (final Pair<String, String> tokenPair : ranges) {
            // Prepare the range of tokens for this token range
            final KeyRange range = new KeyRange()
                    .setCount(getBlockSize())
                    .setStart_token(tokenPair.left)
                    .setEnd_token(tokenPair.right);
            query.executor.submit(new Callable<Void>() {
                // Tracks whether the repeated boundary row should be trimmed.
                private boolean firstBlock = true;
                @Override
                public Void call() throws Exception {
                    // Self-resubmitting task: one block per execution until the
                    // range is exhausted or any worker has recorded an error.
                    if (error.get() == null && internalRun()) {
                        query.executor.submit(this);
                    }
                    else {
                        doneSignal.countDown();
                    }
                    return null;
                }
                private boolean internalRun() throws Exception {
                    try {
                        // Get the next block
                        List<KeySlice> ks = keyspace.connectionPool.executeWithFailover(
                                new AbstractKeyspaceOperationImpl<List<KeySlice>>(keyspace.tracerFactory
                                        .newTracer(CassandraOperationType.GET_ROWS_RANGE,
                                                columnFamily), query.pinnedHost, keyspace
                                        .getKeyspaceName()) {
                                    @Override
                                    public List<KeySlice> internalExecute(Client client, ConnectionContext context)
                                            throws Exception {
                                        return client.get_range_slices(new ColumnParent()
                                                .setColumn_family(columnFamily.getName()),
                                                predicate, range, ThriftConverter
                                                        .ToThriftConsistencyLevel(query.consistencyLevel));
                                    }
                                    @Override
                                    public ByteBuffer getRowKey() {
                                        if (range.getStart_key() != null)
                                            return ByteBuffer.wrap(range.getStart_key());
                                        return null;
                                    }
                                }, query.retry.duplicate()).getResult();
                        // Notify the callback
                        if (!ks.isEmpty()) {
                            KeySlice lastRow = Iterables.getLast(ks);
                            // A full block implies more rows may remain.
                            boolean bContinue = (ks.size() == getBlockSize());
                            if (getRepeatLastToken()) {
                                if (firstBlock) {
                                    firstBlock = false;
                                }
                                else {
                                    // Drop the boundary row repeated from the previous block.
                                    ks.remove(0);
                                }
                            }
                            if (bIgnoreTombstones) {
                                Iterator<KeySlice> iter = ks.iterator();
                                while (iter.hasNext()) {
                                    if (iter.next().getColumnsSize() == 0)
                                        iter.remove();
                                }
                            }
                            Rows<K, C> rows = new ThriftRowsSliceImpl<K, C>(ks, columnFamily
                                    .getKeySerializer(), columnFamily.getColumnSerializer());
                            try {
                                callback.success(rows);
                            }
                            catch (Throwable t) {
                                // Callback failure aborts the whole scan.
                                ConnectionException ce = ThriftConverter.ToConnectionPoolException(t);
                                error.set(ce);
                                return false;
                            }
                            if (bContinue) {
                                // Determine the start token for the next page
                                String token = partitioner.getTokenForKey(lastRow.bufferForKey()).toString();
                                checkpointManager.trackCheckpoint(tokenPair.left, token);
                                if (getRepeatLastToken()) {
                                    // Back up one token so the boundary row is re-fetched.
                                    range.setStart_token(partitioner.getTokenMinusOne(token));
                                }
                                else {
                                    range.setStart_token(token);
                                }
                            }
                            else {
                                checkpointManager.trackCheckpoint(tokenPair.left, tokenPair.right);
                                return false;
                            }
                        }
                        else {
                            checkpointManager.trackCheckpoint(tokenPair.left, tokenPair.right);
                            return false;
                        }
                    }
                    catch (Exception e) {
                        // callback.failure decides between retrying this block and aborting.
                        ConnectionException ce = ThriftConverter.ToConnectionPoolException(e);
                        if (!callback.failure(ce)) {
                            error.set(ce);
                            return false;
                        }
                    }
                    return true;
                }
            });
        }
        // Block until all threads finish
        try {
            doneSignal.await();
        }
        catch (InterruptedException e) {
            LOG.debug("Execution interrupted on get all rows for keyspace " + keyspace.getKeyspaceName());
        }
        if (error.get() != null) {
            throw error.get();
        }
    }
    public AllRowsQuery<K, C> setExceptionCallback(ExceptionCallback cb) {
        exceptionCallback = cb;
        return this;
    }
    protected ExceptionCallback getExceptionCallback() {
        return this.exceptionCallback;
    }
    // Alias for setConcurrencyLevel (kept for interface compatibility).
    @Override
    public AllRowsQuery<K, C> setThreadCount(int numberOfThreads) {
        setConcurrencyLevel(numberOfThreads);
        return this;
    }
    @Override
    public AllRowsQuery<K, C> setConcurrencyLevel(int numberOfThreads) {
        this.nThreads = numberOfThreads;
        return this;
    }
    @Override
    public AllRowsQuery<K, C> setCheckpointManager(CheckpointManager manager) {
        this.checkpointManager = manager;
        return this;
    }
    // Restricts the scan to the named columns; clears any previously-set slice range.
    @Override
    public AllRowsQuery<K, C> withColumnSlice(C... columns) {
        if (columns != null)
            predicate.setColumn_names(columnFamily.getColumnSerializer().toBytesList(Arrays.asList(columns)))
                    .setSlice_rangeIsSet(false);
        return this;
    }
    @Override
    public AllRowsQuery<K, C> withColumnSlice(Collection<C> columns) {
        if (columns != null)
            predicate.setColumn_names(columnFamily.getColumnSerializer().toBytesList(columns)).setSlice_rangeIsSet(
                    false);
        return this;
    }
    @Override
    public AllRowsQuery<K, C> withColumnRange(C startColumn, C endColumn, boolean reversed, int count) {
        predicate.setSlice_range(ThriftUtils.createSliceRange(columnFamily.getColumnSerializer(), startColumn,
                endColumn, reversed, count));
        return this;
    }
    @Override
    public AllRowsQuery<K, C> withColumnRange(ByteBuffer startColumn, ByteBuffer endColumn, boolean reversed, int count) {
        predicate.setSlice_range(new SliceRange(startColumn, endColumn, reversed, count));
        return this;
    }
    // Column-name slice takes priority over a range when both are present in the ColumnSlice.
    @Override
    public AllRowsQuery<K, C> withColumnSlice(ColumnSlice<C> slice) {
        if (slice.getColumns() != null) {
            predicate.setColumn_names(columnFamily.getColumnSerializer().toBytesList(slice.getColumns()))
                    .setSlice_rangeIsSet(false);
        }
        else {
            predicate.setSlice_range(ThriftUtils.createSliceRange(columnFamily.getColumnSerializer(),
                    slice.getStartColumn(), slice.getEndColumn(), slice.getReversed(), slice.getLimit()));
        }
        return this;
    }
    @Override
    public AllRowsQuery<K, C> withColumnRange(ByteBufferRange range) {
        predicate.setSlice_range(new SliceRange().setStart(range.getStart()).setFinish(range.getEnd())
                .setCount(range.getLimit()).setReversed(range.isReversed()));
        return this;
    }
    // setBlockSize and setRowLimit are synonyms: both size the per-call page.
    @Override
    public AllRowsQuery<K, C> setBlockSize(int blockSize) {
        return setRowLimit(blockSize);
    }
    @Override
    public AllRowsQuery<K, C> setRowLimit(int rowLimit) {
        this.blockSize = rowLimit;
        return this;
    }
    public int getBlockSize() {
        return blockSize;
    }
    @Override
    public AllRowsQuery<K, C> setRepeatLastToken(boolean repeatLastToken) {
        this.repeatLastToken = repeatLastToken;
        return this;
    }
    public boolean getRepeatLastToken() {
        return this.repeatLastToken;
    }
    protected Integer getConcurrencyLevel() {
        return this.nThreads;
    }
    public AllRowsQuery<K, C> setIncludeEmptyRows(boolean flag) {
        this.includeEmptyRows = flag;
        return this;
    }
    public String getStartToken() {
        return this.startToken;
    }
    public String getEndToken() {
        return this.endToken;
    }
    @Override
    public AllRowsQuery<K, C> forTokenRange(BigInteger startToken, BigInteger endToken) {
        return forTokenRange(startToken.toString(), endToken.toString());
    }
    public AllRowsQuery<K, C> forTokenRange(String startToken, String endToken) {
        this.startToken = startToken;
        this.endToken = endToken;
        return this;
    }
    SlicePredicate getPredicate() {
        return predicate;
    }
    Boolean getIncludeEmptyRows() {
        return this.includeEmptyRows;
    }
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.protocol.compatibility;
import com.google.common.collect.ImmutableMap;
import com.hazelcast.cache.impl.CacheEventData;
import com.hazelcast.client.impl.client.DistributedObjectInfo;
import com.hazelcast.client.impl.protocol.codec.builtin.CustomTypeFactory;
import com.hazelcast.client.impl.protocol.codec.holder.AnchorDataListHolder;
import com.hazelcast.client.impl.protocol.codec.holder.CacheConfigHolder;
import com.hazelcast.client.impl.protocol.codec.holder.PagingPredicateHolder;
import com.hazelcast.client.impl.protocol.exception.ErrorHolder;
import com.hazelcast.client.impl.protocol.task.dynamicconfig.EvictionConfigHolder;
import com.hazelcast.client.impl.protocol.task.dynamicconfig.ListenerConfigHolder;
import com.hazelcast.client.impl.protocol.task.dynamicconfig.MapStoreConfigHolder;
import com.hazelcast.client.impl.protocol.task.dynamicconfig.NearCacheConfigHolder;
import com.hazelcast.client.impl.protocol.task.dynamicconfig.PredicateConfigHolder;
import com.hazelcast.client.impl.protocol.task.dynamicconfig.QueryCacheConfigHolder;
import com.hazelcast.client.impl.protocol.task.dynamicconfig.QueueStoreConfigHolder;
import com.hazelcast.client.impl.protocol.task.dynamicconfig.RingbufferStoreConfigHolder;
import com.hazelcast.cluster.Address;
import com.hazelcast.config.AttributeConfig;
import com.hazelcast.config.BitmapIndexOptions;
import com.hazelcast.config.CacheSimpleConfig.ExpiryPolicyFactoryConfig.DurationConfig;
import com.hazelcast.config.CacheSimpleConfig.ExpiryPolicyFactoryConfig.TimedExpiryPolicyFactoryConfig;
import com.hazelcast.config.CacheSimpleEntryListenerConfig;
import com.hazelcast.config.EventJournalConfig;
import com.hazelcast.config.HotRestartConfig;
import com.hazelcast.config.IndexConfig;
import com.hazelcast.config.MergePolicyConfig;
import com.hazelcast.config.MerkleTreeConfig;
import com.hazelcast.config.NearCachePreloaderConfig;
import com.hazelcast.config.WanReplicationRef;
import com.hazelcast.cp.CPMember;
import com.hazelcast.cp.internal.CPMemberInfo;
import com.hazelcast.cp.internal.RaftGroupId;
import com.hazelcast.instance.EndpointQualifier;
import com.hazelcast.instance.ProtocolType;
import com.hazelcast.internal.cluster.MemberInfo;
import com.hazelcast.internal.management.dto.ClientBwListEntryDTO;
import com.hazelcast.internal.management.dto.MCEventDTO;
import com.hazelcast.internal.partition.MigrationStateImpl;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.internal.serialization.impl.HeapData;
import com.hazelcast.internal.serialization.impl.compact.FieldDescriptor;
import com.hazelcast.internal.serialization.impl.compact.Schema;
import com.hazelcast.map.impl.SimpleEntryView;
import com.hazelcast.map.impl.querycache.event.DefaultQueryCacheEventData;
import com.hazelcast.map.impl.querycache.event.QueryCacheEventData;
import com.hazelcast.partition.MigrationState;
import com.hazelcast.scheduledexecutor.ScheduledTaskHandler;
import com.hazelcast.scheduledexecutor.impl.ScheduledTaskHandlerImpl;
import com.hazelcast.sql.SqlColumnMetadata;
import com.hazelcast.sql.SqlColumnType;
import com.hazelcast.sql.impl.QueryId;
import com.hazelcast.sql.impl.client.SqlError;
import com.hazelcast.sql.impl.client.SqlPage;
import com.hazelcast.transaction.impl.xa.SerializableXID;
import com.hazelcast.version.MemberVersion;
import javax.transaction.xa.Xid;
import java.lang.reflect.Array;
import java.net.UnknownHostException;
import java.util.AbstractMap;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.UUID;
public class ReferenceObjects {
public static boolean isEqual(Object a, Object b) {
if (a == b) {
return true;
}
if (a == null || b == null) {
return false;
}
if (a.getClass().isArray() && b.getClass().isArray()) {
int length = Array.getLength(a);
if (length > 0 && !a.getClass().getComponentType().equals(b.getClass().getComponentType())) {
return false;
}
if (Array.getLength(b) != length) {
return false;
}
for (int i = 0; i < length; i++) {
Object aElement = Array.get(a, i);
Object bElement = Array.get(b, i);
if (aElement instanceof StackTraceElement && bElement instanceof StackTraceElement) {
if (!isEqualStackTrace((StackTraceElement) aElement, (StackTraceElement) bElement)) {
return false;
}
}
if (!isEqual(aElement, bElement)) {
return false;
}
}
return true;
}
if (a instanceof List && b instanceof List) {
ListIterator e1 = ((List) a).listIterator();
ListIterator e2 = ((List) b).listIterator();
while (e1.hasNext() && e2.hasNext()) {
Object o1 = e1.next();
Object o2 = e2.next();
if (!isEqual(o1, o2)) {
return false;
}
}
return !(e1.hasNext() || e2.hasNext());
}
if (a instanceof Entry && b instanceof Entry) {
final Entry entryA = (Entry) a;
final Entry entryB = (Entry) b;
return isEqual(entryA.getKey(), entryB.getKey()) && isEqual(entryA.getValue(), entryB.getValue());
}
// following classes are list elements and have to be explicitly cast
if (a instanceof ListenerConfigHolder && b instanceof ListenerConfigHolder) {
return isEqual((ListenerConfigHolder) a, (ListenerConfigHolder) b);
}
if (a instanceof IndexConfig && b instanceof IndexConfig) {
return isEqual((IndexConfig) a, (IndexConfig) b);
}
if (a instanceof AttributeConfig && b instanceof AttributeConfig) {
return isEqual((AttributeConfig) a, (AttributeConfig) b);
}
if (a instanceof QueryCacheConfigHolder && b instanceof QueryCacheConfigHolder) {
return isEqual((QueryCacheConfigHolder) a, (QueryCacheConfigHolder) b);
}
if (a instanceof CacheSimpleEntryListenerConfig && b instanceof CacheSimpleEntryListenerConfig) {
return isEqual((CacheSimpleEntryListenerConfig) a, (CacheSimpleEntryListenerConfig) b);
}
return a.equals(b);
}
public static boolean isEqual(CacheConfigHolder a, CacheConfigHolder b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (!a.getName().equals(b.getName())) {
return false;
}
if (!Objects.equals(a.getManagerPrefix(), b.getManagerPrefix())) {
return false;
}
if (!Objects.equals(a.getUriString(), b.getUriString())) {
return false;
}
if (a.getBackupCount() != b.getBackupCount()) {
return false;
}
if (a.getAsyncBackupCount() != b.getAsyncBackupCount()) {
return false;
}
if (!a.getInMemoryFormat().equals(b.getInMemoryFormat())) {
return false;
}
if (!isEqual(a.getEvictionConfigHolder(), b.getEvictionConfigHolder())) {
return false;
}
if (!isEqual(a.getWanReplicationRef(), b.getWanReplicationRef())) {
return false;
}
if (!a.getKeyClassName().equals(b.getKeyClassName())) {
return false;
}
if (!a.getValueClassName().equals(b.getValueClassName())) {
return false;
}
if (!Objects.equals(a.getCacheLoaderFactory(), b.getCacheLoaderFactory())) {
return false;
}
if (!Objects.equals(a.getCacheWriterFactory(), b.getCacheWriterFactory())) {
return false;
}
if (!a.getExpiryPolicyFactory().equals(b.getExpiryPolicyFactory())) {
return false;
}
if (a.isReadThrough() != b.isReadThrough()) {
return false;
}
if (a.isWriteThrough() != b.isWriteThrough()) {
return false;
}
if (a.isStoreByValue() != b.isStoreByValue()) {
return false;
}
if (a.isManagementEnabled() != b.isManagementEnabled()) {
return false;
}
if (a.isStatisticsEnabled() != b.isStatisticsEnabled()) {
return false;
}
if (!isEqual(a.getHotRestartConfig(), b.getHotRestartConfig())) {
return false;
}
if (!isEqual(a.getEventJournalConfig(), b.getEventJournalConfig())) {
return false;
}
if (!Objects.equals(a.getSplitBrainProtectionName(), b.getSplitBrainProtectionName())) {
return false;
}
if (!Objects.equals(a.getListenerConfigurations(), b.getListenerConfigurations())) {
return false;
}
if (!isEqual(a.getMergePolicyConfig(), b.getMergePolicyConfig())) {
return false;
}
return a.isDisablePerEntryInvalidationEvents() == b.isDisablePerEntryInvalidationEvents();
}
public static boolean isEqual(WanReplicationRef a, WanReplicationRef b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (a.isRepublishingEnabled() != b.isRepublishingEnabled()) {
return false;
}
if (!a.getName().equals(b.getName())) {
return false;
}
if (!a.getMergePolicyClassName().equals(b.getMergePolicyClassName())) {
return false;
}
return a.getFilters() != null ? a.getFilters().equals(b.getFilters()) : b.getFilters() == null;
}
public static boolean isEqual(NearCachePreloaderConfig a, NearCachePreloaderConfig b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (a.isEnabled() != b.isEnabled()) {
return false;
}
if (a.getStoreInitialDelaySeconds() != b.getStoreInitialDelaySeconds()) {
return false;
}
if (a.getStoreIntervalSeconds() != b.getStoreIntervalSeconds()) {
return false;
}
return a.getDirectory() != null ? a.getDirectory().equals(b.getDirectory()) : b.getDirectory() == null;
}
public static boolean isEqual(NearCacheConfigHolder a, NearCacheConfigHolder that) {
if (a == that) {
return true;
}
if (that == null) {
return false;
}
if (a.isSerializeKeys() != that.isSerializeKeys()) {
return false;
}
if (a.isInvalidateOnChange() != that.isInvalidateOnChange()) {
return false;
}
if (a.getTimeToLiveSeconds() != that.getTimeToLiveSeconds()) {
return false;
}
if (a.getMaxIdleSeconds() != that.getMaxIdleSeconds()) {
return false;
}
if (a.isCacheLocalEntries() != that.isCacheLocalEntries()) {
return false;
}
if (!a.getName().equals(that.getName())) {
return false;
}
if (!a.getInMemoryFormat().equals(that.getInMemoryFormat())) {
return false;
}
if (!isEqual(a.getEvictionConfigHolder(), that.getEvictionConfigHolder())) {
return false;
}
if (!a.getLocalUpdatePolicy().equals(that.getLocalUpdatePolicy())) {
return false;
}
return a.getPreloaderConfig() != null ? isEqual(a.getPreloaderConfig(), that.getPreloaderConfig())
: that.getPreloaderConfig() == null;
}
public static boolean isEqual(EvictionConfigHolder a, EvictionConfigHolder b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (a.getSize() != b.getSize()) {
return false;
}
if (!a.getMaxSizePolicy().equals(b.getMaxSizePolicy())) {
return false;
}
if (!a.getEvictionPolicy().equals(b.getEvictionPolicy())) {
return false;
}
if (a.getComparatorClassName() != null ? !a.getComparatorClassName().equals(b.getComparatorClassName()) :
b.getComparatorClassName() != null) {
return false;
}
return a.getComparator() != null ? a.getComparator().equals(b.getComparator())
: b.getComparator() == null;
}
public static boolean isEqual(ListenerConfigHolder a, ListenerConfigHolder b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (a.isIncludeValue() != b.isIncludeValue()) {
return false;
}
if (a.isLocal() != b.isLocal()) {
return false;
}
if (a.getListenerType() != b.getListenerType()) {
return false;
}
if (a.getClassName() != null ? !a.getClassName().equals(b.getClassName())
: b.getClassName() != null) {
return false;
}
return a.getListenerImplementation() != null
? a.getListenerImplementation().equals(b.getListenerImplementation())
: b.getListenerImplementation() == null;
}
public static boolean isEqual(IndexConfig a, IndexConfig that) {
if (a == that) {
return true;
}
if (that == null) {
return false;
}
if (a.getType() != that.getType()) {
return false;
}
if (a.getName() != null ? !a.getName().equals(that.getName()) : that.getName() != null) {
return false;
}
return a.getAttributes() != null ? a.getAttributes().equals(that.getAttributes()) : that.getAttributes() == null;
}
public static boolean isEqual(AttributeConfig a, AttributeConfig that) {
if (a == that) {
return true;
}
if (that == null) {
return false;
}
if (a.getName() != null ? !a.getName().equals(that.getName()) : that.getName() != null) {
return false;
}
return a.getExtractorClassName() != null ? a.getExtractorClassName().equals(that.getExtractorClassName())
: that.getExtractorClassName() == null;
}
public static boolean isEqual(MapStoreConfigHolder a, MapStoreConfigHolder b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (a.isEnabled() != b.isEnabled()) {
return false;
}
if (a.isWriteCoalescing() != b.isWriteCoalescing()) {
return false;
}
if (a.getWriteBatchSize() != b.getWriteBatchSize()) {
return false;
}
if (a.getClassName() != null ? !a.getClassName().equals(b.getClassName()) : b.getClassName() != null) {
return false;
}
if (a.getFactoryClassName() != null ? !a.getFactoryClassName().equals(b.getFactoryClassName())
: b.getFactoryClassName() != null) {
return false;
}
if (a.getImplementation() != null ? !a.getImplementation().equals(b.getImplementation()) :
b.getImplementation() != null) {
return false;
}
if (a.getFactoryImplementation() != null ? !a.getFactoryImplementation().equals(b.getFactoryImplementation())
: b.getFactoryImplementation() != null) {
return false;
}
if (a.getProperties() != null ? !a.getProperties().equals(b.getProperties()) : b.getProperties() != null) {
return false;
}
return a.getInitialLoadMode() != null ? a.getInitialLoadMode().equals(b.getInitialLoadMode()) :
b.getInitialLoadMode() == null;
}
public static boolean isEqual(QueryCacheConfigHolder a, QueryCacheConfigHolder b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (a.getBatchSize() != b.getBatchSize()) {
return false;
}
if (a.getBufferSize() != b.getBufferSize()) {
return false;
}
if (a.getDelaySeconds() != b.getDelaySeconds()) {
return false;
}
if (a.isIncludeValue() != b.isIncludeValue()) {
return false;
}
if (a.isPopulate() != b.isPopulate()) {
return false;
}
if (a.isCoalesce() != b.isCoalesce()) {
return false;
}
if (!a.getInMemoryFormat().equals(b.getInMemoryFormat())) {
return false;
}
if (!a.getName().equals(b.getName())) {
return false;
}
if (!isEqual(a.getPredicateConfigHolder(), b.getPredicateConfigHolder())) {
return false;
}
if (!isEqual(a.getEvictionConfigHolder(), b.getEvictionConfigHolder())) {
return false;
}
if (a.getListenerConfigs() != null ? !isEqual(a.getListenerConfigs(), b.getListenerConfigs())
: b.getListenerConfigs() != null) {
return false;
}
return a.getIndexConfigs() != null ? isEqual(a.getIndexConfigs(), b.getIndexConfigs())
: b.getIndexConfigs() == null;
}
public static boolean isEqual(PredicateConfigHolder a, PredicateConfigHolder b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (a.getClassName() != null ? !a.getClassName().equals(b.getClassName()) : a.getClassName() != null) {
return false;
}
if (a.getSql() != null ? !a.getSql().equals(b.getSql()) : b.getSql() != null) {
return false;
}
return a.getImplementation() != null ? a.getImplementation().equals(b.getImplementation()) :
b.getImplementation() == null;
}
public static boolean isEqual(QueueStoreConfigHolder a, QueueStoreConfigHolder b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (a.isEnabled() != b.isEnabled()) {
return false;
}
if (a.getClassName() != null ? !a.getClassName().equals(b.getClassName()) : b.getClassName() != null) {
return false;
}
if (a.getFactoryClassName() != null ? !a.getFactoryClassName().equals(b.getFactoryClassName()) :
b.getFactoryClassName() != null) {
return false;
}
if (a.getImplementation() != null ? !a.getImplementation().equals(b.getImplementation()) :
b.getImplementation() != null) {
return false;
}
if (a.getFactoryClassName() != null ? !a.getFactoryImplementation().equals(b.getFactoryImplementation())
: b.getFactoryImplementation() != null) {
return false;
}
return a.getProperties() != null ? a.getProperties().equals(b.getProperties())
: b.getProperties() == null;
}
public static boolean isEqual(HotRestartConfig a, HotRestartConfig b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (a.isEnabled() != b.isEnabled()) {
return false;
}
return a.isFsync() == b.isFsync();
}
public static boolean isEqual(TimedExpiryPolicyFactoryConfig a, TimedExpiryPolicyFactoryConfig b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (a.getExpiryPolicyType() != b.getExpiryPolicyType()) {
return false;
}
if (a.getDurationConfig().getDurationAmount() != b.getDurationConfig().getDurationAmount()) {
return false;
}
return a.getDurationConfig().getTimeUnit() == b.getDurationConfig().getTimeUnit();
}
public static boolean isEqual(CacheSimpleEntryListenerConfig a, CacheSimpleEntryListenerConfig b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (a.isOldValueRequired() != b.isOldValueRequired()) {
return false;
}
if (a.isSynchronous() != b.isSynchronous()) {
return false;
}
if (!a.getCacheEntryEventFilterFactory().equals(b.getCacheEntryEventFilterFactory())) {
return false;
}
return a.getCacheEntryListenerFactory().equals(b.getCacheEntryListenerFactory());
}
/**
 * Deep field-by-field equality check for {@link RingbufferStoreConfigHolder}, used because
 * the holder type does not override {@code equals} itself.
 */
public static boolean isEqual(RingbufferStoreConfigHolder a, RingbufferStoreConfigHolder b) {
    if (a == b) {
        return true;
    }
    if (b == null) {
        return false;
    }
    if (a.isEnabled() != b.isEnabled()) {
        return false;
    }
    // Each optional field is compared null-safely: a null field only matches a null field.
    if (a.getClassName() == null ? b.getClassName() != null
            : !a.getClassName().equals(b.getClassName())) {
        return false;
    }
    if (a.getFactoryClassName() == null ? b.getFactoryClassName() != null
            : !a.getFactoryClassName().equals(b.getFactoryClassName())) {
        return false;
    }
    if (a.getImplementation() == null ? b.getImplementation() != null
            : !a.getImplementation().equals(b.getImplementation())) {
        return false;
    }
    if (a.getFactoryImplementation() == null ? b.getFactoryImplementation() != null
            : !a.getFactoryImplementation().equals(b.getFactoryImplementation())) {
        return false;
    }
    if (a.getProperties() == null) {
        return b.getProperties() == null;
    }
    return a.getProperties().equals(b.getProperties());
}
/**
 * Compares two stack trace elements field by field.
 *
 * <p>Deliberately not using {@code StackTraceElement.equals()}: on the IBM JDK, stack
 * trace elements with a null method name never compare equal.
 */
private static boolean isEqualStackTrace(StackTraceElement lhs, StackTraceElement rhs) {
    return isEqual(lhs.getClassName(), rhs.getClassName())
            && isEqual(lhs.getMethodName(), rhs.getMethodName())
            && isEqual(lhs.getFileName(), rhs.getFileName())
            && isEqual(lhs.getLineNumber(), rhs.getLineNumber());
}
// Static values below should not be a random value, because the values are used when generating compatibility files and
// when testing against them. Random values causes test failures.
// --- Primitive and basic reference fixtures ---
public static boolean aBoolean = true;
public static byte aByte = 113;
public static int anInt = 25;
public static int anEnum = 1;
public static long aLong = -50992225L;
public static UUID aUUID = new UUID(123456789, 987654321);
public static byte[] aByteArray = new byte[]{aByte};
public static long[] aLongArray = new long[]{aLong};
public static String aString = "localhost";
public static Data aData = new HeapData("111313123131313131".getBytes());
// --- Singleton entry lists covering the key/value type combinations used by the protocol ---
public static List<Map.Entry<Integer, UUID>> aListOfIntegerToUUID
        = Collections.singletonList(new AbstractMap.SimpleEntry<>(anInt, aUUID));
public static List<Map.Entry<Integer, Long>> aListOfIntegerToLong
        = Collections.singletonList(new AbstractMap.SimpleEntry<>(anInt, aLong));
public static List<Map.Entry<Integer, Integer>> aListOfIntegerToInteger
        = Collections.singletonList(new AbstractMap.SimpleEntry<>(anInt, anInt));
public static List<Map.Entry<UUID, Long>> aListOfUuidToLong
        = Collections.singletonList(new AbstractMap.SimpleEntry<>(aUUID, aLong));
public static List<Map.Entry<UUID, UUID>> aListOfUUIDToUUID
        = Collections.singletonList(new AbstractMap.SimpleEntry<>(aUUID, aUUID));
public static List<Integer> aListOfIntegers = Collections.singletonList(anInt);
public static List<Long> aListOfLongs = Collections.singletonList(aLong);
public static List<UUID> aListOfUUIDs = Collections.singletonList(aUUID);
// anAddress / aCpMember are initialized in the static block below because the
// Address constructor declares a checked UnknownHostException.
public static Address anAddress;
public static CPMember aCpMember;
public static List<CPMember> aListOfCpMembers;
public static MigrationState aMigrationState = new MigrationStateImpl(aLong, anInt, anInt, aLong);
public static FieldDescriptor aFieldDescriptor = CustomTypeFactory.createFieldDescriptor(aString, anInt);
public static List<FieldDescriptor> aListOfFieldDescriptors = Collections.singletonList(aFieldDescriptor);
public static Schema aSchema = CustomTypeFactory.createSchema(aString, aListOfFieldDescriptors);
public static List<Schema> aListOfSchemas = Collections.singletonList(aSchema);
static {
    try {
        anAddress = new Address(aString, anInt);
    } catch (UnknownHostException e) {
        // aString is "localhost", so this is not expected to happen in practice.
        e.printStackTrace();
    }
    aCpMember = new CPMemberInfo(aUUID, anAddress);
    aListOfCpMembers = Collections.singletonList(aCpMember);
}
// --- Composite collection fixtures built from the constants above ---
public static List<Map.Entry<UUID, List<Integer>>> aListOfUUIDToListOfIntegers
        = Collections.singletonList(new AbstractMap.SimpleEntry<>(aUUID, aListOfIntegers));
public static Map<String, String> aMapOfStringToString = Collections.singletonMap(aString, aString);
public static List<String> aListOfStrings = Collections.singletonList(aString);
public static StackTraceElement aStackTraceElement = new StackTraceElement(aString, aString, aString, anInt);
public static List<StackTraceElement> aListOfStackTraceElements = Collections.singletonList(aStackTraceElement);
public static CacheEventData aCacheEventData
        = CustomTypeFactory.createCacheEventData(aString, anEnum, aData, aData, aData, aBoolean);
public static DistributedObjectInfo aDistributedObjectInfo = new DistributedObjectInfo(aString, aString);
public static DefaultQueryCacheEventData aQueryCacheEventData;
public static MCEventDTO aMCEvent = new MCEventDTO(aLong, anInt, aString);
public static List<MCEventDTO> aListOfMCEvents = Collections.singletonList(aMCEvent);
// DefaultQueryCacheEventData has no all-args constructor, so populate it via setters.
static {
    aQueryCacheEventData = new DefaultQueryCacheEventData();
    aQueryCacheEventData.setDataKey(aData);
    aQueryCacheEventData.setDataNewValue(aData);
    aQueryCacheEventData.setSequence(aLong);
    aQueryCacheEventData.setEventType(anInt);
    aQueryCacheEventData.setPartitionId(anInt);
}
public static RaftGroupId aRaftGroupId = new RaftGroupId(aString, aLong, aLong);
public static ScheduledTaskHandler aScheduledTaskHandler = new ScheduledTaskHandlerImpl(aUUID, anInt, aString, aString);
public static SimpleEntryView<Data, Data> aSimpleEntryView = new SimpleEntryView<>(aData, aData);
// Populate every optional metadata field of the entry view so the fixture exercises them all.
static {
    aSimpleEntryView.setCost(aLong);
    aSimpleEntryView.setCreationTime(aLong);
    aSimpleEntryView.setExpirationTime(aLong);
    aSimpleEntryView.setHits(aLong);
    aSimpleEntryView.setLastAccessTime(aLong);
    aSimpleEntryView.setLastStoredTime(aLong);
    aSimpleEntryView.setLastUpdateTime(aLong);
    aSimpleEntryView.setVersion(aLong);
    aSimpleEntryView.setTtl(aLong);
    aSimpleEntryView.setMaxIdle(aLong);
}
public static WanReplicationRef aWanReplicationRef = new WanReplicationRef(aString, aString, aListOfStrings, aBoolean);
public static Xid anXid = new SerializableXID(anInt, aByteArray, aByteArray);
public static ErrorHolder anErrorHolder = new ErrorHolder(anInt, aString, aString, aListOfStackTraceElements);
public static CacheSimpleEntryListenerConfig aCacheSimpleEntryListenerConfig;
static {
    aCacheSimpleEntryListenerConfig = new CacheSimpleEntryListenerConfig();
    aCacheSimpleEntryListenerConfig.setOldValueRequired(aBoolean);
    aCacheSimpleEntryListenerConfig.setSynchronous(aBoolean);
    aCacheSimpleEntryListenerConfig.setCacheEntryListenerFactory(aString);
    aCacheSimpleEntryListenerConfig.setCacheEntryEventFilterFactory(aString);
}
// --- Config-object fixtures; setter-based types are filled in static blocks ---
public static EventJournalConfig anEventJournalConfig;
static {
    anEventJournalConfig = new EventJournalConfig();
    anEventJournalConfig.setEnabled(aBoolean);
    anEventJournalConfig.setCapacity(anInt);
    anEventJournalConfig.setTimeToLiveSeconds(anInt);
}
public static EvictionConfigHolder anEvictionConfigHolder = new EvictionConfigHolder(anInt, aString, aString, aString, aData);
public static HotRestartConfig aHotRestartConfig;
static {
    aHotRestartConfig = new HotRestartConfig();
    aHotRestartConfig.setEnabled(aBoolean);
    aHotRestartConfig.setFsync(aBoolean);
}
public static MerkleTreeConfig aMerkleTreeConfig;
static {
    aMerkleTreeConfig = new MerkleTreeConfig();
    aMerkleTreeConfig.setEnabled(aBoolean);
    aMerkleTreeConfig.setDepth(anInt);
}
public static ListenerConfigHolder aListenerConfigHolder = new ListenerConfigHolder(ListenerConfigHolder.ListenerConfigType.ITEM, aData, aString, aBoolean, aBoolean);
public static AttributeConfig anAttributeConfig = new AttributeConfig(aString, aString);
public static BitmapIndexOptions aBitmapIndexOptions;
static {
    aBitmapIndexOptions = new BitmapIndexOptions();
    aBitmapIndexOptions.setUniqueKey(aString);
    aBitmapIndexOptions.setUniqueKeyTransformation(BitmapIndexOptions.UniqueKeyTransformation.LONG);
}
public static IndexConfig anIndexConfig = CustomTypeFactory.createIndexConfig(aString, anEnum, aListOfStrings, aBitmapIndexOptions);
public static MapStoreConfigHolder aMapStoreConfigHolder = new MapStoreConfigHolder(aBoolean, aBoolean, anInt, anInt, aString, aData, aString, aData, aMapOfStringToString, aString);
public static NearCachePreloaderConfig aNearCachePreloaderConfig = new NearCachePreloaderConfig(aBoolean, aString);
static {
    aNearCachePreloaderConfig.setStoreInitialDelaySeconds(anInt);
    aNearCachePreloaderConfig.setStoreIntervalSeconds(anInt);
}
public static NearCacheConfigHolder aNearCacheConfigHolder = new NearCacheConfigHolder(aString, aString, aBoolean, aBoolean, anInt, anInt, anEvictionConfigHolder, aBoolean, aString, aNearCachePreloaderConfig);
public static PredicateConfigHolder aPredicateConfigHolder = new PredicateConfigHolder(aString, aString, aData);
public static List<ListenerConfigHolder> aListOfListenerConfigHolders = Collections.singletonList(aListenerConfigHolder);
public static List<IndexConfig> aListOfIndexConfigs = Collections.singletonList(anIndexConfig);
public static QueryCacheConfigHolder aQueryCacheConfigHolder = new QueryCacheConfigHolder(anInt, anInt, anInt, aBoolean, aBoolean, aBoolean, aString, aString, aPredicateConfigHolder, anEvictionConfigHolder, aListOfListenerConfigHolders, aListOfIndexConfigs);
public static QueueStoreConfigHolder aQueueStoreConfigHolder = new QueueStoreConfigHolder(aString, aString, aData, aData, aMapOfStringToString, aBoolean);
public static RingbufferStoreConfigHolder aRingbufferStoreConfigHolder = new RingbufferStoreConfigHolder(aString, aString, aData, aData, aMapOfStringToString, aBoolean);
public static DurationConfig aDurationConfig = CustomTypeFactory.createDurationConfig(aLong, anEnum);
public static TimedExpiryPolicyFactoryConfig aTimedExpiryPolicyFactoryConfig = CustomTypeFactory.createTimedExpiryPolicyFactoryConfig(anEnum, aDurationConfig);
public static ClientBwListEntryDTO aClientBwListEntry = CustomTypeFactory.createClientBwListEntry(anEnum, aString);
// --- Remaining list/entry fixtures and compound protocol objects ---
public static List<Map.Entry<String, String>> aListOfStringToString
        = Collections.singletonList(new AbstractMap.SimpleEntry<>(aString, aString));
public static List<Map.Entry<String, byte[]>> aListOfStringToByteArray
        = Collections.singletonList(new AbstractMap.SimpleEntry<>(aString, aByteArray));
public static List<Map.Entry<Long, byte[]>> aListOfLongToByteArray
        = Collections.singletonList(new AbstractMap.SimpleEntry<>(aLong, aByteArray));
public static List<Map.Entry<String, List<Map.Entry<Integer, Long>>>> aListOfStringToListOfIntegerToLong
        = Collections.singletonList(new AbstractMap.SimpleEntry<>(aString, aListOfIntegerToLong));
public static List<Map.Entry<Data, Data>> aListOfDataToData
        = Collections.singletonList(new AbstractMap.SimpleEntry<>(aData, aData));
public static List<CacheEventData> aListOfCacheEventData = Collections.singletonList(aCacheEventData);
public static List<CacheSimpleEntryListenerConfig> aListOfCacheSimpleEntryListenerConfigs
        = Collections.singletonList(aCacheSimpleEntryListenerConfig);
public static List<Data> aListOfData = Collections.singletonList(aData);
public static List<Object> aListOfObject = Collections.singletonList(anInt);
public static List<Collection<Data>> aListOfListOfData = Collections.singletonList(aListOfData);
public static List<Collection<Object>> aListOfListOfObject = Collections.singletonList(aListOfObject);
public static Collection<Map.Entry<Data, Collection<Data>>> aListOfDataToListOfData
        = Collections.singletonList(new AbstractMap.SimpleEntry<>(aData, aListOfData));
public static List<DistributedObjectInfo> aListOfDistributedObjectInfo = Collections.singletonList(aDistributedObjectInfo);
public static List<AttributeConfig> aListOfAttributeConfigs = Collections.singletonList(anAttributeConfig);
public static List<QueryCacheConfigHolder> aListOfQueryCacheConfigHolders = Collections.singletonList(aQueryCacheConfigHolder);
public static List<QueryCacheEventData> aListOfQueryCacheEventData = Collections.singletonList(aQueryCacheEventData);
public static List<ScheduledTaskHandler> aListOfScheduledTaskHandler = Collections.singletonList(aScheduledTaskHandler);
public static List<Xid> aListOfXids = Collections.singletonList(anXid);
public static List<ClientBwListEntryDTO> aListOfClientBwListEntries = Collections.singletonList(aClientBwListEntry);
public static MergePolicyConfig aMergePolicyConfig = new MergePolicyConfig(aString, anInt);
public static CacheConfigHolder aCacheConfigHolder = new CacheConfigHolder(aString, aString, aString, anInt, anInt,
        aString, anEvictionConfigHolder, aWanReplicationRef, aString, aString, aData, aData, aData, aBoolean,
        aBoolean, aBoolean, aBoolean, aBoolean, aHotRestartConfig, anEventJournalConfig, aString, aListOfData,
        aMergePolicyConfig, aBoolean, aListOfListenerConfigHolders, aBoolean, aMerkleTreeConfig);
private static MemberVersion aMemberVersion = new MemberVersion(aByte, aByte, aByte);
public static Collection<MemberInfo> aListOfMemberInfos = Collections.singletonList(new MemberInfo(anAddress, aUUID, aMapOfStringToString, aBoolean, aMemberVersion,
        ImmutableMap.of(EndpointQualifier.resolve(ProtocolType.WAN, "localhost"), anAddress)));
public static AnchorDataListHolder anAnchorDataListHolder = new AnchorDataListHolder(aListOfIntegers, aListOfDataToData);
public static PagingPredicateHolder aPagingPredicateHolder = new PagingPredicateHolder(anAnchorDataListHolder, aData, aData,
        anInt, anInt, aByte, aData);
// --- SQL protocol fixtures ---
public static QueryId anSqlQueryId = new QueryId(aLong, aLong, aLong, aLong);
public static SqlColumnMetadata anSqlColumnMetadata = CustomTypeFactory.createSqlColumnMetadata(aString, SqlColumnType.BOOLEAN.getId(), aBoolean, aBoolean);
public static List<SqlColumnMetadata> aListOfSqlColumnMetadata = Collections.singletonList(anSqlColumnMetadata);
public static SqlError anSqlError = new SqlError(anInt, aString, aUUID, aBoolean, aString);
public static SqlPage aSqlPage = SqlPage.fromColumns(Collections.singletonList(SqlColumnType.INTEGER), Collections.singletonList(Arrays.asList(1, 2, 3, 4)), true);
}
| |
/*
* Copyright 2014, Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.grpc.stub;
import io.grpc.Metadata;
import io.grpc.MethodDescriptor;
import io.grpc.ServerCall;
import io.grpc.ServerCallHandler;
import io.grpc.Status;
/**
 * Utility functions for adapting {@link ServerCallHandler}s to application service implementation,
 * meant to be used by the generated code.
 */
public class ServerCalls {
  // Utility class: all members are static, so instantiation is disallowed.
  private ServerCalls() {
  }
  /**
   * Creates a {@code ServerCallHandler} for a unary call method of the service.
   *
   * @param method an adaptor to the actual method on the service implementation.
   */
  public static <ReqT, RespT> ServerCallHandler<ReqT, RespT> asyncUnaryCall(
      final UnaryMethod<ReqT, RespT> method) {
    return asyncUnaryRequestCall(method);
  }
  /**
   * Creates a {@code ServerCallHandler} for a server streaming method of the service.
   *
   * @param method an adaptor to the actual method on the service implementation.
   */
  public static <ReqT, RespT> ServerCallHandler<ReqT, RespT> asyncServerStreamingCall(
      final ServerStreamingMethod<ReqT, RespT> method) {
    return asyncUnaryRequestCall(method);
  }
  /**
   * Creates a {@code ServerCallHandler} for a client streaming method of the service.
   *
   * @param method an adaptor to the actual method on the service implementation.
   */
  public static <ReqT, RespT> ServerCallHandler<ReqT, RespT> asyncClientStreamingCall(
      final ClientStreamingMethod<ReqT, RespT> method) {
    return asyncStreamingRequestCall(method);
  }
  /**
   * Creates a {@code ServerCallHandler} for a bidi streaming method of the service.
   *
   * @param method an adaptor to the actual method on the service implementation.
   */
  public static <ReqT, RespT> ServerCallHandler<ReqT, RespT> asyncBidiStreamingCall(
      final BidiStreamingMethod<ReqT, RespT> method) {
    return asyncStreamingRequestCall(method);
  }
  /**
   * Adaptor to a unary call method.
   */
  public static interface UnaryMethod<ReqT, RespT> extends UnaryRequestMethod<ReqT, RespT> {
  }
  /**
   * Adaptor to a server streaming method.
   */
  public static interface ServerStreamingMethod<ReqT, RespT>
      extends UnaryRequestMethod<ReqT, RespT> {
  }
  /**
   * Adaptor to a client streaming method.
   */
  public static interface ClientStreamingMethod<ReqT, RespT>
      extends StreamingRequestMethod<ReqT, RespT> {
  }
  /**
   * Adaptor to a bi-directional streaming method.
   */
  public static interface BidiStreamingMethod<ReqT, RespT>
      extends StreamingRequestMethod<ReqT, RespT> {
  }
  /**
   * Creates a {@code ServerCallHandler} for a unary request call method of the service.
   *
   * @param method an adaptor to the actual method on the service implementation.
   */
  private static <ReqT, RespT> ServerCallHandler<ReqT, RespT> asyncUnaryRequestCall(
      final UnaryRequestMethod<ReqT, RespT> method) {
    return new ServerCallHandler<ReqT, RespT>() {
      @Override
      public ServerCall.Listener<ReqT> startCall(
          MethodDescriptor<ReqT, RespT> methodDescriptor,
          final ServerCall<RespT> call,
          Metadata headers) {
        final ResponseObserver<RespT> responseObserver = new ResponseObserver<RespT>(call);
        // We expect only 1 request, but we ask for 2 requests here so that if a misbehaving client
        // sends more than 1 requests, we will catch it in onMessage() and emit INVALID_ARGUMENT.
        call.request(2);
        return new EmptyServerCallListener<ReqT>() {
          // The single request message, buffered until onHalfClose().
          ReqT request;
          @Override
          public void onMessage(ReqT request) {
            if (this.request == null) {
              // We delay calling method.invoke() until onHalfClose(), because application may call
              // close(OK) inside invoke(), while close(OK) is not allowed before onHalfClose().
              this.request = request;
            } else {
              // Second message on a unary-request call: reject the whole call.
              call.close(
                  Status.INVALID_ARGUMENT.withDescription(
                      "More than one request messages for unary call or server streaming call"),
                  new Metadata());
            }
          }
          @Override
          public void onHalfClose() {
            if (request != null) {
              method.invoke(request, responseObserver);
            } else {
              // Client half-closed without ever sending a request message.
              call.close(Status.INVALID_ARGUMENT.withDescription("Half-closed without a request"),
                  new Metadata());
            }
          }
          @Override
          public void onCancel() {
            // Flag the observer so later onValue()/onCompleted() calls fail fast.
            responseObserver.cancelled = true;
          }
        };
      }
    };
  }
  /**
   * Creates a {@code ServerCallHandler} for a streaming request call method of the service.
   *
   * @param method an adaptor to the actual method on the service implementation.
   */
  private static <ReqT, RespT> ServerCallHandler<ReqT, RespT> asyncStreamingRequestCall(
      final StreamingRequestMethod<ReqT, RespT> method) {
    return new ServerCallHandler<ReqT, RespT>() {
      @Override
      public ServerCall.Listener<ReqT> startCall(
          MethodDescriptor<ReqT, RespT> methodDescriptor,
          final ServerCall<RespT> call,
          Metadata headers) {
        call.request(1);
        final ResponseObserver<RespT> responseObserver = new ResponseObserver<RespT>(call);
        // The application is invoked immediately and returns its observer for inbound messages.
        final StreamObserver<ReqT> requestObserver = method.invoke(responseObserver);
        return new EmptyServerCallListener<ReqT>() {
          // Tracks whether the client completed normally, so onCancel() can tell the
          // application about an aborted request stream.
          boolean halfClosed = false;
          @Override
          public void onMessage(ReqT request) {
            requestObserver.onValue(request);
            // Request delivery of the next inbound message.
            call.request(1);
          }
          @Override
          public void onHalfClose() {
            halfClosed = true;
            requestObserver.onCompleted();
          }
          @Override
          public void onCancel() {
            if (!halfClosed) {
              requestObserver.onError(Status.CANCELLED.asException());
            }
            responseObserver.cancelled = true;
          }
        };
      }
    };
  }
  // Internal adaptor shape for unary-request methods (unary and server-streaming calls).
  private static interface UnaryRequestMethod<ReqT, RespT> {
    void invoke(ReqT request, StreamObserver<RespT> responseObserver);
  }
  // Internal adaptor shape for streaming-request methods (client- and bidi-streaming calls).
  private static interface StreamingRequestMethod<ReqT, RespT> {
    StreamObserver<ReqT> invoke(StreamObserver<RespT> responseObserver);
  }
  // Bridges the application-facing StreamObserver onto the underlying ServerCall.
  private static class ResponseObserver<RespT> implements StreamObserver<RespT> {
    final ServerCall<RespT> call;
    // Set from the transport thread on cancellation; volatile so application threads see it.
    volatile boolean cancelled;
    ResponseObserver(ServerCall<RespT> call) {
      this.call = call;
    }
    @Override
    public void onValue(RespT response) {
      if (cancelled) {
        throw Status.CANCELLED.asRuntimeException();
      }
      call.sendMessage(response);
      // Request delivery of the next inbound message.
      call.request(1);
    }
    @Override
    public void onError(Throwable t) {
      call.close(Status.fromThrowable(t), new Metadata());
    }
    @Override
    public void onCompleted() {
      if (cancelled) {
        throw Status.CANCELLED.asRuntimeException();
      } else {
        call.close(Status.OK, new Metadata());
      }
    }
  }
  // No-op listener base so anonymous subclasses only override the callbacks they need.
  private static class EmptyServerCallListener<ReqT> extends ServerCall.Listener<ReqT> {
    @Override
    public void onMessage(ReqT request) {
    }
    @Override
    public void onHalfClose() {
    }
    @Override
    public void onCancel() {
    }
    @Override
    public void onComplete() {
    }
  }
}
| |
/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.MoreObjects;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.DoNotCall;
import com.google.errorprone.annotations.concurrent.LazyInit;
import com.google.j2objc.annotations.RetainedWith;
import com.google.j2objc.annotations.Weak;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.function.Function;
import java.util.stream.Collector;
import java.util.stream.Stream;
import javax.annotation.CheckForNull;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* A {@link SetMultimap} whose contents will never change, with many other important properties
* detailed at {@link ImmutableCollection}.
*
* <p><b>Warning:</b> As in all {@link SetMultimap}s, do not modify either a key <i>or a value</i>
* of a {@code ImmutableSetMultimap} in a way that affects its {@link Object#equals} behavior.
* Undefined behavior and bugs will result.
*
* <p>See the Guava User Guide article on <a href=
* "https://github.com/google/guava/wiki/ImmutableCollectionsExplained">immutable collections</a>.
*
* @author Mike Ward
* @since 2.0
*/
@GwtCompatible(serializable = true, emulated = true)
@ElementTypesAreNonnullByDefault
public class ImmutableSetMultimap<K, V> extends ImmutableMultimap<K, V>
implements SetMultimap<K, V> {
/**
 * Returns a {@link Collector} that accumulates elements into an {@code ImmutableSetMultimap}
 * whose keys and values are the result of applying the provided mapping functions to the input
 * elements.
 *
 * <p>For streams with defined encounter order (as defined in the Ordering section of the {@link
 * java.util.stream} Javadoc), that order is preserved, but entries are <a
 * href="ImmutableMultimap.html#iteration">grouped by key</a>.
 *
 * <p>Example:
 *
 * <pre>{@code
 * static final Multimap<Character, String> FIRST_LETTER_MULTIMAP =
 *     Stream.of("banana", "apple", "carrot", "asparagus", "cherry")
 *         .collect(toImmutableSetMultimap(str -> str.charAt(0), str -> str.substring(1)));
 *
 * // is equivalent to
 *
 * static final Multimap<Character, String> FIRST_LETTER_MULTIMAP =
 *     new ImmutableSetMultimap.Builder<Character, String>()
 *         .put('b', "anana")
 *         .putAll('a', "pple", "sparagus")
 *         .putAll('c', "arrot", "herry")
 *         .build();
 * }</pre>
 *
 * @since 21.0
 */
public static <T extends @Nullable Object, K, V>
    Collector<T, ?, ImmutableSetMultimap<K, V>> toImmutableSetMultimap(
        Function<? super T, ? extends K> keyFunction,
        Function<? super T, ? extends V> valueFunction) {
  // Implementation lives in CollectCollectors, shared with the other immutable-collection types.
  return CollectCollectors.toImmutableSetMultimap(keyFunction, valueFunction);
}
/**
 * Returns a {@code Collector} accumulating entries into an {@code ImmutableSetMultimap}. Each
 * input element is mapped to a key and a stream of values, each of which are put into the
 * resulting {@code Multimap}, in the encounter order of the stream and the encounter order of the
 * streams of values.
 *
 * <p>Example:
 *
 * <pre>{@code
 * static final ImmutableSetMultimap<Character, Character> FIRST_LETTER_MULTIMAP =
 *     Stream.of("banana", "apple", "carrot", "asparagus", "cherry")
 *         .collect(
 *             flatteningToImmutableSetMultimap(
 *                  str -> str.charAt(0),
 *                  str -> str.substring(1).chars().mapToObj(c -> (char) c));
 *
 * // is equivalent to
 *
 * static final ImmutableSetMultimap<Character, Character> FIRST_LETTER_MULTIMAP =
 *     ImmutableSetMultimap.<Character, Character>builder()
 *         .putAll('b', Arrays.asList('a', 'n', 'a', 'n', 'a'))
 *         .putAll('a', Arrays.asList('p', 'p', 'l', 'e'))
 *         .putAll('c', Arrays.asList('a', 'r', 'r', 'o', 't'))
 *         .putAll('a', Arrays.asList('s', 'p', 'a', 'r', 'a', 'g', 'u', 's'))
 *         .putAll('c', Arrays.asList('h', 'e', 'r', 'r', 'y'))
 *         .build();
 *
 * // after deduplication, the resulting multimap is equivalent to
 *
 * static final ImmutableSetMultimap<Character, Character> FIRST_LETTER_MULTIMAP =
 *     ImmutableSetMultimap.<Character, Character>builder()
 *         .putAll('b', Arrays.asList('a', 'n'))
 *         .putAll('a', Arrays.asList('p', 'l', 'e', 's', 'a', 'r', 'g', 'u'))
 *         .putAll('c', Arrays.asList('a', 'r', 'o', 't', 'h', 'e', 'y'))
 *         .build();
 * }
 * }</pre>
 *
 * @since 21.0
 */
public static <T extends @Nullable Object, K, V>
    Collector<T, ?, ImmutableSetMultimap<K, V>> flatteningToImmutableSetMultimap(
        Function<? super T, ? extends K> keyFunction,
        Function<? super T, ? extends Stream<? extends V>> valuesFunction) {
  // Implementation lives in CollectCollectors, shared with the other immutable-collection types.
  return CollectCollectors.flatteningToImmutableSetMultimap(keyFunction, valuesFunction);
}
/**
 * Returns the empty multimap.
 *
 * <p><b>Performance note:</b> the instance returned is a singleton.
 */
// Casting is safe because the multimap will never hold any elements, so the shared
// empty instance can be reused for any K/V combination.
@SuppressWarnings("unchecked")
public static <K, V> ImmutableSetMultimap<K, V> of() {
  return (ImmutableSetMultimap<K, V>) EmptyImmutableSetMultimap.INSTANCE;
}
/** Returns an immutable multimap containing a single entry. */
public static <K, V> ImmutableSetMultimap<K, V> of(K k1, V v1) {
  return new Builder<K, V>().put(k1, v1).build();
}
/**
 * Returns an immutable multimap containing the given entries, in order. Repeated occurrences of
 * an entry (according to {@link Object#equals}) after the first are ignored.
 */
public static <K, V> ImmutableSetMultimap<K, V> of(K k1, V v1, K k2, V v2) {
  return new Builder<K, V>().put(k1, v1).put(k2, v2).build();
}
/**
 * Returns an immutable multimap containing the given entries, in order. Repeated occurrences of
 * an entry (according to {@link Object#equals}) after the first are ignored.
 */
public static <K, V> ImmutableSetMultimap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3) {
  return new Builder<K, V>().put(k1, v1).put(k2, v2).put(k3, v3).build();
}
/**
 * Returns an immutable multimap containing the given entries, in order. Repeated occurrences of
 * an entry (according to {@link Object#equals}) after the first are ignored.
 */
public static <K, V> ImmutableSetMultimap<K, V> of(
    K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
  return new Builder<K, V>().put(k1, v1).put(k2, v2).put(k3, v3).put(k4, v4).build();
}
/**
 * Returns an immutable multimap containing the given entries, in order. Repeated occurrences of
 * an entry (according to {@link Object#equals}) after the first are ignored.
 */
public static <K, V> ImmutableSetMultimap<K, V> of(
    K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
  return new Builder<K, V>()
      .put(k1, v1)
      .put(k2, v2)
      .put(k3, v3)
      .put(k4, v4)
      .put(k5, v5)
      .build();
}
// looking for of() with > 5 entries? Use the builder instead.
/** Returns a new {@link Builder}. */
public static <K, V> Builder<K, V> builder() {
  return new Builder<K, V>();
}
/**
 * A builder for creating immutable {@code SetMultimap} instances, especially {@code public static
 * final} multimaps ("constant multimaps"). Example:
 *
 * <pre>{@code
 * static final Multimap<String, Integer> STRING_TO_INTEGER_MULTIMAP =
 *     new ImmutableSetMultimap.Builder<String, Integer>()
 *         .put("one", 1)
 *         .putAll("several", 1, 2, 3)
 *         .putAll("many", 1, 2, 3, 4, 5)
 *         .build();
 * }</pre>
 *
 * <p>Builder instances can be reused; it is safe to call {@link #build} multiple times to build
 * multiple multimaps in series. Each multimap contains the key-value mappings in the previously
 * created multimaps.
 *
 * @since 2.0
 */
public static final class Builder<K, V> extends ImmutableMultimap.Builder<K, V> {
  /**
   * Creates a new builder. The returned builder is equivalent to the builder generated by {@link
   * ImmutableSetMultimap#builder}.
   */
  public Builder() {
    super();
  }
  @Override
  Collection<V> newMutableValueCollection() {
    // An insertion-ordered set: deduplicates values per key while keeping first-seen order.
    return Platform.preservesInsertionOrderOnAddsSet();
  }
  /** Adds a key-value mapping to the built multimap if it is not already present. */
  @CanIgnoreReturnValue
  @Override
  public Builder<K, V> put(K key, V value) {
    super.put(key, value);
    return this;
  }
  /**
   * Adds an entry to the built multimap if it is not already present.
   *
   * @since 11.0
   */
  @CanIgnoreReturnValue
  @Override
  public Builder<K, V> put(Entry<? extends K, ? extends V> entry) {
    super.put(entry);
    return this;
  }
  /**
   * {@inheritDoc}
   *
   * @since 19.0
   */
  @CanIgnoreReturnValue
  @Beta
  @Override
  public Builder<K, V> putAll(Iterable<? extends Entry<? extends K, ? extends V>> entries) {
    super.putAll(entries);
    return this;
  }
  @CanIgnoreReturnValue
  @Override
  public Builder<K, V> putAll(K key, Iterable<? extends V> values) {
    super.putAll(key, values);
    return this;
  }
  @CanIgnoreReturnValue
  @Override
  public Builder<K, V> putAll(K key, V... values) {
    return putAll(key, Arrays.asList(values));
  }
  @CanIgnoreReturnValue
  @Override
  public Builder<K, V> putAll(Multimap<? extends K, ? extends V> multimap) {
    // Copy key-by-key via the asMap() view so each key's values are added together.
    for (Entry<? extends K, ? extends Collection<? extends V>> entry :
        multimap.asMap().entrySet()) {
      putAll(entry.getKey(), entry.getValue());
    }
    return this;
  }
  @CanIgnoreReturnValue
  @Override
  Builder<K, V> combine(ImmutableMultimap.Builder<K, V> other) {
    super.combine(other);
    return this;
  }
  /**
   * {@inheritDoc}
   *
   * @since 8.0
   */
  @CanIgnoreReturnValue
  @Override
  public Builder<K, V> orderKeysBy(Comparator<? super K> keyComparator) {
    super.orderKeysBy(keyComparator);
    return this;
  }
  /**
   * Specifies the ordering of the generated multimap's values for each key.
   *
   * <p>If this method is called, the sets returned by the {@code get()} method of the generated
   * multimap and its {@link Multimap#asMap()} view are {@link ImmutableSortedSet} instances.
   * However, serialization does not preserve that property, though it does maintain the key and
   * value ordering.
   *
   * @since 8.0
   */
  // TODO: Make serialization behavior consistent.
  @CanIgnoreReturnValue
  @Override
  public Builder<K, V> orderValuesBy(Comparator<? super V> valueComparator) {
    super.orderValuesBy(valueComparator);
    return this;
  }
  /** Returns a newly-created immutable set multimap. */
  @Override
  public ImmutableSetMultimap<K, V> build() {
    Collection<Map.Entry<K, Collection<V>>> mapEntries = builderMap.entrySet();
    // Sort the key-to-values entries first when a key comparator was supplied.
    if (keyComparator != null) {
      mapEntries = Ordering.from(keyComparator).<K>onKeys().immutableSortedCopy(mapEntries);
    }
    return fromMapEntries(mapEntries, valueComparator);
  }
}
  /**
   * Returns an immutable set multimap containing the same mappings as {@code multimap}. The
   * generated multimap's key and value orderings correspond to the iteration ordering of the {@code
   * multimap.asMap()} view. Repeated occurrences of an entry in the multimap after the first are
   * ignored.
   *
   * <p>Despite the method name, this method attempts to avoid actually copying the data when it is
   * safe to do so. The exact circumstances under which a copy will or will not be performed are
   * undocumented and subject to change.
   *
   * @throws NullPointerException if any key or value in {@code multimap} is null
   */
  public static <K, V> ImmutableSetMultimap<K, V> copyOf(
      Multimap<? extends K, ? extends V> multimap) {
    // Delegates to the private overload with no value comparator (natural iteration order).
    return copyOf(multimap, null);
  }
  // Copies the given multimap, applying valueComparator (if non-null) to order each key's values.
  private static <K, V> ImmutableSetMultimap<K, V> copyOf(
      Multimap<? extends K, ? extends V> multimap,
      @CheckForNull Comparator<? super V> valueComparator) {
    checkNotNull(multimap); // eager for GWT
    if (multimap.isEmpty() && valueComparator == null) {
      return of();
    }
    // Fast path: an existing ImmutableSetMultimap can be reused as-is (no copy) unless it is a
    // partial view over a larger structure. Note the comparator is ignored on this path.
    if (multimap instanceof ImmutableSetMultimap) {
      @SuppressWarnings("unchecked") // safe since multimap is not writable
      ImmutableSetMultimap<K, V> kvMultimap = (ImmutableSetMultimap<K, V>) multimap;
      if (!kvMultimap.isPartialView()) {
        return kvMultimap;
      }
    }
    return fromMapEntries(multimap.asMap().entrySet(), valueComparator);
  }
  /**
   * Returns an immutable multimap containing the specified entries. The returned multimap iterates
   * over keys in the order they were first encountered in the input, and the values for each key
   * are iterated in the order they were encountered. If two values for the same key are {@linkplain
   * Object#equals equal}, the first value encountered is used.
   *
   * @throws NullPointerException if any key, value, or entry is null
   * @since 19.0
   */
  @Beta
  public static <K, V> ImmutableSetMultimap<K, V> copyOf(
      Iterable<? extends Entry<? extends K, ? extends V>> entries) {
    // The builder's per-key sets perform the de-duplication described above.
    return new Builder<K, V>().putAll(entries).build();
  }
/** Creates an ImmutableSetMultimap from an asMap.entrySet. */
static <K, V> ImmutableSetMultimap<K, V> fromMapEntries(
Collection<? extends Map.Entry<? extends K, ? extends Collection<? extends V>>> mapEntries,
@CheckForNull Comparator<? super V> valueComparator) {
if (mapEntries.isEmpty()) {
return of();
}
ImmutableMap.Builder<K, ImmutableSet<V>> builder =
new ImmutableMap.Builder<>(mapEntries.size());
int size = 0;
for (Entry<? extends K, ? extends Collection<? extends V>> entry : mapEntries) {
K key = entry.getKey();
Collection<? extends V> values = entry.getValue();
ImmutableSet<V> set = valueSet(valueComparator, values);
if (!set.isEmpty()) {
builder.put(key, set);
size += set.size();
}
}
return new ImmutableSetMultimap<>(builder.buildOrThrow(), size, valueComparator);
}
  /**
   * Returned by get() when a missing key is provided. Also holds the comparator, if any, used for
   * values.
   */
  private final transient ImmutableSet<V> emptySet;
  ImmutableSetMultimap(
      ImmutableMap<K, ImmutableSet<V>> map,
      int size,
      @CheckForNull Comparator<? super V> valueComparator) {
    super(map, size);
    // When valueComparator is non-null this is an ImmutableSortedSet; valueComparator()
    // later recovers the comparator from this field rather than storing it separately.
    this.emptySet = emptySet(valueComparator);
  }
// views
/**
* Returns an immutable set of the values for the given key. If no mappings in the multimap have
* the provided key, an empty immutable set is returned. The values are in the same order as the
* parameters used to build this multimap.
*/
@Override
public ImmutableSet<V> get(K key) {
// This cast is safe as its type is known in constructor.
ImmutableSet<V> set = (ImmutableSet<V>) map.get(key);
return MoreObjects.firstNonNull(set, emptySet);
}
  // Lazily-computed inverse multimap; @LazyInit permits the benign race below.
  @LazyInit @RetainedWith @CheckForNull private transient ImmutableSetMultimap<V, K> inverse;
  /**
   * {@inheritDoc}
   *
   * <p>Because an inverse of a set multimap cannot contain multiple pairs with the same key and
   * value, this method returns an {@code ImmutableSetMultimap} rather than the {@code
   * ImmutableMultimap} specified in the {@code ImmutableMultimap} class.
   */
  @Override
  public ImmutableSetMultimap<V, K> inverse() {
    // Single read into a local: concurrent callers may each compute an inverse, but every
    // caller returns a consistent, fully-constructed instance.
    ImmutableSetMultimap<V, K> result = inverse;
    return (result == null) ? (inverse = invert()) : result;
  }
  // Builds the value->key multimap and links it back to this instance so that
  // inverse().inverse() does not recompute.
  private ImmutableSetMultimap<V, K> invert() {
    Builder<V, K> builder = builder();
    for (Entry<K, V> entry : entries()) {
      builder.put(entry.getValue(), entry.getKey());
    }
    ImmutableSetMultimap<V, K> invertedMultimap = builder.build();
    invertedMultimap.inverse = this;
    return invertedMultimap;
  }
  /**
   * Guaranteed to throw an exception and leave the multimap unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @CanIgnoreReturnValue
  @Deprecated
  @Override
  @DoNotCall("Always throws UnsupportedOperationException")
  public final ImmutableSet<V> removeAll(@CheckForNull Object key) {
    // Immutable collection: all mutators are rejected unconditionally.
    throw new UnsupportedOperationException();
  }
  /**
   * Guaranteed to throw an exception and leave the multimap unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @CanIgnoreReturnValue
  @Deprecated
  @Override
  @DoNotCall("Always throws UnsupportedOperationException")
  public final ImmutableSet<V> replaceValues(K key, Iterable<? extends V> values) {
    // Immutable collection: all mutators are rejected unconditionally.
    throw new UnsupportedOperationException();
  }
  // Lazily-created entry-set view; @LazyInit permits the benign race in entries().
  @LazyInit @RetainedWith @CheckForNull private transient ImmutableSet<Entry<K, V>> entries;
  /**
   * Returns an immutable collection of all key-value pairs in the multimap. Its iterator traverses
   * the values for the first key, the values for the second key, and so on.
   */
  @Override
  public ImmutableSet<Entry<K, V>> entries() {
    // Read the field once into a local so a concurrent initialization still yields a
    // consistent non-null result.
    ImmutableSet<Entry<K, V>> result = entries;
    return result == null ? (entries = new EntrySet<>(this)) : result;
  }
private static final class EntrySet<K, V> extends ImmutableSet<Entry<K, V>> {
@Weak private final transient ImmutableSetMultimap<K, V> multimap;
EntrySet(ImmutableSetMultimap<K, V> multimap) {
this.multimap = multimap;
}
@Override
public boolean contains(@CheckForNull Object object) {
if (object instanceof Entry) {
Entry<?, ?> entry = (Entry<?, ?>) object;
return multimap.containsEntry(entry.getKey(), entry.getValue());
}
return false;
}
@Override
public int size() {
return multimap.size();
}
@Override
public UnmodifiableIterator<Entry<K, V>> iterator() {
return multimap.entryIterator();
}
@Override
boolean isPartialView() {
return false;
}
}
private static <V> ImmutableSet<V> valueSet(
@CheckForNull Comparator<? super V> valueComparator, Collection<? extends V> values) {
return (valueComparator == null)
? ImmutableSet.copyOf(values)
: ImmutableSortedSet.copyOf(valueComparator, values);
}
private static <V> ImmutableSet<V> emptySet(@CheckForNull Comparator<? super V> valueComparator) {
return (valueComparator == null)
? ImmutableSet.<V>of()
: ImmutableSortedSet.<V>emptySet(valueComparator);
}
private static <V> ImmutableSet.Builder<V> valuesBuilder(
@CheckForNull Comparator<? super V> valueComparator) {
return (valueComparator == null)
? new ImmutableSet.Builder<V>()
: new ImmutableSortedSet.Builder<V>(valueComparator);
}
  /**
   * @serialData number of distinct keys, and then for each distinct key: the key, the number of
   *     values for that key, and the key's values
   */
  @GwtIncompatible // java.io.ObjectOutputStream
  private void writeObject(ObjectOutputStream stream) throws IOException {
    stream.defaultWriteObject();
    // The value comparator (possibly null) is written first so readObject can rebuild the
    // per-key value sets with the same ordering.
    stream.writeObject(valueComparator());
    Serialization.writeMultimap(this, stream);
  }
@CheckForNull
Comparator<? super V> valueComparator() {
return emptySet instanceof ImmutableSortedSet
? ((ImmutableSortedSet<V>) emptySet).comparator()
: null;
}
  @GwtIncompatible // java serialization
  private static final class SetFieldSettersHolder {
    // Holder class defers the reflective lookup of the final "emptySet" field until
    // deserialization actually needs it (see readObject).
    static final Serialization.FieldSetter<ImmutableSetMultimap> EMPTY_SET_FIELD_SETTER =
        Serialization.getFieldSetter(ImmutableSetMultimap.class, "emptySet");
  }
  @GwtIncompatible // java.io.ObjectInputStream
  // Serialization type safety is at the caller's mercy.
  @SuppressWarnings("unchecked")
  private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
    stream.defaultReadObject();
    // Mirror of writeObject: comparator first, then keyCount, then per-key value runs.
    Comparator<Object> valueComparator = (Comparator<Object>) stream.readObject();
    int keyCount = stream.readInt();
    if (keyCount < 0) {
      throw new InvalidObjectException("Invalid key count " + keyCount);
    }
    ImmutableMap.Builder<Object, ImmutableSet<Object>> builder = ImmutableMap.builder();
    int tmpSize = 0;
    for (int i = 0; i < keyCount; i++) {
      Object key = stream.readObject();
      int valueCount = stream.readInt();
      if (valueCount <= 0) {
        throw new InvalidObjectException("Invalid value count " + valueCount);
      }
      ImmutableSet.Builder<Object> valuesBuilder = valuesBuilder(valueComparator);
      for (int j = 0; j < valueCount; j++) {
        valuesBuilder.add(stream.readObject());
      }
      ImmutableSet<Object> valueSet = valuesBuilder.build();
      // A shrunken set means the serialized data contained duplicate values for this key,
      // which a well-formed ImmutableSetMultimap can never produce.
      if (valueSet.size() != valueCount) {
        throw new InvalidObjectException("Duplicate key-value pairs exist for key " + key);
      }
      builder.put(key, valueSet);
      tmpSize += valueCount;
    }
    ImmutableMap<Object, ImmutableSet<Object>> tmpMap;
    try {
      tmpMap = builder.buildOrThrow();
    } catch (IllegalArgumentException e) {
      // Duplicate keys in the stream; surface as a deserialization error with cause attached.
      throw (InvalidObjectException) new InvalidObjectException(e.getMessage()).initCause(e);
    }
    // The target fields are final, so they are installed reflectively.
    FieldSettersHolder.MAP_FIELD_SETTER.set(this, tmpMap);
    FieldSettersHolder.SIZE_FIELD_SETTER.set(this, tmpSize);
    SetFieldSettersHolder.EMPTY_SET_FIELD_SETTER.set(this, emptySet(valueComparator));
  }
  @GwtIncompatible // not needed in emulated source.
  private static final long serialVersionUID = 0;
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide;
import com.intellij.ide.dnd.LinuxDragAndDropSupport;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ProjectManagerAdapter;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.util.ArrayUtilRt;
import org.jetbrains.annotations.Nullable;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.StringSelection;
import java.awt.datatransfer.Transferable;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.awt.dnd.InvalidDnDOperationException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * Tracks the PSI elements most recently placed on the clipboard (or used as a drag-and-drop
 * payload) by copy/cut actions, so that paste and drop handlers can retrieve them as PSI
 * elements rather than plain text or file lists.
 */
public class PsiCopyPasteManager {
  public static PsiCopyPasteManager getInstance() {
    return ServiceManager.getService(PsiCopyPasteManager.class);
  }
  // NOTE(review): the logger category still names the old "...Impl" class; presumably kept for
  // log-configuration continuity - confirm before changing.
  private static final Logger LOG = Logger.getInstance("#com.intellij.ide.PsiCopyPasteManagerImpl");
  // The data we most recently placed on the clipboard; null when the clipboard is not ours.
  private MyData myRecentData;
  private final CopyPasteManagerEx myCopyPasteManager;
  public PsiCopyPasteManager(CopyPasteManager copyPasteManager, ProjectManager projectManager) {
    myCopyPasteManager = (CopyPasteManagerEx) copyPasteManager;
    // Drop our clipboard reference when its project closes, so PSI elements from a disposed
    // project are never handed back to callers.
    projectManager.addProjectManagerListener(new ProjectManagerAdapter() {
      @Override
      public void projectClosing(Project project) {
        if (myRecentData != null && myRecentData.getProject() == project) {
          myRecentData = null;
        }
      }
    });
  }
  /**
   * Returns the still-valid PSI elements currently on the clipboard if this manager put them
   * there, or null otherwise.
   *
   * @param isCopied if non-null, element 0 is set to true for a copy and false for a cut
   */
  @Nullable
  public PsiElement[] getElements(boolean[] isCopied) {
    try {
      Transferable content = myCopyPasteManager.getContents();
      if (content == null) {
        return null;
      }
      Object transferData;
      try {
        transferData = content.getTransferData(ourDataFlavor);
      }
      catch (UnsupportedFlavorException e) {
        return null;
      }
      catch (IOException e) {
        return null;
      }
      if (!(transferData instanceof MyData)) {
        return null;
      }
      MyData dataProxy = (MyData)transferData;
      // Only honor clipboard data that is the exact object we stored; stale or foreign data
      // is ignored.
      if (!Comparing.equal(dataProxy, myRecentData)) {
        return null;
      }
      if (isCopied != null) {
        isCopied[0] = myRecentData.isCopied();
      }
      return myRecentData.getElements();
    }
    catch (Exception e) {
      // Clipboard access is best-effort: log at debug level and report "no PSI content".
      if (LOG.isDebugEnabled()) {
        LOG.debug(e);
      }
      return null;
    }
  }
  /**
   * Extracts the PSI elements carried by the given transferable (e.g. a drag-and-drop payload),
   * or returns null if it does not carry our flavor or cannot be read.
   */
  @Nullable
  static PsiElement[] getElements(final Transferable content) {
    if (content == null) return null;
    Object transferData;
    try {
      transferData = content.getTransferData(ourDataFlavor);
    }
    catch (UnsupportedFlavorException e) {
      return null;
    }
    catch (IOException e) {
      return null;
    }
    catch (InvalidDnDOperationException e) {
      return null;
    }
    return transferData instanceof MyData ? ((MyData)transferData).getElements() : null;
  }
  // Clears our PSI data and replaces the system clipboard content with an empty string,
  // notifying clipboard listeners of the change.
  public void clear() {
    Transferable old = myCopyPasteManager.getContents();
    myRecentData = null;
    StringSelection _new = new StringSelection("");
    myCopyPasteManager.setSystemClipboardContent(_new);
    myCopyPasteManager.fireContentChanged(old, _new);
  }
  /**
   * Places the given PSI elements on the system clipboard.
   *
   * @param copied true for a copy action, false for a cut action
   */
  public void setElements(PsiElement[] elements, boolean copied) {
    Transferable old = myCopyPasteManager.getContents();
    myRecentData = new MyData(elements, copied);
    MyTransferable _new = new MyTransferable(myRecentData);
    myCopyPasteManager.setSystemClipboardContent(_new);
    myCopyPasteManager.fireContentChanged(old, _new);
  }
  /** Returns true if the given element is part of the most recent *cut* (not copy) operation. */
  public boolean isCutElement(Object element) {
    if (myRecentData == null) return false;
    if (myRecentData.isCopied()) return false;
    PsiElement[] elements = myRecentData.getElements();
    if (elements == null) return false;
    // Identity comparison: the cut marker applies to the exact PSI element instances stored.
    for (PsiElement aElement : elements) {
      if (aElement == element) return true;
    }
    return false;
  }
  // JVM-local flavor used to round-trip MyData through the AWT clipboard without serialization.
  private static final DataFlavor ourDataFlavor;
  static {
    try {
      final Class<MyData> flavorClass = MyData.class;
      final Thread currentThread = Thread.currentThread();
      final ClassLoader currentLoader = currentThread.getContextClassLoader();
      try {
        // DataFlavor resolves the class via the context classloader; temporarily swap it so
        // MyData is found when the plugin classloader differs from the context one.
        currentThread.setContextClassLoader(flavorClass.getClassLoader());
        ourDataFlavor = new DataFlavor(DataFlavor.javaJVMLocalObjectMimeType + ";class=" + flavorClass.getName());
      }
      finally {
        currentThread.setContextClassLoader(currentLoader);
      }
    }
    catch (ClassNotFoundException e) {
      throw new RuntimeException(e);
    }
  }
  /** Clipboard payload: the copied/cut PSI elements plus the copy-vs-cut flag. */
  public static class MyData {
    private PsiElement[] myElements;
    private final boolean myIsCopied;
    public MyData(PsiElement[] elements, boolean copied) {
      myElements = elements;
      myIsCopied = copied;
    }
    /**
     * Returns the stored elements, silently dropping any that have become invalid since they
     * were stored (e.g. deleted or their file reloaded). The pruned array is cached.
     */
    public PsiElement[] getElements() {
      if (myElements == null) return PsiElement.EMPTY_ARRAY;
      int validElementsCount = 0;
      // PSI validity may only be queried under a read action.
      // NOTE(review): acquireReadActionLock is the old token-based API - verify it is still
      // available on the target platform version.
      final AccessToken token = ApplicationManager.getApplication().acquireReadActionLock();
      try {
        for (PsiElement element : myElements) {
          if (element.isValid()) {
            validElementsCount++;
          }
        }
        // Fast path: everything still valid, return the original array untouched.
        if (validElementsCount == myElements.length) {
          return myElements;
        }
        PsiElement[] validElements = new PsiElement[validElementsCount];
        int j=0;
        for (PsiElement element : myElements) {
          if (element.isValid()) {
            validElements[j++] = element;
          }
        }
        myElements = validElements;
      }
      finally {
        token.finish();
      }
      return myElements;
    }
    public boolean isCopied() {
      return myIsCopied;
    }
    /** Returns the project of the first stored element, or null if none can be determined. */
    @Nullable
    public Project getProject() {
      if (myElements == null || myElements.length == 0) {
        return null;
      }
      final PsiElement element = myElements[0];
      return element.isValid() ? element.getProject() : null;
    }
  }
  /**
   * Transferable exposing the PSI payload under several flavors: the JVM-local PSI flavor,
   * plain text (element names), a java file list, and the Linux (GNOME/KDE) DnD flavors.
   */
  public static class MyTransferable implements Transferable {
    private static final DataFlavor[] DATA_FLAVORS_COPY = {
      ourDataFlavor, DataFlavor.stringFlavor, DataFlavor.javaFileListFlavor,
      LinuxDragAndDropSupport.uriListFlavor, LinuxDragAndDropSupport.gnomeFileListFlavor
    };
    // Cut additionally advertises the KDE cut-mark flavor so KDE file managers move, not copy.
    private static final DataFlavor[] DATA_FLAVORS_CUT = {
      ourDataFlavor, DataFlavor.stringFlavor, DataFlavor.javaFileListFlavor,
      LinuxDragAndDropSupport.uriListFlavor, LinuxDragAndDropSupport.gnomeFileListFlavor, LinuxDragAndDropSupport.kdeCutMarkFlavor
    };
    private final MyData myDataProxy;
    public MyTransferable(MyData data) {
      myDataProxy = data;
    }
    public MyTransferable(PsiElement[] selectedValues) {
      this(new PsiCopyPasteManager.MyData(selectedValues, true));
    }
    @Override
    @Nullable
    public Object getTransferData(DataFlavor flavor) throws UnsupportedFlavorException, IOException {
      if (ourDataFlavor.equals(flavor)) {
        return myDataProxy;
      }
      else if (DataFlavor.stringFlavor.equals(flavor)) {
        return getDataAsText();
      }
      else if (DataFlavor.javaFileListFlavor.equals(flavor)) {
        return getDataAsFileList();
      }
      else if (flavor.equals(LinuxDragAndDropSupport.uriListFlavor)) {
        final List<File> files = getDataAsFileList();
        if (files != null) {
          return LinuxDragAndDropSupport.toUriList(files);
        }
      }
      else if (flavor.equals(LinuxDragAndDropSupport.gnomeFileListFlavor)) {
        final List<File> files = getDataAsFileList();
        if (files != null) {
          // GNOME expects the operation ("copy"/"cut") on the first line, then the URI list.
          final String string = (myDataProxy.isCopied() ? "copy\n" : "cut\n") + LinuxDragAndDropSupport.toUriList(files);
          return new ByteArrayInputStream(string.getBytes(CharsetToolkit.UTF8_CHARSET));
        }
      }
      else if (flavor.equals(LinuxDragAndDropSupport.kdeCutMarkFlavor) && !myDataProxy.isCopied()) {
        return new ByteArrayInputStream("1".getBytes());
      }
      return null;
    }
    /** Joins the names of all named elements with newlines, or null if none have names. */
    @Nullable
    private String getDataAsText() {
      final AccessToken token = ApplicationManager.getApplication().acquireReadActionLock();
      try {
        final List<String> names = new ArrayList<String>();
        for (PsiElement element : myDataProxy.getElements()) {
          if (element instanceof PsiNamedElement) {
            String name = ((PsiNamedElement)element).getName();
            if (name != null) {
              names.add(name);
            }
          }
        }
        return names.isEmpty() ? null : StringUtil.join(names, "\n");
      }
      finally {
        token.finish();
      }
    }
    /** Maps the elements to local files under a read action; null if no files result. */
    @Nullable
    private List<File> getDataAsFileList() {
      final AccessToken token = ApplicationManager.getApplication().acquireReadActionLock();
      try {
        return asFileList(myDataProxy.getElements());
      }
      finally {
        token.finish();
      }
    }
    @Override
    public DataFlavor[] getTransferDataFlavors() {
      return myDataProxy.isCopied() ? DATA_FLAVORS_COPY : DATA_FLAVORS_CUT;
    }
    @Override
    public boolean isDataFlavorSupported(DataFlavor flavor) {
      return ArrayUtilRt.find(getTransferDataFlavors(), flavor) != -1;
    }
    public PsiElement[] getElements() {
      return myDataProxy.getElements();
    }
  }
  /**
   * Converts PSI elements to the local files backing them: file system items map to themselves,
   * directory containers to their first directory, and anything else to its containing file.
   * Only files on the local file system are included. Returns null if the list would be empty.
   */
  @Nullable
  public static List<File> asFileList(final PsiElement[] elements) {
    final List<File> result = new ArrayList<File>();
    for (PsiElement element : elements) {
      final PsiFileSystemItem psiFile;
      if (element instanceof PsiFileSystemItem) {
        psiFile = (PsiFileSystemItem)element;
      }
      else if (element instanceof PsiDirectoryContainer) {
        final PsiDirectory[] directories = ((PsiDirectoryContainer)element).getDirectories();
        psiFile = directories[0];
      }
      else {
        psiFile = element.getContainingFile();
      }
      if (psiFile != null) {
        VirtualFile vFile = psiFile.getVirtualFile();
        if (vFile != null && vFile.getFileSystem() instanceof LocalFileSystem) {
          result.add(new File(vFile.getPath()));
        }
      }
    }
    return result.isEmpty() ? null : result;
  }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver15;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * OpenFlow 1.5 wire representation of the BSN "unicast query timeout" TLV
 * (type 0x9, fixed length 8, single unsigned 32-bit {@code value} field).
 */
class OFBsnTlvUnicastQueryTimeoutVer15 implements OFBsnTlvUnicastQueryTimeout {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnTlvUnicastQueryTimeoutVer15.class);
    // version: 1.5
    final static byte WIRE_VERSION = 6;
    final static int LENGTH = 8;

    private final static long DEFAULT_VALUE = 0x0L;

    // OF message fields
    private final long value;
//
    // Immutable default instance
    final static OFBsnTlvUnicastQueryTimeoutVer15 DEFAULT = new OFBsnTlvUnicastQueryTimeoutVer15(
        DEFAULT_VALUE
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnTlvUnicastQueryTimeoutVer15(long value) {
        this.value = value;
    }

    // Accessors for OF message fields
    @Override
    public int getType() {
        return 0x9;
    }

    @Override
    public long getValue() {
        return value;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_15;
    }

    public OFBsnTlvUnicastQueryTimeout.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /** Builder that defaults every unset field to the parent message's value. */
    static class BuilderWithParent implements OFBsnTlvUnicastQueryTimeout.Builder {
        final OFBsnTlvUnicastQueryTimeoutVer15 parentMessage;

        // OF message fields
        private boolean valueSet;
        private long value;

        BuilderWithParent(OFBsnTlvUnicastQueryTimeoutVer15 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public int getType() {
            return 0x9;
        }

        @Override
        public long getValue() {
            return value;
        }

        @Override
        public OFBsnTlvUnicastQueryTimeout.Builder setValue(long value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        @Override
        public OFBsnTlvUnicastQueryTimeout build() {
            long value = this.valueSet ? this.value : parentMessage.value;
            //
            return new OFBsnTlvUnicastQueryTimeoutVer15(
                    value
                );
        }
    }

    /** Builder that defaults every unset field to the TLV's wire default. */
    static class Builder implements OFBsnTlvUnicastQueryTimeout.Builder {
        // OF message fields
        private boolean valueSet;
        private long value;

        @Override
        public int getType() {
            return 0x9;
        }

        @Override
        public long getValue() {
            return value;
        }

        @Override
        public OFBsnTlvUnicastQueryTimeout.Builder setValue(long value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        //
        @Override
        public OFBsnTlvUnicastQueryTimeout build() {
            long value = this.valueSet ? this.value : DEFAULT_VALUE;
            return new OFBsnTlvUnicastQueryTimeoutVer15(
                    value
                );
        }
    }

    final static Reader READER = new Reader();

    /** Decodes the TLV from the wire; returns null when the buffer is not yet complete. */
    static class Reader implements OFMessageReader<OFBsnTlvUnicastQueryTimeout> {
        @Override
        public OFBsnTlvUnicastQueryTimeout readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 0x9
            short type = bb.readShort();
            if(type != (short) 0x9)
                throw new OFParseError("Wrong type: Expected=0x9(0x9), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 8)
                throw new OFParseError("Wrong length: Expected=8(8), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet; rewind so the caller can retry later.
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long value = U32.f(bb.readInt());

            OFBsnTlvUnicastQueryTimeoutVer15 bsnTlvUnicastQueryTimeoutVer15 = new OFBsnTlvUnicastQueryTimeoutVer15(
                    value
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnTlvUnicastQueryTimeoutVer15);
            return bsnTlvUnicastQueryTimeoutVer15;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnTlvUnicastQueryTimeoutVer15Funnel FUNNEL = new OFBsnTlvUnicastQueryTimeoutVer15Funnel();

    static class OFBsnTlvUnicastQueryTimeoutVer15Funnel implements Funnel<OFBsnTlvUnicastQueryTimeoutVer15> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnTlvUnicastQueryTimeoutVer15 message, PrimitiveSink sink) {
            // fixed value property type = 0x9
            sink.putShort((short) 0x9);
            // fixed value property length = 8
            sink.putShort((short) 0x8);
            sink.putLong(message.value);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    static class Writer implements OFMessageWriter<OFBsnTlvUnicastQueryTimeoutVer15> {
        @Override
        public void write(ByteBuf bb, OFBsnTlvUnicastQueryTimeoutVer15 message) {
            // fixed value property type = 0x9
            bb.writeShort((short) 0x9);
            // fixed value property length = 8
            bb.writeShort((short) 0x8);
            bb.writeInt(U32.t(message.value));
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnTlvUnicastQueryTimeoutVer15(");
        b.append("value=").append(value);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnTlvUnicastQueryTimeoutVer15 other = (OFBsnTlvUnicastQueryTimeoutVer15) obj;
        if( value != other.value)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // Fold the 64-bit value into the running result. The previous code discarded
        // "result" (computing prime * hash only), leaving the accumulator a dead store.
        result = prime * result + (int) (value ^ (value >>> 32));
        return result;
    }

}
| |
package com.deleidos.hd.h2;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Map;
import org.apache.log4j.Logger;
import org.h2.jdbcx.JdbcConnectionPool;
import org.h2.tools.DeleteDbFiles;
import org.h2.tools.RunScript;
import org.h2.tools.Server;
import com.deleidos.hd.enums.DetailType;
import com.deleidos.hd.enums.MainType;
/**
* Data access object to persist and retrieve schemas, samples, and metrics from
* the H2 server.
*
* @author leegc
* @author yoonj1
*
*/
public class H2Database {
    public static Logger logger = Logger.getLogger(H2Database.class);
    public static final String DB_DRIVER = "org.h2.Driver";
    // Pool backing all connections handed out by this DAO.
    private final JdbcConnectionPool connectionPool;
    public int emptyHistogramId = 1;
    public int unknownInterpretationId = 1;
    // GUID -> user-facing message for failed analyses; populated in the static initializer.
    private static Map<String, String> failedAnalysisMapping;
    public static final String UNDETERMINED_ERROR_GUID = "failed-analysis-general-001";
    public static final String UNDETERMINED_ERROR_MESSAGE = "There was an error while processing the sample.";
    public static final String UNDETECTABLE_SAMPLE_GUID = "failed-analysis-undetectable-002";
    public static final String UNDETECTABLE_SAMPLE_MESSAGE = "The sample file type could not be determined.";
    public static final String UNSUPPORTED_PARSER_GUID = "failed-analysis-unsupported-003";
    public static final String UNSUPPORTED_PARSER_MESSAGE = "The sample file type is not supported.";
    public static final String DATA_ERROR_GUID = "failed-analysis-no-data-004";
    public static final String DATA_ERROR_MESSAGE = "The sample analysis could not be completed due to a database error.";
    public static final String IO_ERROR_GUID = "failed-analysis-io-err-005";
    public static final String IO_ERROR_MESSAGE = "The sample analysis could not be completed due to a file error.";
    // Signals the join() loop to exit; volatile so the write is visible across threads.
    private static volatile boolean shutdownFlag = false;
    public static boolean debug = false;
    private final H2Config config;
    /**
     * Creates a database handle using configuration loaded from the default properties file,
     * without purging existing database files.
     *
     * @throws IOException if the configuration file cannot be read
     */
    public H2Database() throws IOException {
        this(new H2Config().load(), false);
    }
    // Build the GUID -> message table once at class-load time.
    static {
        failedAnalysisMapping = initFailedAnalysisMapping();
    }
    /** Returns the mapping of failed-analysis GUIDs to their user-facing messages. */
    public static Map<String, String> getFailedAnalysisMapping() {
        return failedAnalysisMapping;
    }
    /**
     * Creates a database handle for the given configuration and initializes the connection pool.
     *
     * @param config connection settings (directory, name, credentials, port)
     * @param purge if true, delete any existing database files before connecting
     */
    protected H2Database(H2Config config, boolean purge) {
        if(purge) {
            DeleteDbFiles.execute(config.getDir(), config.getName(), false);
        }
        this.connectionPool = JdbcConnectionPool.create(config.getConnectionString(), config.getUser(), config.getPasswd());
        // Test configuration is restricted to one connection so tests run deterministically.
        if(config.equals(H2Config.TEST_CONFIG)) {
            this.connectionPool.setMaxConnections(1);
            logger.info("Initialized connection pooling with 1 test connection.");
        } else {
            this.connectionPool.setMaxConnections(20);
            logger.info("Initialized connection pooling with 20 connections.");
        }
        this.config = config;
    }
private static Map<String, String> initFailedAnalysisMapping() {
Map<String, String> failedAnalysisMap = new HashMap<String, String>();
failedAnalysisMap.put(UNDETECTABLE_SAMPLE_GUID, UNDETECTABLE_SAMPLE_MESSAGE);
failedAnalysisMap.put(UNDETERMINED_ERROR_GUID, UNDETERMINED_ERROR_MESSAGE);
failedAnalysisMap.put(DATA_ERROR_GUID, DATA_ERROR_MESSAGE);
failedAnalysisMap.put(UNSUPPORTED_PARSER_GUID, UNSUPPORTED_PARSER_MESSAGE);
failedAnalysisMap.put(IO_ERROR_GUID, IO_ERROR_MESSAGE);
return failedAnalysisMap;
}
/**
* Start the server with defaults.
*
* @param args
* Command line arguments for H2.
* @throws IOException
* @throws InterruptedException
* @throws SQLException
* @throws ClassNotFoundException
*/
public static void main(String[] args) {
try {
H2Config config = new H2Config().load();
H2Database h2 = new H2Database(config, false);
try {
final Server server = h2.startServer(config);
Runtime.getRuntime().addShutdownHook(new Thread() {
public void run() {
logger.info("Server shutting down.");
h2.connectionPool.dispose();
server.stop();
}
});
try {
Connection dbConnection = h2.getNewConnection();
if(!dbConnection.isValid(5)) {
dbConnection.close();
throw new SQLException("Connection could not be made with H2.");
} else {
logger.info("H2 connection established.");
}
h2.runSchemaWizardStartupScript(dbConnection);
dbConnection.close();
h2.join(server);
} catch(SQLException e) {
logger.error(e);
System.err.println("Could not populate database with necessary tables.");
}
} catch (InterruptedException e) {
logger.error(e);
System.err.println("Unexpected threading error while starting server.");
} catch (SQLException e) {
logger.error(e);
System.err.println("Connection could not be made to server.");
}
} catch (IOException e) {
logger.error(e);
System.err.println("Could not find configuration file.");
}
}
/**
 * Run the server loop in the current thread, logging a heartbeat with the
 * server URL roughly every six minutes, until {@code shutdownFlag} is set
 * (see {@link #setShutdownFlag()}) or the thread is interrupted.
 *
 * @param server
 *            The running H2 server whose URL is logged.
 */
public void join(Server server) {
    long lastLogTime = System.currentTimeMillis();
    logger.info("Server running at " + server.getURL());
    while (true) {
        long currentTime = System.currentTimeMillis();
        // Heartbeat: 360000 ms = 6 minutes between "running" log lines.
        if ((currentTime - lastLogTime) > 360000 && lastLogTime != currentTime) {
            logger.info("Server running at " + server.getURL());
            lastLogTime = currentTime;
        }
        if (shutdownFlag) {
            logger.info("Server thread signaled to stop.");
            break;
        }
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            logger.error(e);
            // FIX: the interrupt was previously swallowed (and printStackTrace'd),
            // leaving the loop running. Restore the interrupt status and exit the
            // loop so an interrupt actually stops the server thread.
            Thread.currentThread().interrupt();
            break;
        }
    }
    logger.info("Server loop ending.");
}
/**
 * Start up the H2 TCP server using the port from the given configuration.
 * The server accepts connections from other hosts ({@code -tcpAllowOthers}).
 *
 * @param config
 *            Configuration supplying the TCP port number.
 * @return the started server
 * @throws SQLException
 *             If the server cannot be created or started.
 * @throws InterruptedException
 *             Declared for callers; not thrown directly here.
 */
public Server startServer(H2Config config) throws SQLException, InterruptedException {
    logger.info("Starting up H2 server.");
    String[] args = new String[3];
    args[0] = "-tcpAllowOthers";
    args[1] = "-tcpPort";
    args[2] = config.getPortNum().toString();
    Server server = Server.createTcpServer(args);
    server.setOut(System.out);
    server.start();
    // FIX: previously logged "started" (and queried the URL) before start()
    // was actually called.
    logger.info("Server started at " + server.getURL());
    return server;
}
/**
 * Generate the schema wizard's schema in the H2 database: run the SQL
 * initialization script, then seed the {@code main_type}, {@code detail_type}
 * (with {@code type_mapping}), {@code histogram}, and {@code interpretation}
 * tables if (and only if) each is empty.
 *
 * @param dbConnection
 *            An open connection to run the script and seed statements on.
 * @throws SQLException
 *             If there is an exception executing the startup script.
 */
public void runSchemaWizardStartupScript(Connection dbConnection) throws SQLException {
    try {
        InputStreamReader isr = new InputStreamReader(
                getClass().getResourceAsStream("/scripts/init_field_characterization.sql"));
        try {
            RunScript.execute(dbConnection, isr);
            logger.info("Initialization script executed.");
        } finally {
            // FIX: the reader was previously leaked when RunScript.execute threw.
            isr.close();
        }
        // Seed main_type from the MainType enum, only when the table is empty.
        String countMainType = "SELECT * FROM main_type";
        PreparedStatement ppstCheckMain = dbConnection.prepareStatement(countMainType);
        ResultSet rsMainType = ppstCheckMain.executeQuery();
        if (!rsMainType.next()) {
            // FIX: prepare once and reuse; previously a new PreparedStatement was
            // created (and never closed) on every loop iteration.
            String insertIntoMainType = "INSERT INTO main_type VALUES (? , ?)";
            PreparedStatement ppst = dbConnection.prepareStatement(insertIntoMainType);
            for (MainType type : MainType.values()) {
                ppst.setInt(1, type.getIndex());
                ppst.setString(2, type.name());
                ppst.execute();
            } // 3 num,4 string,3 bin
            ppst.close();
            logger.info("Data types inserted.");
        } else {
            logger.info("Data types exist.");
        }
        ppstCheckMain.close();
        // Seed detail_type and its mapping to main_type, only when empty.
        String countDetailType = "SELECT * FROM detail_type";
        PreparedStatement ppstCheckDetail = dbConnection.prepareStatement(countDetailType);
        ResultSet rsDetailType = ppstCheckDetail.executeQuery();
        if (!rsDetailType.next()) {
            String insertIntoDetailType = "INSERT INTO detail_type VALUES (?, ?)";
            PreparedStatement ppst = dbConnection.prepareStatement(insertIntoDetailType);
            String insertIntoTypeMapping = "INSERT INTO type_mapping VALUES (NULL, ?, ?)";
            PreparedStatement ppst2 = dbConnection.prepareStatement(insertIntoTypeMapping);
            for (DetailType type : DetailType.values()) {
                int id = type.getIndex();
                ppst.setInt(1, id);
                ppst.setString(2, type.name());
                ppst.execute();
                ppst2.setLong(1, type.getMainType().getIndex());
                ppst2.setInt(2, id);
                ppst2.execute();
            }
            ppst.close();
            ppst2.close();
            logger.info("Detail types inserted.");
        } else {
            logger.info("Detail types exist.");
        }
        ppstCheckDetail.close();
        // Insert the placeholder "empty" histogram row, only when empty.
        String countHistogram = "SELECT * FROM histogram";
        PreparedStatement ppstCheckHistogram = dbConnection.prepareStatement(countHistogram);
        ResultSet rsHistogram = ppstCheckHistogram.executeQuery();
        if (!rsHistogram.next()) {
            String insertIntoHistogram = "INSERT INTO histogram VALUES (NULL, NULL, NULL, NULL)";
            // NOTE(review): this statement is not prepared with
            // RETURN_GENERATED_KEYS yet its generated key is read below —
            // presumably H2 returns it anyway; confirm (compare the
            // interpretation insert, which does pass the flag).
            PreparedStatement ppstHistogram = dbConnection.prepareStatement(insertIntoHistogram);
            ppstHistogram.execute();
            // getGeneratedKey() closes ppstHistogram.
            setEmptyHistogramId(getGeneratedKey(ppstHistogram));
            logger.info("Empty histogram inserted.");
        } else {
            logger.info("Histograms exists.");
        }
        ppstCheckHistogram.close();
        // Insert the "unknown" interpretation row, only when empty.
        String countInterpretation = "SELECT * FROM interpretation";
        PreparedStatement ppstCheckInterpretation = dbConnection.prepareStatement(countInterpretation);
        ResultSet rsInterpretation = ppstCheckInterpretation.executeQuery();
        if (!rsInterpretation.next()) {
            String insertIntoInterpretation = "INSERT INTO interpretation (i_name) VALUES (?) ";
            PreparedStatement ppstInterpretation = dbConnection.prepareStatement(insertIntoInterpretation,
                    PreparedStatement.RETURN_GENERATED_KEYS);
            ppstInterpretation.setString(1, "unknown");
            ppstInterpretation.execute();
            // getGeneratedKey() closes ppstInterpretation.
            setUnknownInterpretationId(getGeneratedKey(ppstInterpretation));
            logger.info("Empty interpretation inserted.");
        } else {
            logger.info("Interpretations exists.");
        }
        ppstCheckInterpretation.close();
        logger.info("Database initialized.");
    } catch (IOException e) {
        // FIX: dropped redundant printStackTrace(); the logger already records it.
        logger.error(e);
    }
}
/**
 * Remove all files in the database directory with the database name. The
 * database must be closed before calling this method; any connections still
 * checked out of the pool are logged as a warning before disposal.
 */
public void purge() {
    int activeConnections = connectionPool.getActiveConnections();
    if (activeConnections > 0) {
        logger.warn(activeConnections + " open connections when attempting to shut down database.");
    }
    connectionPool.dispose();
    logger.info("Deleting database files at " + config.getDir() + ".");
    DeleteDbFiles.execute(config.getDir(), config.getName(), false);
}
/**
* Initialize the database connection with the given connection string.
*
* @param connectionString
* The string to connect with.
* @return The connection.
* @throws ClassNotFoundException
* @throws SQLException
protected Connection initSingleConnection(H2Config config) throws ClassNotFoundException, SQLException {
String connString = config.getConnectionString();
logger.info("Connecting with " + connString);
try {
Class.forName(config.getDriver());
} catch (Exception e) {
logger.error(e);
Class.forName(DB_DRIVER);
}
Connection dbConnection = DriverManager.getConnection(connString, config.getUser(), config.getPasswd());
return dbConnection;
}*/
// Reports whether the server loop (see join()) has been asked to stop.
public boolean isShutdownFlag() {
    return shutdownFlag;
}

// One-way switch signalling the loop in join() to exit. The flag is static,
// so setting it affects every H2Database instance in this JVM.
public void setShutdownFlag() {
    H2Database.shutdownFlag = true;
}
// Getters and Setters

// Row id of the placeholder "empty" histogram inserted by
// runSchemaWizardStartupScript().
public int getEmptyHistogramId() {
    return emptyHistogramId;
}

public void setEmptyHistogramId(int emptyHistogramId) {
    this.emptyHistogramId = emptyHistogramId;
}

// Row id of the "unknown" interpretation inserted by
// runSchemaWizardStartupScript().
public int getUnknownInterpretationId() {
    return unknownInterpretationId;
}

public void setUnknownInterpretationId(int unknownInterpretationId) {
    this.unknownInterpretationId = unknownInterpretationId;
}
/**
 * Return the generated key from a statement (H2 only allows a maximum of
 * one to be returned per query). Calling this method will not execute the
 * statement. The statement is always closed before this method returns,
 * which also releases its ResultSet.
 *
 * @param stmt
 *            The executed statement
 * @return The key generated by executing this statement, or -1 if no key
 *         was generated
 * @throws SQLException
 *             Thrown if there is an error in the query.
 */
protected int getGeneratedKey(Statement stmt) throws SQLException {
    ResultSet gKeys = stmt.getGeneratedKeys();
    try {
        // FIX: collapsed the duplicated close()/-1 branches; the finally block
        // now also closes the statement when getInt/next throws.
        return gKeys.next() ? gKeys.getInt(1) : -1;
    } finally {
        stmt.close();
    }
}
// Borrow a connection from the pool. Callers are responsible for closing it
// (which returns it to the pool).
public Connection getNewConnection() throws SQLException {
    Connection connection = connectionPool.getConnection();
    logger.debug(connectionPool.getActiveConnections() + " connections open in H2Database.");
    return connection;
}

// The configuration this database instance was created with.
public H2Config getConfig() {
    return config;
}
}
| |
/*
* The Trustees of Columbia University in the City of New York
* licenses this file to you under the Educational Community License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the
* License at:
*
* http://opensource.org/licenses/ecl2.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.delegatedaccess.tool.pages;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeModel;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.Button;
import org.apache.wicket.markup.html.form.ChoiceRenderer;
import org.apache.wicket.markup.html.form.DropDownChoice;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.Radio;
import org.apache.wicket.markup.html.form.RadioGroup;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.tree.AbstractTree;
import org.apache.wicket.markup.html.tree.BaseTree;
import org.apache.wicket.markup.html.tree.LinkTree;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.model.StringResourceModel;
import org.sakaiproject.delegatedaccess.model.ListOptionSerialized;
import org.sakaiproject.delegatedaccess.model.NodeModel;
import org.sakaiproject.delegatedaccess.model.SelectOption;
import org.sakaiproject.delegatedaccess.util.DelegatedAccessConstants;
import org.sakaiproject.site.api.Site;
/**
* Creates the landing page for a user to show them all their access and links to go to the sites
*
* @author Bryan Holladay (holladay@longsight.com)
*
*/
public class UserPage extends BaseTreePage{

    // Tree component showing this user's delegated-access hierarchy.
    private BaseTree tree;
    // NOTE(review): not referenced anywhere in this class as written — possibly dead.
    boolean expand = true;
    // Form-backing fields, bound below via PropertyModel.
    private String search = "";
    private String instructorField = "";
    private SelectOption termField;
    private TreeModel treeModel = null;
    // User whose access is displayed: the shopping-period pseudo user when
    // running as the shopping period tool, otherwise the current user.
    private String userId;
    private String selectedInstructorOption = DelegatedAccessConstants.ADVANCED_SEARCH_INSTRUCTOR_TYPE_INSTRUCTOR;

    protected AbstractTree getTree()
    {
        return tree;
    }

    public UserPage(){
        disableLink(accessPageLink);
        //this is the home page so set user as current user
        if(isShoppingPeriodTool()){
            userId = DelegatedAccessConstants.SHOPPING_PERIOD_USER;
        }else{
            //check if they should even have access to this page:
            if(!hasDelegatedAccess){
                if(hasShoppingAdmin){
                    setResponsePage(new ShoppingEditPage());
                }else if(hasAccessAdmin){
                    setResponsePage(new SearchUsersPage());
                }
                // NOTE(review): execution continues after setResponsePage(...) and the
                // rest of this page is still constructed — confirm that is intentional.
            }
            userId = sakaiProxy.getCurrentUserId();
        }
        //Title
        Label title = new Label("title");
        if(isShoppingPeriodTool()){
            title.setDefaultModel(new StringResourceModel("shoppingTitle", null));
        }else{
            title.setDefaultModel(new StringResourceModel("delegatedAccessTitle", null));
        }
        add(title);
        //Description
        Label description = new Label("description");
        if(isShoppingPeriodTool()){
            description.setDefaultModel(new StringResourceModel("shoppingInstruction", null));
        }else{
            description.setDefaultModel(new StringResourceModel("delegatedAccessInstructions", null));
        }
        add(description);
        // Build the (possibly null) access tree model for this user.
        setTreeModel(userId, false);
        final List<ListOptionSerialized> blankRestrictedTools = projectLogic.getEntireToolsList();
        if(treeModel != null){
            DefaultMutableTreeNode node = (DefaultMutableTreeNode) treeModel.getRoot();
            if(((NodeModel) node.getUserObject()).isDirectAccess()){
                projectLogic.addChildrenNodes(node, userId, blankRestrictedTools, true, null, false, isShoppingPeriodTool());
            }
        }
        //a null model means the user doesn't have any associations
        tree = new LinkTree("tree", treeModel){
            @Override
            public boolean isVisible() {
                // Hidden when there is no model or the tree view is disabled for
                // this tool mode (user vs. shopping period).
                return treeModel != null
                        && ((!sakaiProxy.getDisableUserTreeView() && !isShoppingPeriodTool()) ||
                                (!sakaiProxy.getDisableShoppingTreeView() && isShoppingPeriodTool()));
            }
            protected void onNodeLinkClicked(Object node, BaseTree tree, AjaxRequestTarget target) {
                if(tree.isLeaf(node)){
                    //The user has clicked a leaf and chances are its a site.
                    //all sites are leafs, but there may be non sites as leafs
                    NodeModel nodeModel = (NodeModel) ((DefaultMutableTreeNode) node).getUserObject();
                    if(nodeModel.isSiteNode()){
                        Site site = sakaiProxy.getSiteByRef(nodeModel.getNode().title);
                        if(site != null){
                            //redirect the user to the site
                            target.appendJavascript("popupWindow('" + site.getUrl() + "', '" + new StringResourceModel("popupBlockWarning", null).getObject() + "')");
                        }
                    }
                }else{
                    // Non-leaf: lazily load children the first time, then toggle
                    // expand/collapse state.
                    boolean anyAdded = false;
                    if(!tree.getTreeState().isNodeExpanded(node) && !((NodeModel) ((DefaultMutableTreeNode) node).getUserObject()).isAddedDirectChildrenFlag()){
                        anyAdded = projectLogic.addChildrenNodes(node, userId, blankRestrictedTools, true, null, false, isShoppingPeriodTool());
                        ((NodeModel) ((DefaultMutableTreeNode) node).getUserObject()).setAddedDirectChildrenFlag(true);
                    }
                    if(anyAdded){
                        collapseEmptyFoldersHelper((DefaultMutableTreeNode) node);
                    }
                    if(!tree.getTreeState().isNodeExpanded(node) || anyAdded){
                        tree.getTreeState().expandNode(node);
                    }else{
                        tree.getTreeState().collapseNode(node);
                    }
                }
            };
            protected void onJunctionLinkClicked(AjaxRequestTarget target, Object node) {
                //the nodes are generated on the fly with ajax. This will add any child nodes that
                //are missing in the tree. Expanding and collapsing will refresh the tree node
                if(tree.getTreeState().isNodeExpanded(node) && !((NodeModel) ((DefaultMutableTreeNode) node).getUserObject()).isAddedDirectChildrenFlag()){
                    boolean anyAdded = projectLogic.addChildrenNodes(node, userId, blankRestrictedTools, true, null, false, isShoppingPeriodTool());
                    ((NodeModel) ((DefaultMutableTreeNode) node).getUserObject()).setAddedDirectChildrenFlag(true);
                    if(anyAdded){
                        collapseEmptyFoldersHelper((DefaultMutableTreeNode) node);
                    }
                }
            }
            @Override
            protected boolean isForceRebuildOnSelectionChange() {
                return false;
            };
        };
        tree.setRootLess(true);
        add(tree);
        tree.getTreeState().collapseAll();
        //Access Warning:
        // Shown only when the user has no tree model in the regular (non-shopping)
        // tool and the tree view is enabled.
        Label noAccessLabel = new Label("noAccess"){
            @Override
            public boolean isVisible() {
                return treeModel == null && (!isShoppingPeriodTool() && !sakaiProxy.getDisableUserTreeView());
            }
        };
        if(isShoppingPeriodTool()){
            noAccessLabel.setDefaultModel(new StringResourceModel("noShoppingSites", null));
        }else{
            noAccessLabel.setDefaultModel(new StringResourceModel("noDelegatedAccess", null));
        }
        add(noAccessLabel);
        //no hierarchy setup:
        // Super-user hint shown when the hierarchy root has not been configured.
        add(new Label("noHierarchy", new StringResourceModel("noHierarchy", null)){
            public boolean isVisible() {
                return treeModel == null && sakaiProxy.isSuperUser() && "".equals(projectLogic.getRootNodeId().id);
            }
        });
        //Create Search Form:
        final PropertyModel<String> messageModel = new PropertyModel<String>(this, "search");
        final PropertyModel<String> instructorFieldModel = new PropertyModel<String>(this, "instructorField");
        final PropertyModel<SelectOption> termFieldModel = new PropertyModel<SelectOption>(this, "termField");
        Form<?> form = new Form("form"){
            @Override
            protected void onSubmit() {
                // Collect the optional term/instructor filters and hand off to the
                // site-search results page.
                Map<String, String> advancedOptions = new HashMap<String,String>();
                if(termField != null && !"".equals(termField.getValue())){
                    advancedOptions.put(DelegatedAccessConstants.ADVANCED_SEARCH_TERM, termField.getValue());
                }
                if(instructorField != null && !"".equals(instructorField)){
                    advancedOptions.put(DelegatedAccessConstants.ADVANCED_SEARCH_INSTRUCTOR, instructorField);
                    advancedOptions.put(DelegatedAccessConstants.ADVANCED_SEARCH_INSTRUCTOR_TYPE, selectedInstructorOption);
                }
                //need to set the tree model so that is is the full model
                setResponsePage(new UserPageSiteSearch(search, advancedOptions, false, false));
            }
            @Override
            public boolean isVisible() {
                return treeModel != null || isShoppingPeriodTool() || (!isShoppingPeriodTool() && sakaiProxy.getDisableUserTreeView());
            }
        };
        // "Instructor:" in the shopping tool, "User:" otherwise.
        AbstractReadOnlyModel<String> instructorFieldLabelModel = new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                if(isShoppingPeriodTool()){
                    return new StringResourceModel("instructor", null).getObject() + ":";
                }else{
                    return new StringResourceModel("user", null).getObject() + ":";
                }
            }
        };
        form.add(new Label("instructorFieldLabel", instructorFieldLabelModel));
        form.add(new TextField<String>("search", messageModel));
        form.add(new TextField<String>("instructorField", instructorFieldModel));
        //Instructor Options:
        RadioGroup group = new RadioGroup("instructorOptionsGroup", new PropertyModel<String>(this, "selectedInstructorOption")){
            @Override
            public boolean isVisible() {
                //only show if its not shopping period
                return !isShoppingPeriodTool();
            }
        };
        group.add(new Radio("instructorOption", Model.of(DelegatedAccessConstants.ADVANCED_SEARCH_INSTRUCTOR_TYPE_INSTRUCTOR)));
        group.add(new Radio("memberOption", Model.of(DelegatedAccessConstants.ADVANCED_SEARCH_INSTRUCTOR_TYPE_MEMBER)));
        form.add(group);
        // Term dropdown populated from sakaiProxy.getTerms() ([label, value] pairs
        // per the SelectOption constructor usage below).
        List<SelectOption> termOptions = new ArrayList<SelectOption>();
        for(String[] entry : sakaiProxy.getTerms()){
            termOptions.add(new SelectOption(entry[1], entry[0]));
        }
        ChoiceRenderer choiceRenderer = new ChoiceRenderer("label", "value");
        DropDownChoice termFieldDropDown = new DropDownChoice("termField", termFieldModel, termOptions, choiceRenderer);
        //keeps the null option (choose one) after a user selects an option
        termFieldDropDown.setNullValid(true);
        form.add(termFieldDropDown);
        form.add(new WebMarkupContainer("searchHeader"));
        form.add(new Button("submitButton"));
        add(form);
    }

    /**
     * Populate {@code treeModel} for the given user, or set it to null when the
     * tree view is disabled for the current tool mode or (shopping mode only)
     * the model has no children.
     *
     * @param userId
     *            The user whose access tree is built (ignored in shopping mode,
     *            which always uses the shopping-period pseudo user).
     * @param cascade
     *            Passed through to createAccessTreeModelForUser.
     */
    private void setTreeModel(String userId, boolean cascade){
        if(isShoppingPeriodTool()){
            if(sakaiProxy.getDisableShoppingTreeView()){
                treeModel = null;
            }else{
                treeModel = projectLogic.createAccessTreeModelForUser(DelegatedAccessConstants.SHOPPING_PERIOD_USER, false, cascade);
                if(treeModel != null && ((DefaultMutableTreeNode) treeModel.getRoot()).getChildCount() == 0){
                    treeModel = null;
                }
            }
        }else{
            if(sakaiProxy.getDisableUserTreeView()){
                treeModel = null;
            }else{
                treeModel = projectLogic.createAccessTreeModelForUser(userId, false, cascade);
            }
        }
    }
}
| |
package ru.slonos;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.audio.Music;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.utils.TimeUtils;
import java.util.Iterator;
/**
 * Main gameplay screen: the elephant at the bottom of the screen follows the
 * player's touch and shoots upward at balloons while a countdown runs. The
 * player wins if all balloons are destroyed before the countdown expires,
 * and loses otherwise.
 */
class GameScreen implements Screen {

    private final ElephantHitGame game;
    // Scratch vector for converting touch/screen coordinates to world coordinates.
    private Vector3 touchPos;
    //Resources and objects
    //back
    private Texture backIimg;
    private Sprite backSprite;
    private Music backMusic;
    // Screen creation time in nanoseconds; the countdown is measured from here.
    private long startTime;
    private Elephant elephant;
    //baloons
    private Baloons baloons;
    //Infrastructure
    private OrthographicCamera camera;

    GameScreen(final ElephantHitGame game) {
        this.game = game;
        startTime = TimeUtils.nanoTime();
        touchPos = new Vector3();
        camera = new OrthographicCamera();
        //init Resources
        //back
        backIimg = new Texture("back.png");
        backSprite = new Sprite(backIimg);
        backSprite.setOrigin(0,0);
        backMusic = Gdx.audio.newMusic(Gdx.files.internal("music.mp3"));
        backMusic.play();
        backMusic.setVolume(backMusic.getVolume()*0.4f);
        elephant = new Elephant();
        baloons = new Baloons(64, 20);
    }

    @Override
    public void show() {
    }

    @Override
    public void render(float delta) {
        camera.setToOrtho(false,ElephantHitGame.WIDTH,ElephantHitGame.HEIGHT);
        Gdx.gl.glClearColor(1, 0, 0, 1);
        Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
        camera.update();
        game.batch.setProjectionMatrix(camera.combined);
        //DRAW
        game.batch.begin();
        //Draw Back
        game.batch.draw(backSprite,backSprite.getX(),backSprite.getY(),ElephantHitGame.WIDTH,ElephantHitGame.HEIGHT);
        // draw other stuff
        elephant.draw(game.batch);
        baloons.draw(game.batch);
        //drawCnt
        game.font.draw(game.batch, "Baloons: "+String.valueOf(baloons.getCount()), 15, ElephantHitGame.HEIGHT-20);
        long dur = getGameDuration();
        game.font.draw(game.batch, "Countdown: "+String.valueOf(dur), ElephantHitGame.WIDTH-(("Countdown: "+ElephantHitGame.GAME_DURATION).length()-1)*game.font.getXHeight()-15, ElephantHitGame.HEIGHT-20);
        game.batch.end();
        inputControl();
        moveNonUserObjects();
        checkEndGame();
    }

    /** Follow the player's touch with the elephant and fire rate-limited shots. */
    private void inputControl() {
        // Minimum delay between shots, in nanoseconds (120 ms).
        long shootTimeout = 120000000;
        if(Gdx.input.isTouched()) {
            touchPos.set(Gdx.input.getX(), Gdx.input.getY(), 0);
            camera.unproject(touchPos);
            elephant.setX(touchPos.x-elephant.getWidth()/2);
            // NOTE(review): this compares an unprojected world coordinate
            // (touchPos.x) against a raw screen coordinate (Gdx.input.getX()) —
            // confirm the mixed coordinate spaces are intentional.
            if (MathUtils.ceilPositive(touchPos.x-Gdx.input.getX())<elephant.getWidth() && TimeUtils.nanoTime()-elephant.getlashShootTime() > shootTimeout) {
                elephant.shoot(elephant.getX()+ elephant.getWidth()/2.2f,elephant.getY()+elephant.getHeight());
            }
        }
    }

    /** Advance each shot upward, cull offscreen shots, and resolve balloon hits. */
    private void moveNonUserObjects() {
        Iterator<Sprite> iter = elephant.getShootsSpriteIterator();
        while(iter.hasNext()) {
            Sprite shoot = iter.next();
            shoot.setY(shoot.getY()+ 340*Gdx.graphics.getDeltaTime());
            if (shoot.getY()>ElephantHitGame.HEIGHT+elephant.getShootHeight()) {
                // FIX: previously fell through to the collision check after removal,
                // using the removed sprite and risking a second iter.remove()
                // without next() (IllegalStateException).
                iter.remove();
                continue;
            }
            Iterator<Sprite> bal = baloons.getSpriteIterator();
            while(bal.hasNext()) {
                Sprite baloon=bal.next();
                if (shoot.getBoundingRectangle().overlaps(baloon.getBoundingRectangle())) {
                    bal.remove();
                    iter.remove();
                    baloons.playDestroySound();
                    baloons.setDestroyTime(TimeUtils.nanoTime());
                    break;
                }
            }
        }
    }

    /** Decide win/lose once the balloons are gone or the countdown reaches zero. */
    private void checkEndGame() {
        // Renamed from CheckEndGame (private, so no external callers) to follow
        // Java method naming conventions.
        if (baloons.isEmpty(TimeUtils.nanoTime()) && getGameDuration()>0) {
            String message="YOU ARE WINNER!!!!";
            game.setScreen(new EndGameScreen(game,message));
            dispose();
        }
        if (getGameDuration()<=0 && !baloons.isEmpty(TimeUtils.nanoTime())) {
            String message="YOU ARE LOSER!!!!";
            game.setScreen(new EndGameScreen(game,message));
            dispose();
        }
    }

    /** Seconds remaining in the game (negative once the countdown has expired). */
    private long getGameDuration() {
        // FIX: use long literals so the seconds-to-nanoseconds conversion cannot
        // overflow in int arithmetic before widening.
        return (ElephantHitGame.GAME_DURATION*1000000000L -TimeUtils.nanoTime()+this.startTime)/1000000000L;
    }

    @Override
    public void resize(int width, int height) {
    }

    @Override
    public void pause() {
    }

    @Override
    public void resume() {
    }

    @Override
    public void hide() {
    }

    @Override
    public void dispose() {
        backIimg.dispose();
        backMusic.dispose();
        elephant.dispose();
        baloons.dispose();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.