lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
|---|---|---|---|---|---|---|---|---|---|---|---|
Java
|
apache-2.0
|
e866faf612e9a7d165f3cbfb721b56f59f754db1
| 0
|
deeplearning4j/nd4j,ambraspace/nd4j,huitseeker/nd4j,deeplearning4j/nd4j,huitseeker/nd4j,ambraspace/nd4j,gagatust/nd4j,gagatust/nd4j,smarthi/nd4j,smarthi/nd4j
|
package org.nd4j.linalg.lossfunctions.impl;
import lombok.EqualsAndHashCode;
import org.apache.commons.math3.util.Pair;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.ILossFunction;
import org.nd4j.linalg.ops.transforms.Transforms;
/**
 * Mean Squared Logarithmic Error (MSLE) loss function.
 *
 * Per-element loss: (log((output + 1) / (labels + 1)))^2, normalized by the
 * number of output columns. NOTE(review): assumes activationFn names an nd4j
 * transform op and that labels/preOutput share the same shape — confirm
 * against callers.
 */
@EqualsAndHashCode
public class LossMSLE implements ILossFunction {

    /**
     * Computes the per-element (optionally masked) MSLE score contributions.
     *
     * @param labels       expected outputs
     * @param preOutput    pre-activation network output
     * @param activationFn name of the activation transform to apply
     * @param mask         optional column mask (one entry per example); may be null
     * @return per-element score contributions
     */
    public INDArray scoreArray(INDArray labels, INDArray preOutput, String activationFn, INDArray mask) {
        // Apply the activation to a copy of preOutput so the caller's array is untouched.
        INDArray activated = Nd4j.getExecutioner().execAndReturn(
                Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
        // log((activated + 1) / (labels + 1)); in-place ops are safe on the dup'd array.
        INDArray logRatio = Transforms.log(activated.addi(1.0).divi(labels.add(1.0)), false);
        // Square element-wise, then normalize by the number of output columns.
        INDArray perElement = logRatio.muli(logRatio).divi(labels.size(1));
        if (mask != null) {
            perElement.muliColumnVector(mask);
        }
        return perElement;
    }

    /**
     * Total score over all elements; optionally averaged over examples (rows).
     */
    @Override
    public double computeScore(INDArray labels, INDArray preOutput, String activationFn, INDArray mask, boolean average) {
        INDArray perElement = scoreArray(labels, preOutput, activationFn, mask);
        double total = perElement.sumNumber().doubleValue();
        return average ? total / perElement.size(0) : total;
    }

    /**
     * One score per example: sum of the per-element contributions along dimension 1.
     */
    @Override
    public INDArray computeScoreArray(INDArray labels, INDArray preOutput, String activationFn, INDArray mask) {
        return scoreArray(labels, preOutput, activationFn, mask).sum(1);
    }

    /**
     * Gradient of the score w.r.t. the pre-activations:
     * (2/n) * sigma'(z) / (output + 1) * log((output + 1) / (labels + 1)).
     */
    @Override
    public INDArray computeGradient(INDArray labels, INDArray preOutput, String activationFn, INDArray mask) {
        INDArray activated = Nd4j.getExecutioner().execAndReturn(
                Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
        INDArray actDeriv = Nd4j.getExecutioner().execAndReturn(
                Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()).derivative());
        INDArray outPlusOne = activated.add(1.0);
        INDArray grad = actDeriv.divi(outPlusOne).muli(2.0 / labels.size(1));
        // The in-place divi below consumes outPlusOne only AFTER grad was derived from it.
        INDArray logRatio = Transforms.log(outPlusOne.divi(labels.add(1.0)), false);
        grad.muli(logRatio);
        if (mask != null) {
            grad.muliColumnVector(mask);
        }
        return grad;
    }

    /**
     * Convenience wrapper returning both the score and the gradient.
     */
    @Override
    public org.apache.commons.math3.util.Pair<Double, INDArray> computeGradientAndScore(INDArray labels, INDArray preOutput, String activationFn, INDArray mask, boolean average) {
        //TODO: probably a more efficient way to do this...
        double score = computeScore(labels, preOutput, activationFn, mask, average);
        INDArray gradient = computeGradient(labels, preOutput, activationFn, mask);
        return new Pair<>(score, gradient);
    }

    @Override
    public String toString() {
        return "LossMSLE()";
    }
}
|
nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/lossfunctions/impl/LossMSLE.java
|
package org.nd4j.linalg.lossfunctions.impl;
import lombok.EqualsAndHashCode;
import org.apache.commons.math3.util.Pair;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.ILossFunction;
import org.nd4j.linalg.ops.transforms.Transforms;
/**
 * Mean Squared Logarithmic Error (MSLE) loss — pre-fix implementation.
 *
 * Per-element score here is (log(activated) - log(labels))^2.
 * NOTE(review): log(labels) is undefined when a label is zero; the
 * conventional MSLE uses log(x + 1) on both terms — confirm the intended
 * formula (the follow-up commit "Fix LossMSLE score and gradient" changes it).
 */
@EqualsAndHashCode
public class LossMSLE implements ILossFunction {
// Per-element (optionally masked) score contributions, one per output element.
public INDArray scoreArray(INDArray labels, INDArray preOutput, String activationFn, INDArray mask) {
INDArray scoreArr;
// Apply the named activation transform to a copy so preOutput is not mutated.
INDArray postOutput = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
scoreArr = Transforms.log(postOutput);
scoreArr.subi(Transforms.log(labels));
// Square element-wise.
scoreArr = scoreArr.mul(scoreArr);
// mask is a column vector: zero entries drop whole examples (rows).
if (mask != null) scoreArr.muliColumnVector(mask);
return scoreArr;
}
@Override
public double computeScore(INDArray labels, INDArray preOutput, String activationFn, INDArray mask, boolean average) {
INDArray scoreArr = scoreArray(labels, preOutput, activationFn, mask);
// Total score over all elements; optionally averaged over examples (rows).
double score = scoreArr.sumNumber().doubleValue();
if(average) score /= scoreArr.size(0);
return score;
}
@Override
public INDArray computeScoreArray(INDArray labels, INDArray preOutput, String activationFn, INDArray mask) {
INDArray scoreArr = scoreArray(labels, preOutput, activationFn, mask);
// One score per example: sum along dimension 1 (columns).
return scoreArr.sum(1);
}
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, String activationFn, INDArray mask) {
// Activation output and its derivative w.r.t. the pre-activations.
INDArray postOutput = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
INDArray postOutDer = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn,preOutput.dup()).derivative());
INDArray logyHat = Transforms.log(postOutput);
// gradient = 2 * (log(yHat) - log(y)) * sigma'(z) / (yHat + eps)
INDArray gradients = logyHat.sub(Transforms.log(labels));
gradients.muli(2);
gradients.muli(postOutDer);
// addi mutates postOutput in place; safe here because it is not read again.
// EPS_THRESHOLD guards against division by zero when the activation output is 0.
gradients.divi(postOutput.addi(Nd4j.EPS_THRESHOLD));
if(mask != null){
gradients.muliColumnVector(mask);
}
return gradients;
}
// Convenience wrapper returning both the score and the gradient.
@Override
public org.apache.commons.math3.util.Pair<Double, INDArray> computeGradientAndScore(INDArray labels, INDArray preOutput, String activationFn, INDArray mask, boolean average) {
//TODO: probably a more efficient way to do this...
//Yes - will implement in round two. Just want to get done now.
return new Pair<>(
computeScore(labels, preOutput, activationFn, mask, average),
computeGradient(labels, preOutput, activationFn, mask));
}
@Override
public String toString(){
return "LossMSLE()";
}
}
|
Fix LossMSLE score and gradient
|
nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/lossfunctions/impl/LossMSLE.java
|
Fix LossMSLE score and gradient
|
|
Java
|
apache-2.0
|
b3a8ae7ab20f51e53ef68d0af3f9584e66817c45
| 0
|
ham1/jmeter,ham1/jmeter,benbenw/jmeter,etnetera/jmeter,apache/jmeter,ham1/jmeter,ham1/jmeter,etnetera/jmeter,apache/jmeter,etnetera/jmeter,benbenw/jmeter,apache/jmeter,apache/jmeter,apache/jmeter,benbenw/jmeter,benbenw/jmeter,etnetera/jmeter,ham1/jmeter,etnetera/jmeter
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.protocol.jms.sampler;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.TextMessage;
import org.apache.jmeter.samplers.Entry;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.testelement.TestListener;
import org.apache.jmeter.engine.event.LoopIterationEvent;
import org.apache.jmeter.protocol.jms.control.gui.JMSSubscriberGui;
import org.apache.jmeter.protocol.jms.client.ClientPool;
import org.apache.jmeter.protocol.jms.client.OnMessageSubscriber;
import org.apache.jmeter.protocol.jms.client.ReceiveSubscriber;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
/**
 * JMS Subscriber sampler: receives messages from a topic either by blocking
 * on a {@code ReceiveSubscriber} or by acting as a {@code MessageListener}
 * itself, and reports the received content as a {@code SampleResult}.
 */
public class SubscriberSampler extends BaseJMSSampler implements TestListener, MessageListener {

    private static final long serialVersionUID = 233L;

    private static final Logger log = LoggingManager.getLoggerForClass();

    /** Receive-mode client; created lazily on the first receive-mode sample. */
    private transient ReceiveSubscriber SUBSCRIBER = null;

    /** Accumulates message text in listener mode; guarded by "this". Nulled by testEnded(). */
    private StringBuffer BUFFER = new StringBuffer();

    /** Messages received so far in listener mode; guarded by "this". */
    private transient int counter = 0;

    /** volatile: cleared by testEnded() from another thread to stop the sampling loop. */
    private volatile boolean isRunning;

    private static final String CLIENT_CHOICE = "jms.client_choice"; // $NON-NLS-1$

    public SubscriberSampler() {
    }

    public void testEnded(String test) {
        testEnded();
    }

    public void testStarted(String test) {
        testStarted();
    }

    /**
     * Called by JMeter's engine when the test ends: stops the sampling loop,
     * resets the counter, releases pooled clients and drops the buffer.
     */
    public synchronized void testEnded() {
        log.info("SubscriberSampler.testEnded called");
        this.isRunning = false;
        this.resetCount();
        ClientPool.clearClient();
        this.BUFFER = null;
        this.SUBSCRIBER = null;
    }

    /*
     * (non-Javadoc)
     *
     * @see junit.framework.TestListener#startTest(junit.framework.Test)
     */
    public void testStarted() {
    }

    public void testIterationStart(LoopIterationEvent event) {
    }

    /**
     * Create (or fetch from the pool) the OnMessageSubscriber client and set
     * this sampler as the message listener.
     */
    private OnMessageSubscriber initListenerClient() {
        OnMessageSubscriber sub = (OnMessageSubscriber) ClientPool.get(this);
        if (sub == null) {
            sub = new OnMessageSubscriber(this.getUseJNDIPropertiesAsBoolean(), this.getJNDIInitialContextFactory(),
                    this.getProviderUrl(), this.getConnectionFactory(), this.getTopic(), this.isUseAuth(), this
                    .getUsername(), this.getPassword());
            sub.setMessageListener(this);
            sub.resume();
            ClientPool.addClient(sub);
            ClientPool.put(this, sub);
            log.info("SubscriberSampler.initListenerClient called");
            log.info("loop count " + this.getIterations());
        }
        return sub;
    }

    /**
     * Create the ReceiveSubscriber client for the sampler.
     */
    public void initReceiveClient() {
        this.SUBSCRIBER = new ReceiveSubscriber(this.getUseJNDIPropertiesAsBoolean(), this
                .getJNDIInitialContextFactory(), this.getProviderUrl(), this.getConnectionFactory(), this.getTopic(),
                this.isUseAuth(), this.getUsername(), this.getPassword());
        this.SUBSCRIBER.resume();
        ClientPool.addClient(this.SUBSCRIBER);
        log.info("SubscriberSampler.initReceiveClient called");
    }

    /*
     * (non-Javadoc)
     *
     * @see org.apache.jmeter.samplers.Sampler#sample(org.apache.jmeter.samplers.Entry)
     */
    public SampleResult sample(Entry e) {
        return this.sample();
    }

    /**
     * Dispatch to the receive- or listener-based sampling method according to
     * the configured client choice.
     *
     * @return the appropriate sample result
     */
    public SampleResult sample() {
        if (this.getClientChoice().equals(JMSSubscriberGui.receive_str)) {
            return sampleWithReceive();
        }
        return sampleWithListener();
    }

    /**
     * Blocks (busy-waits) until the expected number of messages have been
     * delivered to onMessage(), or the test ends, or the thread is interrupted.
     *
     * @return the sample result
     */
    private SampleResult sampleWithListener() {
        SampleResult result = new SampleResult();
        result.setSampleLabel(getName());
        initListenerClient();
        this.isRunning = true;
        int loop = this.getIterationCount();
        result.sampleStart();
        while (this.isRunning && this.count(0) < loop) {
            try {
                Thread.sleep(0, 50);
            } catch (InterruptedException ie) {
                // Restore the interrupt flag and stop waiting instead of swallowing it,
                // so JMeter can stop the thread promptly.
                Thread.currentThread().interrupt();
                break;
            }
        }
        result.sampleEnd();
        result.setResponseMessage(loop + " samples messages recieved");
        synchronized (this) {
            // testEnded() may have nulled the buffer concurrently; treat that as "no data".
            StringBuffer buf = this.BUFFER;
            String content = (buf == null) ? "" : buf.toString();
            if (this.getReadResponseAsBoolean()) {
                result.setResponseData(content.getBytes());
            } else {
                result.setBytes(content.getBytes().length);
            }
        }
        result.setSuccessful(true);
        result.setResponseCode(loop + " message(s) recieved successfully");
        result.setSamplerData("Not applicable");
        result.setSampleCount(loop);
        this.resetCount();
        return result;
    }

    /**
     * Samples via the blocking ReceiveSubscriber client instead of the
     * onMessage approach.
     *
     * @return the sample result
     */
    private SampleResult sampleWithReceive() {
        SampleResult result = new SampleResult();
        result.setSampleLabel(getName());
        if (this.SUBSCRIBER == null) {
            this.initReceiveClient();
            this.SUBSCRIBER.start();
        }
        int loop = this.getIterationCount();
        this.SUBSCRIBER.setLoop(loop);
        result.sampleStart();
        while (this.SUBSCRIBER.count(0) < loop) {
            try {
                Thread.sleep(0, 50);
            } catch (InterruptedException ie) {
                // Restore the interrupt flag and stop waiting instead of swallowing it.
                Thread.currentThread().interrupt();
                break;
            }
        }
        result.sampleEnd();
        result.setResponseMessage(loop + " samples messages recieved");
        if (this.getReadResponseAsBoolean()) {
            result.setResponseData(this.SUBSCRIBER.getMessage().getBytes());
        } else {
            result.setBytes(this.SUBSCRIBER.getMessage().getBytes().length);
        }
        result.setSuccessful(true);
        result.setResponseCode(loop + " message(s) recieved successfully");
        result.setSamplerData("Not applicable");
        result.setSampleCount(loop);
        this.SUBSCRIBER.clear();
        this.SUBSCRIBER.resetCount();
        return result;
    }

    /**
     * The sampler implements MessageListener directly and sets itself as the
     * listener with the TopicSubscriber. Appends each TextMessage body to the
     * buffer and bumps the counter.
     */
    public synchronized void onMessage(Message message) {
        try {
            if (message instanceof TextMessage) {
                TextMessage msg = (TextMessage) message;
                String content = msg.getText();
                // BUFFER is null once testEnded() has run; drop late deliveries.
                if (content != null && this.BUFFER != null) {
                    this.BUFFER.append(content);
                    count(1);
                }
            }
        } catch (JMSException e) {
            log.error(e.getMessage());
        }
    }

    /**
     * Increment the count and return the new value.
     *
     * @param increment amount to add (0 simply reads the current value)
     * @return the new value
     */
    private synchronized int count(int increment) {
        this.counter += increment;
        return this.counter;
    }

    /**
     * Set the counter to zero and empty the buffer (if still present).
     */
    private synchronized void resetCount() {
        this.counter = 0;
        if (this.BUFFER != null) {
            this.BUFFER.setLength(0);
        }
    }

    // ----------- get/set methods ------------------- //

    /**
     * Set the client choice. There are two options: ReceiveSubscriber and
     * OnMessageSubscriber.
     */
    public void setClientChoice(String choice) {
        setProperty(CLIENT_CHOICE, choice);
    }

    /**
     * Return the client choice.
     *
     * @return the client choice
     */
    public String getClientChoice() {
        return getPropertyAsString(CLIENT_CHOICE);
    }
}
|
src/protocol/jms/org/apache/jmeter/protocol/jms/sampler/SubscriberSampler.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.protocol.jms.sampler;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.TextMessage;
import org.apache.jmeter.samplers.Entry;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.testelement.TestListener;
import org.apache.jmeter.engine.event.LoopIterationEvent;
import org.apache.jmeter.protocol.jms.control.gui.JMSSubscriberGui;
import org.apache.jmeter.protocol.jms.client.ClientPool;
import org.apache.jmeter.protocol.jms.client.OnMessageSubscriber;
import org.apache.jmeter.protocol.jms.client.ReceiveSubscriber;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
/**
 * This class implements the JMS Subscriber sampler (pre-fix version; the
 * follow-up commit "Fix some thread-safety issues" reworks the flag and
 * loop-count handling).
 */
public class SubscriberSampler extends BaseJMSSampler implements TestListener, MessageListener {
private static final long serialVersionUID = 233L;
// private Subscriber SUBSCRIBER = null;
private static final Logger log = LoggingManager.getLoggerForClass();
// Receive-mode client; created lazily on the first receive-mode sample.
private transient ReceiveSubscriber SUBSCRIBER = null;
// Accumulates message text in listener mode; appended under the "this" lock by onMessage().
private StringBuffer BUFFER = new StringBuffer();
// Messages received so far in listener mode.
private transient int counter = 0;
// NOTE(review): instance field rewritten on every sample; not safe if this
// sampler instance is shared between threads — confirm JMeter's sharing model.
private transient int loop = 0;
// NOTE(review): RUN is cleared by testEnded() on another thread but is not
// volatile, so the busy-wait loop in sampleWithListener() is not guaranteed
// to see the update — consider making it volatile.
private transient boolean RUN = true;
private static final String CLIENT_CHOICE = "jms.client_choice"; // $NON-NLS-1$
public SubscriberSampler() {
}
public void testEnded(String test) {
testEnded();
}
public void testStarted(String test) {
testStarted();
}
/**
 * testEnded is called by JMeter's engine. The implementation resets the
 * count, sets RUN to false and clears the StringBuffer.
 */
public synchronized void testEnded() {
log.info("SubscriberSampler.testEnded called");
this.RUN = false;
this.resetCount();
ClientPool.clearClient();
// NOTE(review): BUFFER is nulled here but never re-created; a later
// onMessage()/resetCount() call would throw NullPointerException.
this.BUFFER = null;
if (this.SUBSCRIBER != null) {
this.SUBSCRIBER = null;
}
}
/*
 * (non-Javadoc)
 *
 * @see junit.framework.TestListener#startTest(junit.framework.Test)
 */
public void testStarted() {
}
public void testIterationStart(LoopIterationEvent event) {
}
/**
 * Create (or fetch from the pool) the OnMessageSubscriber client and set
 * this sampler as the message listener. Also re-arms the RUN flag for the
 * next sampling loop.
 */
public synchronized OnMessageSubscriber initListenerClient() {
OnMessageSubscriber sub = (OnMessageSubscriber) ClientPool.get(this);
if (sub == null) {
sub = new OnMessageSubscriber(this.getUseJNDIPropertiesAsBoolean(), this.getJNDIInitialContextFactory(),
this.getProviderUrl(), this.getConnectionFactory(), this.getTopic(), this.isUseAuth(), this
.getUsername(), this.getPassword());
sub.setMessageListener(this);
sub.resume();
ClientPool.addClient(sub);
ClientPool.put(this, sub);
log.info("SubscriberSampler.initListenerClient called");
log.info("loop count " + this.getIterations());
}
// Re-arm the sampling loop flag for this run.
this.RUN = true;
return sub;
}
/**
 * Create the ReceiveSubscriber client for the sampler.
 */
public void initReceiveClient() {
this.SUBSCRIBER = new ReceiveSubscriber(this.getUseJNDIPropertiesAsBoolean(), this
.getJNDIInitialContextFactory(), this.getProviderUrl(), this.getConnectionFactory(), this.getTopic(),
this.isUseAuth(), this.getUsername(), this.getPassword());
this.SUBSCRIBER.resume();
ClientPool.addClient(this.SUBSCRIBER);
log.info("SubscriberSampler.initReceiveClient called");
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.jmeter.samplers.Sampler#sample(org.apache.jmeter.samplers.Entry)
 */
public SampleResult sample(Entry e) {
return this.sample();
}
/**
 * sample method will check which client it should use and call the
 * appropriate client specific sample method.
 *
 * @return the appropriate sample result
 */
public SampleResult sample() {
if (this.getClientChoice().equals(JMSSubscriberGui.receive_str)) {
return sampleWithReceive();
} else {
return sampleWithListener();
}
}
/**
 * Blocks (busy-waits) until messages are received or testEnded() clears RUN.
 *
 * @return the sample result
 */
public SampleResult sampleWithListener() {
SampleResult result = new SampleResult();
result.setSampleLabel(getName());
OnMessageSubscriber sub = initListenerClient();
this.loop = this.getIterationCount();
result.sampleStart();
// Busy-wait until enough messages arrive or testEnded() clears RUN.
while (this.RUN && this.count(0) < this.loop) {
try {
Thread.sleep(0, 50);
} catch (Exception e) {
// NOTE(review): swallows InterruptedException without restoring the
// interrupt flag — confirm whether the loop should abort on interrupt.
log.info(e.getMessage());
}
}
result.sampleEnd();
result.setResponseMessage(loop + " samples messages recieved");
// NOTE(review): BUFFER is read here without synchronization while
// onMessage() may still be appending under the "this" lock.
if (this.getReadResponseAsBoolean()) {
result.setResponseData(this.BUFFER.toString().getBytes());
} else {
result.setBytes(this.BUFFER.toString().getBytes().length);
}
result.setSuccessful(true);
result.setResponseCode(loop + " message(s) recieved successfully");
result.setSamplerData("Not applicable");
result.setSampleCount(loop);
this.resetCount();
return result;
}
/**
 * Sample method uses the ReceiveSubscriber client instead of the onMessage
 * approach.
 *
 * @return the sample result
 */
public SampleResult sampleWithReceive() {
SampleResult result = new SampleResult();
result.setSampleLabel(getName());
if (this.SUBSCRIBER == null) {
this.initReceiveClient();
this.SUBSCRIBER.start();
}
this.loop = this.getIterationCount();
this.SUBSCRIBER.setLoop(this.loop);
result.sampleStart();
// Busy-wait until the subscriber has received the requested message count.
while (this.SUBSCRIBER.count(0) < this.loop) {
try {
Thread.sleep(0, 50);
} catch (Exception e) {
log.info(e.getMessage());
}
}
result.sampleEnd();
result.setResponseMessage(loop + " samples messages recieved");
if (this.getReadResponseAsBoolean()) {
result.setResponseData(this.SUBSCRIBER.getMessage().getBytes());
} else {
result.setBytes(this.SUBSCRIBER.getMessage().getBytes().length);
}
result.setSuccessful(true);
result.setResponseCode(loop + " message(s) recieved successfully");
result.setSamplerData("Not applicable");
result.setSampleCount(this.loop);
this.SUBSCRIBER.clear();
this.SUBSCRIBER.resetCount();
return result;
}
/**
 * The sampler implements MessageListener directly and sets itself as the
 * listener with the TopicSubscriber. Appends each TextMessage body to the
 * buffer and bumps the counter.
 */
public synchronized void onMessage(Message message) {
try {
if (message instanceof TextMessage) {
TextMessage msg = (TextMessage) message;
String content = msg.getText();
if (content != null) {
this.BUFFER.append(content);
count(1);
}
}
} catch (JMSException e) {
log.error(e.getMessage());
}
}
/**
 * Increment the count and return the new value.
 *
 * @param increment amount to add (0 simply reads the current value)
 * @return the new value
 */
public synchronized int count(int increment) {
this.counter += increment;
return this.counter;
}
/**
 * resetCount will set the counter to zero and set the length of the
 * StringBuffer to zero.
 */
public synchronized void resetCount() {
this.counter = 0;
this.BUFFER.setLength(0);
}
// ----------- get/set methods ------------------- //
/**
 * Set the client choice. There are two options: ReceiveSubscriber and
 * OnMessageSubscriber.
 */
public void setClientChoice(String choice) {
setProperty(CLIENT_CHOICE, choice);
}
/**
 * Return the client choice.
 *
 * @return the client choice
 */
public String getClientChoice() {
return getPropertyAsString(CLIENT_CHOICE);
}
}
|
Fix some thread-safety issues
git-svn-id: https://svn.apache.org/repos/asf/jakarta/jmeter/trunk@712034 13f79535-47bb-0310-9956-ffa450edef68
Former-commit-id: 556a19817ece22029a5e11c399031588633f0a59
|
src/protocol/jms/org/apache/jmeter/protocol/jms/sampler/SubscriberSampler.java
|
Fix some thread-safety issues
|
|
Java
|
apache-2.0
|
bc64fcdec7c4a13654cde955406c6f8c974da53d
| 0
|
AlexeyKashintsev/PlatypusJS,altsoft/PlatypusJS,vadimv/PlatypusJS,vadimv/PlatypusJS,AlexeyKashintsev/PlatypusJS,jskonst/PlatypusJS,marat-gainullin/platypus-js,vadimv/PlatypusJS,jskonst/PlatypusJS,marat-gainullin/PlatypusJS,jskonst/PlatypusJS,marat-gainullin/PlatypusJS,altsoft/PlatypusJS,jskonst/PlatypusJS,marat-gainullin/platypus-js,AlexeyKashintsev/PlatypusJS,marat-gainullin/platypus-js,vadimv/PlatypusJS,marat-gainullin/PlatypusJS,AlexeyKashintsev/PlatypusJS,altsoft/PlatypusJS
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.designer.application.module.completion;
import com.eas.script.ScriptUtils;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
import javax.swing.text.BadLocationException;
import jdk.nashorn.internal.ir.AccessNode;
import jdk.nashorn.internal.ir.FunctionNode;
import jdk.nashorn.internal.ir.IdentNode;
import jdk.nashorn.internal.ir.LexicalContext;
import jdk.nashorn.internal.ir.Node;
import jdk.nashorn.internal.ir.visitor.NodeVisitor;
import jdk.nashorn.internal.parser.Token;
import org.netbeans.modules.editor.NbEditorDocument;
/**
 * Describes the code-completion context at a caret position in a JavaScript
 * document: the parsed AST, the chain of identifier tokens preceding the
 * caret, and the word boundaries used for filtering completion items.
 *
 * @author vv
 */
public class CompletionPoint {
private final static char DOT_CHARACTER = '.';//NOI18N
// Prefix typed at the caret, used to filter completion items.
private String filter = "";//NOI18N
// Identifier tokens of the access chain (e.g. a.b.c) before the caret.
private List<CompletionToken> completionTokens;
private int caretBeginWordOffset;
private int caretEndWordOffset;
private FunctionNode astRoot;
public String getFilter() {
return filter;
}
public List<CompletionToken> getCompletionTokens() {
return completionTokens;
}
public int getCaretBeginWordOffset() {
return caretBeginWordOffset;
}
public int getCaretEndWordOffset() {
return caretEndWordOffset;
}
public FunctionNode getAstRoot() {
return astRoot;
}
/**
 * Factory: parses the document (with comments blanked out) and collects the
 * completion context around caretOffset.
 */
public static CompletionPoint createInstance(NbEditorDocument doc, int caretOffset) throws Exception {
final CompletionPoint cp = new CompletionPoint();
if (caretOffset > 0) {
char caretPositionChar = doc.getChars(caretOffset, 1)[0];
char preCaretPositionChar = doc.getChars(caretOffset - 1, 1)[0];
boolean inBetweenSentence = false;
if (Character.isJavaIdentifierPart(preCaretPositionChar) || preCaretPositionChar == DOT_CHARACTER) {
// True when the caret sits right after a '.', e.g. "obj.|"
boolean afterDotCaretPosintion = !Character.isJavaIdentifierPart(caretPositionChar)
&& preCaretPositionChar == DOT_CHARACTER;
// Comments are replaced with spaces so character offsets stay valid.
String docStr = removeComments(doc.getText(0, doc.getLength()));
// A trailing dot would break the parser, so it is blanked out first.
cp.astRoot = ScriptUtils.parseJs(
afterDotCaretPosintion
? sanitizeDot(docStr, caretOffset - 1) : docStr);
List<CompletionToken> ctxTokens = getContextTokens(cp.astRoot, afterDotCaretPosintion ? caretOffset - 1 : caretOffset);
List<CompletionToken> offsetTokens = getOffsetTokens(ctxTokens, caretOffset);
// More context tokens than finished ones means the caret is inside the chain.
inBetweenSentence = ctxTokens.size() > offsetTokens.size() + 1;
cp.completionTokens = offsetTokens;
}
cp.caretBeginWordOffset = getStartWordOffset(doc, caretOffset);
cp.caretEndWordOffset = getEndWordOffset(doc, caretOffset);
if (caretOffset - cp.caretBeginWordOffset > 0 && !inBetweenSentence) {
cp.filter = doc.getText(cp.caretBeginWordOffset, caretOffset - cp.caretBeginWordOffset);
}
}
return cp;
}
/**
 * Walks the AST and collects the identifier tokens belonging to the access
 * chain that contains the given offset.
 */
public static List<CompletionToken> getContextTokens(final Node ast, final int offset) {
class AccessNodeLexicalContext extends LexicalContext {
// Stack of AccessNodes currently being visited (innermost on top).
final Deque<AccessNode> accessNodes = new ArrayDeque<>();
}
final AccessNodeLexicalContext lc = new AccessNodeLexicalContext();
final List<CompletionToken> ctx = new ArrayList<>();
ast.accept(new NodeVisitor<AccessNodeLexicalContext>(lc) {
@Override
protected boolean enterDefault(Node node) {
return true;
}
@Override
public boolean enterAccessNode(AccessNode accessNode) {
lc.accessNodes.push(accessNode);
return super.enterAccessNode(accessNode);
}
@Override
public Node leaveAccessNode(AccessNode accessNode) {
lc.accessNodes.pop();
return super.leaveAccessNode(accessNode);
}
@Override
public boolean enterIdentNode(IdentNode identNode) {
// Collect the identifier when it belongs to the outermost access chain
// spanning the offset, or (outside any chain) spans the offset itself.
if (!lc.accessNodes.isEmpty()
&& ScriptUtils.isInNode(lc.accessNodes.peekLast(), identNode)
&& ScriptUtils.isInNode(lc.accessNodes.peekLast(), offset)
|| lc.accessNodes.isEmpty()
&& ScriptUtils.isInNode(identNode, offset)) {
ctx.add(new CompletionToken(identNode.getName(), CompletionTokenType.IDENTIFIER, identNode));
}
return true;
}
});
return ctx;
}
// Replaces every comment in text with spaces of the same length so that
// character offsets into the original document remain valid.
private static String removeComments(String text) {
StringBuilder sb = new StringBuilder();
int i = 0;
for (Long t : ScriptUtils.getCommentsTokens(text)) {
int offset = Token.descPosition(t);
int lenght = Token.descLength(t);
sb.append(text.substring(i, offset));
for (int j = 0; j < lenght; j++) {
sb.append(" ");//NOI18N
}
i = offset + lenght;
}
sb.append(text.substring(i));
return sb.toString();
}
// Returns the prefix of contextTokens whose nodes end strictly before offset.
private static List<CompletionToken> getOffsetTokens(List<CompletionToken> contextTokens, int offset) {
final List<CompletionToken> tokens = new ArrayList<>();
for (CompletionToken token : contextTokens) {
if (token.node.getFinish() < offset) {
tokens.add(token);
} else {
break;
}
}
return tokens;
}
// Replaces the single character at position (the trailing dot) with a space.
private static String sanitizeDot(String str, int position) {
StringBuilder sb = new StringBuilder(str.substring(0, position));
sb.append(" "); //NOI18N
sb.append(str.substring(position + 1));
return sb.toString();
}
// Scans backwards from the caret to the start of the identifier word.
private static int getStartWordOffset(NbEditorDocument aDoc, int caretOffset) throws Exception {
while (caretOffset > 0 && aDoc.getLength() > 0
&& (Character.isJavaIdentifierPart(aDoc.getText(caretOffset - 1, 1).toCharArray()[0]))) {
caretOffset--;
}
return caretOffset;
}
// Scans forwards from the caret to the end of the identifier word.
private static int getEndWordOffset(NbEditorDocument aDoc, int caretOffset) throws BadLocationException {
while (caretOffset < aDoc.getLength() && aDoc.getLength() > 0
&& Character.isJavaIdentifierPart(aDoc.getText(caretOffset, 1).toCharArray()[0])) {
caretOffset++;
}
return caretOffset;
}
// Kind of syntactic element a completion token represents.
public enum CompletionTokenType {
IDENTIFIER,
PROPERTY_GET,
ELEMENT_GET
}
// Immutable value: a token's name, its kind and the AST node it came from.
public static class CompletionToken {
public final String name;
public final CompletionTokenType type;
public final Node node;
public CompletionToken(String aName, CompletionTokenType aType, Node aNode) {
name = aName;
type = aType;
node = aNode;
}
}
}
|
designer/PlatypusModules/src/com/eas/designer/application/module/completion/CompletionPoint.java
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.designer.application.module.completion;
import com.eas.script.ScriptUtils;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
import javax.swing.text.BadLocationException;
import jdk.nashorn.internal.ir.AccessNode;
import jdk.nashorn.internal.ir.FunctionNode;
import jdk.nashorn.internal.ir.IdentNode;
import jdk.nashorn.internal.ir.LexicalContext;
import jdk.nashorn.internal.ir.Node;
import jdk.nashorn.internal.ir.visitor.NodeVisitor;
import jdk.nashorn.internal.parser.Token;
import org.netbeans.modules.editor.NbEditorDocument;
/**
*
* @author vv
*/
public class CompletionPoint {
private final static char DOT_CHARACTER = '.';//NOI18N
private String filter = "";//NOI18N
private List<CompletionToken> completionTokens;
private int caretBeginWordOffset;
private int caretEndWordOffset;
private FunctionNode astRoot;
public String getFilter() {
return filter;
}
public List<CompletionToken> getCompletionTokens() {
return completionTokens;
}
public int getCaretBeginWordOffset() {
return caretBeginWordOffset;
}
public int getCaretEndWordOffset() {
return caretEndWordOffset;
}
public FunctionNode getAstRoot() {
return astRoot;
}
public static CompletionPoint createInstance(NbEditorDocument doc, int caretOffset) throws Exception {
final CompletionPoint cp = new CompletionPoint();
if (caretOffset > 0) {
char caretPositionChar = doc.getChars(caretOffset, 1)[0];
char preCaretPositionChar = doc.getChars(caretOffset - 1, 1)[0];
boolean inBetweenSentence = false;
if (Character.isJavaIdentifierPart(preCaretPositionChar) || preCaretPositionChar == DOT_CHARACTER) {
boolean afterDotCaretPosintion = !Character.isJavaIdentifierPart(caretPositionChar)
&& preCaretPositionChar == DOT_CHARACTER;
String docStr = removeComments(doc.getText(0, doc.getLength()));
cp.astRoot = ScriptUtils.parseJs(
afterDotCaretPosintion
? sanitizeDot(docStr, caretOffset - 1) : docStr);
//Node offsetNode = AstUtlities.getOffsetNode(cp.astRoot, afterDotCaretPosintion ? caretOffset - 1 : caretOffset);
//final Node subRoot = getCompletionSubtree(cp.astRoot, caretOffset);
//if (subRoot != null) {
List<CompletionToken> ctxTokens = getContextTokens(cp.astRoot, afterDotCaretPosintion ? caretOffset - 1 : caretOffset);
List<CompletionToken> offsetTokens = getOffsetTokens(ctxTokens, caretOffset);
inBetweenSentence = ctxTokens.size() > offsetTokens.size() + 1;
cp.completionTokens = offsetTokens;
//}
}
cp.caretBeginWordOffset = getStartWordOffset(doc, caretOffset);
cp.caretEndWordOffset = getEndWordOffset(doc, caretOffset);
if (caretOffset - cp.caretBeginWordOffset > 0 && !inBetweenSentence) {
cp.filter = doc.getText(cp.caretBeginWordOffset, caretOffset - cp.caretBeginWordOffset);
}
}
return cp;
}
/**
 * Collects the chain of completion tokens (identifier names in access
 * expressions) that is relevant for code completion at the given caret offset.
 *
 * @param ast    root node of the parsed script.
 * @param offset caret offset within the document.
 * @return identifier tokens in source order; empty when no identifier spans the offset.
 */
public static List<CompletionToken> getContextTokens(final Node ast, final int offset) {
    // Lexical context extended with a stack of the access nodes currently entered,
    // so ident nodes can be related to their outermost enclosing access expression.
    class AccessNodeLexicalContext extends LexicalContext {
        final Deque<AccessNode> accessNodes = new ArrayDeque<>();
    }
    final AccessNodeLexicalContext lc = new AccessNodeLexicalContext();
    final List<CompletionToken> ctx = new ArrayList<>();
    ast.accept(new NodeVisitor<AccessNodeLexicalContext>(lc) {
        @Override
        protected boolean enterDefault(Node node) {
            // Visit the whole tree; filtering happens per ident node below.
            return true;
        }

        @Override
        public boolean enterAccessNode(AccessNode accessNode) {
            lc.accessNodes.push(accessNode);
            return super.enterAccessNode(accessNode);
        }

        @Override
        public Node leaveAccessNode(AccessNode accessNode) {
            lc.accessNodes.pop();
            return super.leaveAccessNode(accessNode);
        }

        @Override
        public boolean enterIdentNode(IdentNode identNode) {
            // Take the identifier when it belongs to the outermost access node that
            // also spans the caret offset, or when it is a free identifier located
            // at the offset itself.
            if (!lc.accessNodes.isEmpty()
                    && ScriptUtils.isInNode(lc.accessNodes.peekLast(), identNode)
                    && ScriptUtils.isInNode(lc.accessNodes.peekLast(), offset)
                    || lc.accessNodes.isEmpty()
                    && ScriptUtils.isInNode(identNode, offset)) {
                ctx.add(new CompletionToken(identNode.getName(), CompletionTokenType.IDENTIFIER, identNode));
            }
            return true;
        }
    });
    return ctx;
}
/**
 * Replaces every comment in the given script text with spaces of the same
 * length, so that character offsets of the remaining code are preserved.
 */
private static String removeComments(String text) {
    StringBuilder result = new StringBuilder();
    int copiedUpTo = 0;
    for (Long token : ScriptUtils.getCommentsTokens(text)) {
        int start = Token.descPosition(token);
        int end = start + Token.descLength(token);
        // Copy the code before the comment, then pad with blanks in its place.
        result.append(text, copiedUpTo, start);
        while (result.length() < end) {
            result.append(' ');//NOI18N
        }
        copiedUpTo = end;
    }
    result.append(text, copiedUpTo, text.length());
    return result.toString();
}
/**
 * Returns the leading run of context tokens whose AST nodes end strictly
 * before the given offset; stops at the first token reaching the offset.
 */
private static List<CompletionToken> getOffsetTokens(List<CompletionToken> contextTokens, int offset) {
    final List<CompletionToken> result = new ArrayList<>();
    for (CompletionToken candidate : contextTokens) {
        if (candidate.node.getFinish() >= offset) {
            break;
        }
        result.add(candidate);
    }
    return result;
}
/**
 * Returns a copy of {@code str} in which the character at {@code position}
 * (expected to be the troublesome dot) is replaced by a single space.
 */
private static String sanitizeDot(String str, int position) {
    StringBuilder sanitized = new StringBuilder(str);
    sanitized.setCharAt(position, ' '); //NOI18N
    return sanitized.toString();
}
/**
 * Scans backwards from the caret and returns the offset at which the word
 * under the caret begins (first character that is part of a Java identifier).
 *
 * @param aDoc        document to inspect.
 * @param caretOffset current caret position.
 * @return offset of the first character of the word; {@code caretOffset} itself
 *         when the preceding character is not an identifier part.
 * @throws Exception if the text at the inspected offset cannot be read.
 */
private static int getStartWordOffset(NbEditorDocument aDoc, int caretOffset) throws Exception {
    // charAt(0) avoids the per-character String -> char[] copy of toCharArray().
    while (caretOffset > 0 && aDoc.getLength() > 0
            && Character.isJavaIdentifierPart(aDoc.getText(caretOffset - 1, 1).charAt(0))) {
        caretOffset--;
    }
    return caretOffset;
}
/**
 * Scans forwards from the caret and returns the offset just past the end of
 * the word under the caret (first character that is not a Java identifier part).
 *
 * @param aDoc        document to inspect.
 * @param caretOffset current caret position.
 * @return offset one past the last identifier character of the word.
 * @throws BadLocationException if the text at the inspected offset cannot be read.
 */
private static int getEndWordOffset(NbEditorDocument aDoc, int caretOffset) throws BadLocationException {
    // charAt(0) avoids the per-character String -> char[] copy of toCharArray().
    while (caretOffset < aDoc.getLength() && aDoc.getLength() > 0
            && Character.isJavaIdentifierPart(aDoc.getText(caretOffset, 1).charAt(0))) {
        caretOffset++;
    }
    return caretOffset;
}
/** Kind of a token in a completion context chain. */
public enum CompletionTokenType {
// A plain identifier, e.g. "foo" in "foo.bar".
IDENTIFIER,
// A property access, e.g. "bar" in "foo.bar".
PROPERTY_GET,
// A computed element access, e.g. the index expression in "foo[bar]".
ELEMENT_GET
}
/**
 * A single token of the completion context: its textual name, its kind, and
 * the AST node it was extracted from. Instances are immutable.
 */
public static class CompletionToken {
// Textual value of the token (identifier or property name).
public final String name;
// Kind of the token; see CompletionTokenType.
public final CompletionTokenType type;
// Originating AST node; used for source-offset queries.
public final Node node;
public CompletionToken(String aName, CompletionTokenType aType, Node aNode) {
name = aName;
type = aType;
node = aNode;
}
}
}
|
Some commented code removed.
|
designer/PlatypusModules/src/com/eas/designer/application/module/completion/CompletionPoint.java
|
Some commented code removed.
|
|
Java
|
apache-2.0
|
e24afc9c2fc39285aa0f5100b15bf31c88159567
| 0
|
ctamisier/generator-jhipster,pascalgrimaud/generator-jhipster,ctamisier/generator-jhipster,JulienMrgrd/generator-jhipster,dynamicguy/generator-jhipster,baskeboler/generator-jhipster,duderoot/generator-jhipster,xetys/generator-jhipster,gzsombor/generator-jhipster,deepu105/generator-jhipster,atomfrede/generator-jhipster,robertmilowski/generator-jhipster,ziogiugno/generator-jhipster,dimeros/generator-jhipster,eosimosu/generator-jhipster,mraible/generator-jhipster,vivekmore/generator-jhipster,cbornet/generator-jhipster,maniacneron/generator-jhipster,wmarques/generator-jhipster,gzsombor/generator-jhipster,ctamisier/generator-jhipster,dimeros/generator-jhipster,jkutner/generator-jhipster,duderoot/generator-jhipster,jkutner/generator-jhipster,dynamicguy/generator-jhipster,danielpetisme/generator-jhipster,ruddell/generator-jhipster,wmarques/generator-jhipster,erikkemperman/generator-jhipster,wmarques/generator-jhipster,jkutner/generator-jhipster,dynamicguy/generator-jhipster,jhipster/generator-jhipster,rkohel/generator-jhipster,liseri/generator-jhipster,gmarziou/generator-jhipster,ctamisier/generator-jhipster,Tcharl/generator-jhipster,mraible/generator-jhipster,wmarques/generator-jhipster,sohibegit/generator-jhipster,duderoot/generator-jhipster,maniacneron/generator-jhipster,liseri/generator-jhipster,dimeros/generator-jhipster,hdurix/generator-jhipster,gmarziou/generator-jhipster,rkohel/generator-jhipster,erikkemperman/generator-jhipster,stevehouel/generator-jhipster,gmarziou/generator-jhipster,eosimosu/generator-jhipster,siliconharborlabs/generator-jhipster,mosoft521/generator-jhipster,Tcharl/generator-jhipster,baskeboler/generator-jhipster,eosimosu/generator-jhipster,Tcharl/generator-jhipster,pascalgrimaud/generator-jhipster,yongli82/generator-jhipster,baskeboler/generator-jhipster,hdurix/generator-jhipster,gmarziou/generator-jhipster,pascalgrimaud/generator-jhipster,ramzimaalej/generator-jhipster,jkutner/generator-jhipster,atomfrede/generator-jhipster,cbornet/generator-j
hipster,dalbelap/generator-jhipster,maniacneron/generator-jhipster,jhipster/generator-jhipster,liseri/generator-jhipster,cbornet/generator-jhipster,ctamisier/generator-jhipster,eosimosu/generator-jhipster,ramzimaalej/generator-jhipster,lrkwz/generator-jhipster,sendilkumarn/generator-jhipster,Tcharl/generator-jhipster,robertmilowski/generator-jhipster,gzsombor/generator-jhipster,sendilkumarn/generator-jhipster,rifatdover/generator-jhipster,rkohel/generator-jhipster,rifatdover/generator-jhipster,JulienMrgrd/generator-jhipster,cbornet/generator-jhipster,sohibegit/generator-jhipster,vivekmore/generator-jhipster,stevehouel/generator-jhipster,robertmilowski/generator-jhipster,duderoot/generator-jhipster,danielpetisme/generator-jhipster,nkolosnjaji/generator-jhipster,siliconharborlabs/generator-jhipster,duderoot/generator-jhipster,jkutner/generator-jhipster,danielpetisme/generator-jhipster,nkolosnjaji/generator-jhipster,rifatdover/generator-jhipster,deepu105/generator-jhipster,baskeboler/generator-jhipster,dalbelap/generator-jhipster,nkolosnjaji/generator-jhipster,dynamicguy/generator-jhipster,hdurix/generator-jhipster,nkolosnjaji/generator-jhipster,mraible/generator-jhipster,xetys/generator-jhipster,mraible/generator-jhipster,stevehouel/generator-jhipster,ziogiugno/generator-jhipster,JulienMrgrd/generator-jhipster,baskeboler/generator-jhipster,ruddell/generator-jhipster,maniacneron/generator-jhipster,mosoft521/generator-jhipster,eosimosu/generator-jhipster,wmarques/generator-jhipster,robertmilowski/generator-jhipster,yongli82/generator-jhipster,dalbelap/generator-jhipster,pascalgrimaud/generator-jhipster,PierreBesson/generator-jhipster,Tcharl/generator-jhipster,cbornet/generator-jhipster,erikkemperman/generator-jhipster,ruddell/generator-jhipster,lrkwz/generator-jhipster,xetys/generator-jhipster,mosoft521/generator-jhipster,rkohel/generator-jhipster,liseri/generator-jhipster,sohibegit/generator-jhipster,jhipster/generator-jhipster,vivekmore/generator-jhipster,liseri/gener
ator-jhipster,PierreBesson/generator-jhipster,PierreBesson/generator-jhipster,gmarziou/generator-jhipster,jhipster/generator-jhipster,mosoft521/generator-jhipster,dimeros/generator-jhipster,ziogiugno/generator-jhipster,deepu105/generator-jhipster,rkohel/generator-jhipster,yongli82/generator-jhipster,JulienMrgrd/generator-jhipster,atomfrede/generator-jhipster,ruddell/generator-jhipster,gzsombor/generator-jhipster,erikkemperman/generator-jhipster,PierreBesson/generator-jhipster,yongli82/generator-jhipster,siliconharborlabs/generator-jhipster,sendilkumarn/generator-jhipster,ziogiugno/generator-jhipster,xetys/generator-jhipster,ruddell/generator-jhipster,nkolosnjaji/generator-jhipster,pascalgrimaud/generator-jhipster,siliconharborlabs/generator-jhipster,sohibegit/generator-jhipster,danielpetisme/generator-jhipster,jhipster/generator-jhipster,mraible/generator-jhipster,atomfrede/generator-jhipster,hdurix/generator-jhipster,ramzimaalej/generator-jhipster,dimeros/generator-jhipster,siliconharborlabs/generator-jhipster,danielpetisme/generator-jhipster,sendilkumarn/generator-jhipster,stevehouel/generator-jhipster,stevehouel/generator-jhipster,lrkwz/generator-jhipster,atomfrede/generator-jhipster,yongli82/generator-jhipster,lrkwz/generator-jhipster,dalbelap/generator-jhipster,vivekmore/generator-jhipster,PierreBesson/generator-jhipster,robertmilowski/generator-jhipster,ziogiugno/generator-jhipster,hdurix/generator-jhipster,mosoft521/generator-jhipster,dalbelap/generator-jhipster,vivekmore/generator-jhipster,maniacneron/generator-jhipster,erikkemperman/generator-jhipster,deepu105/generator-jhipster,lrkwz/generator-jhipster,JulienMrgrd/generator-jhipster,sendilkumarn/generator-jhipster,deepu105/generator-jhipster,sohibegit/generator-jhipster,gzsombor/generator-jhipster
|
package <%=packageName%>.repository;
<% if (databaseType == 'cassandra') { %>
import com.datastax.driver.core.*;
import com.datastax.driver.mapping.Mapper;
import com.datastax.driver.mapping.MappingManager;<% } %>
import <%=packageName%>.domain.User;
import java.time.ZonedDateTime;<% if (databaseType == 'sql') { %>
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;<% } %><% if (databaseType == 'mongodb') { %>
import org.springframework.data.mongodb.repository.MongoRepository;<% } %>
import java.util.List;
import java.util.Optional;<% if (databaseType == 'cassandra') { %>
import org.springframework.stereotype.Repository;
import org.springframework.util.StringUtils;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;<%}%>
<% if (databaseType == 'sql') { %>/**
* Spring Data JPA repository for the User entity.
*/<% } %><% if (databaseType == 'mongodb') { %>/**
* Spring Data MongoDB repository for the User entity.
*/<% } %><% if (databaseType == 'cassandra') { %>/**
* Cassandra repository for the User entity.
*/<% } %><% if (databaseType == 'sql' || databaseType == 'mongodb') { %>
public interface UserRepository extends <% if (databaseType == 'sql') { %>JpaRepository<User, Long><% } %><% if (databaseType == 'mongodb') { %>MongoRepository<User, String><% } %> {
Optional<User> findOneByActivationKey(String activationKey);
List<User> findAllByActivatedIsFalseAndCreatedDateBefore(ZonedDateTime dateTime);
Optional<User> findOneByResetKey(String resetKey);
Optional<User> findOneByEmail(String email);
Optional<User> findOneByLogin(String login);
Optional<User> findOneById(<%= pkType %> userId);
<%_ if (databaseType == 'sql') { _%>
// "left join fetch" (outer join) keeps users that have no authorities in the
// result page; a plain inner join would silently drop them.
@Query(value = "select distinct user from User user left join fetch user.authorities",
countQuery = "select count(user) from User user")
Page<User> findAllWithAuthorities(Pageable pageable);
<%_ } _%>
@Override
void delete(User t);
}<% } else if (databaseType == 'cassandra') { %>
@Repository
public class UserRepository {
@Inject
private Session session;
private Mapper<User> mapper;
private PreparedStatement findAllStmt;
private PreparedStatement findOneByActivationKeyStmt;
private PreparedStatement findOneByResetKeyStmt;
private PreparedStatement insertByActivationKeyStmt;
private PreparedStatement insertByResetKeyStmt;
private PreparedStatement deleteByActivationKeyStmt;
private PreparedStatement deleteByResetKeyStmt;
private PreparedStatement findOneByLoginStmt;
private PreparedStatement insertByLoginStmt;
private PreparedStatement deleteByLoginStmt;
private PreparedStatement findOneByEmailStmt;
private PreparedStatement insertByEmailStmt;
private PreparedStatement deleteByEmailStmt;
/**
* Prepares all CQL statements once at startup so that each query execution
* only binds values instead of re-parsing the statement text.
*/
@PostConstruct
public void init() {
// Object mapper for the user table itself.
mapper = new MappingManager(session).mapper(User.class);
findAllStmt = session.prepare("SELECT * FROM user");
// The user_by_* lookup tables map an attribute value to a user id; the
// statements below query them and keep them in sync with the user table.
findOneByActivationKeyStmt = session.prepare(
"SELECT id " +
"FROM user_by_activation_key " +
"WHERE activation_key = :activation_key");
findOneByResetKeyStmt = session.prepare(
"SELECT id " +
"FROM user_by_reset_key " +
"WHERE reset_key = :reset_key");
insertByActivationKeyStmt = session.prepare(
"INSERT INTO user_by_activation_key (activation_key, id) " +
"VALUES (:activation_key, :id)");
insertByResetKeyStmt = session.prepare(
"INSERT INTO user_by_reset_key (reset_key, id) " +
"VALUES (:reset_key, :id)");
deleteByActivationKeyStmt = session.prepare(
"DELETE FROM user_by_activation_key " +
"WHERE activation_key = :activation_key");
deleteByResetKeyStmt = session.prepare(
"DELETE FROM user_by_reset_key " +
"WHERE reset_key = :reset_key");
findOneByLoginStmt = session.prepare(
"SELECT id " +
"FROM user_by_login " +
"WHERE login = :login");
insertByLoginStmt = session.prepare(
"INSERT INTO user_by_login (login, id) " +
"VALUES (:login, :id)");
deleteByLoginStmt = session.prepare(
"DELETE FROM user_by_login " +
"WHERE login = :login");
findOneByEmailStmt = session.prepare(
"SELECT id " +
"FROM user_by_email " +
"WHERE email = :email");
insertByEmailStmt = session.prepare(
"INSERT INTO user_by_email (email, id) " +
"VALUES (:email, :id)");
deleteByEmailStmt = session.prepare(
"DELETE FROM user_by_email " +
"WHERE email = :email");
}
/**
* Returns the user with the given id.
* NOTE(review): assumed that Mapper.get returns null for a missing row, in
* which case this returns null as well — confirm against the driver docs.
*/
public User findOne(String id) {
return mapper.get(id);
}
/**
 * Returns the user with the given id, or an empty Optional when no such user
 * exists.
 *
 * @param id the user id.
 * @return the matching user, if any.
 */
public Optional<User> findOneById(String id) {
    // Optional.of would throw a NullPointerException when findOne returns null
    // for a missing user; ofNullable maps that case to Optional.empty().
    return Optional.ofNullable(findOne(id));
}
/**
 * Looks a user up through the user_by_activation_key lookup table.
 */
public Optional<User> findOneByActivationKey(String activationKey) {
    return findOneFromIndex(
        findOneByActivationKeyStmt.bind().setString("activation_key", activationKey));
}

/**
 * Looks a user up through the user_by_reset_key lookup table.
 */
public Optional<User> findOneByResetKey(String resetKey) {
    return findOneFromIndex(
        findOneByResetKeyStmt.bind().setString("reset_key", resetKey));
}

/**
 * Looks a user up through the user_by_email lookup table.
 */
public Optional<User> findOneByEmail(String email) {
    return findOneFromIndex(
        findOneByEmailStmt.bind().setString("email", email));
}

/**
 * Looks a user up through the user_by_login lookup table.
 */
public Optional<User> findOneByLogin(String login) {
    return findOneFromIndex(
        findOneByLoginStmt.bind().setString("login", login));
}
/**
* Returns all users by executing the prepared full-table SELECT and mapping
* every row to a User.
*/
public List<User> findAll() {
return mapper.map(session.execute(findAllStmt.bind())).all();
}
/**
* Saves the user and keeps the user_by_* lookup tables consistent: lookup
* rows whose key value changed are deleted first, then the user row and the
* current lookup rows are written together in one batch.
*/
public User save(User user) {
User oldUser = mapper.get(user.getId());
if (oldUser != null) {
// Remove lookup-table entries whose key value changed on this save.
if (!StringUtils.isEmpty(oldUser.getActivationKey()) && !oldUser.getActivationKey().equals(user.getActivationKey())) {
session.execute(deleteByActivationKeyStmt.bind().setString("activation_key", oldUser.getActivationKey()));
}
if (!StringUtils.isEmpty(oldUser.getResetKey()) && !oldUser.getResetKey().equals(user.getResetKey())) {
session.execute(deleteByResetKeyStmt.bind().setString("reset_key", oldUser.getResetKey()));
}
if (!StringUtils.isEmpty(oldUser.getLogin()) && !oldUser.getLogin().equals(user.getLogin())) {
session.execute(deleteByLoginStmt.bind().setString("login", oldUser.getLogin()));
}
if (!StringUtils.isEmpty(oldUser.getEmail()) && !oldUser.getEmail().equals(user.getEmail())) {
session.execute(deleteByEmailStmt.bind().setString("email", oldUser.getEmail()));
}
}
// Write the user row plus all current lookup rows in a single batch.
BatchStatement batch = new BatchStatement();
batch.add(mapper.saveQuery(user));
// Activation/reset keys are optional; login and email are always indexed.
if (!StringUtils.isEmpty(user.getActivationKey())) {
batch.add(insertByActivationKeyStmt.bind()
.setString("activation_key", user.getActivationKey())
.setString("id", user.getId()));
}
if (!StringUtils.isEmpty(user.getResetKey())) {
batch.add(insertByResetKeyStmt.bind()
.setString("reset_key", user.getResetKey())
.setString("id", user.getId()));
}
batch.add(insertByLoginStmt.bind()
.setString("login", user.getLogin())
.setString("id", user.getId()));
batch.add(insertByEmailStmt.bind()
.setString("email", user.getEmail())
.setString("id", user.getId()));
session.execute(batch);
return user;
}
/**
* Deletes the user row together with all of its lookup-table entries in a
* single batch.
*/
public void delete(User user) {
BatchStatement batch = new BatchStatement();
batch.add(mapper.deleteQuery(user));
// Optional keys only have lookup rows when they are set.
if (!StringUtils.isEmpty(user.getActivationKey())) {
batch.add(deleteByActivationKeyStmt.bind().setString("activation_key", user.getActivationKey()));
}
if (!StringUtils.isEmpty(user.getResetKey())) {
batch.add(deleteByResetKeyStmt.bind().setString("reset_key", user.getResetKey()));
}
batch.add(deleteByLoginStmt.bind().setString("login", user.getLogin()));
batch.add(deleteByEmailStmt.bind().setString("email", user.getEmail()));
session.execute(batch);
}
/**
 * Executes a lookup-table query and resolves the referenced user.
 *
 * @param stmt bound statement selecting an "id" column from a lookup table.
 * @return the user referenced by the first row, or empty when the query
 *         returns no row, a null id, or an id with no matching user row.
 */
private Optional<User> findOneFromIndex(BoundStatement stmt) {
    ResultSet rs = session.execute(stmt);
    if (rs.isExhausted()) {
        return Optional.empty();
    }
    // Optional.map already yields empty() when the mapping result is null, so a
    // null id or a dangling lookup entry no longer triggers the
    // NoSuchElementException that the previous Optional.get() call could throw.
    return Optional.ofNullable(rs.one().getString("id"))
        .map(mapper::get);
}
}<% } %>
|
generators/server/templates/src/main/java/package/repository/_UserRepository.java
|
package <%=packageName%>.repository;
<% if (databaseType == 'cassandra') { %>
import com.datastax.driver.core.*;
import com.datastax.driver.mapping.Mapper;
import com.datastax.driver.mapping.MappingManager;<% } %>
import <%=packageName%>.domain.User;
import java.time.ZonedDateTime;<% if (databaseType == 'sql') { %>
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;<% } %><% if (databaseType == 'mongodb') { %>
import org.springframework.data.mongodb.repository.MongoRepository;<% } %>
import java.util.List;
import java.util.Optional;<% if (databaseType == 'cassandra') { %>
import org.springframework.stereotype.Repository;
import org.springframework.util.StringUtils;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;<%}%>
<% if (databaseType == 'sql') { %>/**
* Spring Data JPA repository for the User entity.
*/<% } %><% if (databaseType == 'mongodb') { %>/**
* Spring Data MongoDB repository for the User entity.
*/<% } %><% if (databaseType == 'cassandra') { %>/**
* Cassandra repository for the User entity.
*/<% } %><% if (databaseType == 'sql' || databaseType == 'mongodb') { %>
public interface UserRepository extends <% if (databaseType == 'sql') { %>JpaRepository<User, Long><% } %><% if (databaseType == 'mongodb') { %>MongoRepository<User, String><% } %> {
Optional<User> findOneByActivationKey(String activationKey);
List<User> findAllByActivatedIsFalseAndCreatedDateBefore(ZonedDateTime dateTime);
Optional<User> findOneByResetKey(String resetKey);
Optional<User> findOneByEmail(String email);
Optional<User> findOneByLogin(String login);
Optional<User> findOneById(<%= pkType %> userId);
<%_ if (databaseType == 'sql') { _%>
// "left join fetch" (outer join) keeps users that have no authorities in the
// result page; the previous plain inner "join fetch" silently dropped them.
@Query(value = "select distinct user from User user left join fetch user.authorities",
countQuery = "select count(user) from User user")
Page<User> findAllWithAuthorities(Pageable pageable);
<%_ } _%>
@Override
void delete(User t);
}<% } else if (databaseType == 'cassandra') { %>
@Repository
public class UserRepository {
@Inject
private Session session;
private Mapper<User> mapper;
private PreparedStatement findAllStmt;
private PreparedStatement findOneByActivationKeyStmt;
private PreparedStatement findOneByResetKeyStmt;
private PreparedStatement insertByActivationKeyStmt;
private PreparedStatement insertByResetKeyStmt;
private PreparedStatement deleteByActivationKeyStmt;
private PreparedStatement deleteByResetKeyStmt;
private PreparedStatement findOneByLoginStmt;
private PreparedStatement insertByLoginStmt;
private PreparedStatement deleteByLoginStmt;
private PreparedStatement findOneByEmailStmt;
private PreparedStatement insertByEmailStmt;
private PreparedStatement deleteByEmailStmt;
/**
* Prepares all CQL statements once at startup so that each query execution
* only binds values instead of re-parsing the statement text.
*/
@PostConstruct
public void init() {
// Object mapper for the user table itself.
mapper = new MappingManager(session).mapper(User.class);
findAllStmt = session.prepare("SELECT * FROM user");
findOneByActivationKeyStmt = session.prepare(
"SELECT id " +
"FROM user_by_activation_key " +
"WHERE activation_key = :activation_key");
findOneByResetKeyStmt = session.prepare(
"SELECT id " +
"FROM user_by_reset_key " +
"WHERE reset_key = :reset_key");
insertByActivationKeyStmt = session.prepare(
"INSERT INTO user_by_activation_key (activation_key, id) " +
"VALUES (:activation_key, :id)");
insertByResetKeyStmt = session.prepare(
"INSERT INTO user_by_reset_key (reset_key, id) " +
"VALUES (:reset_key, :id)");
deleteByActivationKeyStmt = session.prepare(
"DELETE FROM user_by_activation_key " +
"WHERE activation_key = :activation_key");
deleteByResetKeyStmt = session.prepare(
"DELETE FROM user_by_reset_key " +
"WHERE reset_key = :reset_key");
findOneByLoginStmt = session.prepare(
"SELECT id " +
"FROM user_by_login " +
"WHERE login = :login");
insertByLoginStmt = session.prepare(
"INSERT INTO user_by_login (login, id) " +
"VALUES (:login, :id)");
deleteByLoginStmt = session.prepare(
"DELETE FROM user_by_login " +
"WHERE login = :login");
findOneByEmailStmt = session.prepare(
"SELECT id " +
"FROM user_by_email " +
"WHERE email = :email");
insertByEmailStmt = session.prepare(
"INSERT INTO user_by_email (email, id) " +
"VALUES (:email, :id)");
deleteByEmailStmt = session.prepare(
"DELETE FROM user_by_email " +
"WHERE email = :email");
}
/**
* Returns the user with the given id.
* NOTE(review): assumed that Mapper.get returns null for a missing row, in
* which case this returns null as well — confirm against the driver docs.
*/
public User findOne(String id) {
return mapper.get(id);
}
/**
 * Returns the user with the given id, or an empty Optional when no such user
 * exists.
 *
 * @param id the user id.
 * @return the matching user, if any.
 */
public Optional<User> findOneById(String id) {
    // Optional.of would throw a NullPointerException when findOne returns null
    // for a missing user; ofNullable maps that case to Optional.empty().
    return Optional.ofNullable(findOne(id));
}
/**
 * Looks a user up through the user_by_activation_key lookup table.
 */
public Optional<User> findOneByActivationKey(String activationKey) {
    return findOneFromIndex(
        findOneByActivationKeyStmt.bind().setString("activation_key", activationKey));
}

/**
 * Looks a user up through the user_by_reset_key lookup table.
 */
public Optional<User> findOneByResetKey(String resetKey) {
    return findOneFromIndex(
        findOneByResetKeyStmt.bind().setString("reset_key", resetKey));
}

/**
 * Looks a user up through the user_by_email lookup table.
 */
public Optional<User> findOneByEmail(String email) {
    return findOneFromIndex(
        findOneByEmailStmt.bind().setString("email", email));
}

/**
 * Looks a user up through the user_by_login lookup table.
 */
public Optional<User> findOneByLogin(String login) {
    return findOneFromIndex(
        findOneByLoginStmt.bind().setString("login", login));
}
/**
* Returns all users by executing the prepared full-table SELECT and mapping
* every row to a User.
*/
public List<User> findAll() {
return mapper.map(session.execute(findAllStmt.bind())).all();
}
/**
* Saves the user and keeps the user_by_* lookup tables consistent: lookup
* rows whose key value changed are deleted first, then the user row and the
* current lookup rows are written together in one batch.
*/
public User save(User user) {
User oldUser = mapper.get(user.getId());
if (oldUser != null) {
// Remove lookup-table entries whose key value changed on this save.
if (!StringUtils.isEmpty(oldUser.getActivationKey()) && !oldUser.getActivationKey().equals(user.getActivationKey())) {
session.execute(deleteByActivationKeyStmt.bind().setString("activation_key", oldUser.getActivationKey()));
}
if (!StringUtils.isEmpty(oldUser.getResetKey()) && !oldUser.getResetKey().equals(user.getResetKey())) {
session.execute(deleteByResetKeyStmt.bind().setString("reset_key", oldUser.getResetKey()));
}
if (!StringUtils.isEmpty(oldUser.getLogin()) && !oldUser.getLogin().equals(user.getLogin())) {
session.execute(deleteByLoginStmt.bind().setString("login", oldUser.getLogin()));
}
if (!StringUtils.isEmpty(oldUser.getEmail()) && !oldUser.getEmail().equals(user.getEmail())) {
session.execute(deleteByEmailStmt.bind().setString("email", oldUser.getEmail()));
}
}
// Write the user row plus all current lookup rows in a single batch.
BatchStatement batch = new BatchStatement();
batch.add(mapper.saveQuery(user));
// Activation/reset keys are optional; login and email are always indexed.
if (!StringUtils.isEmpty(user.getActivationKey())) {
batch.add(insertByActivationKeyStmt.bind()
.setString("activation_key", user.getActivationKey())
.setString("id", user.getId()));
}
if (!StringUtils.isEmpty(user.getResetKey())) {
batch.add(insertByResetKeyStmt.bind()
.setString("reset_key", user.getResetKey())
.setString("id", user.getId()));
}
batch.add(insertByLoginStmt.bind()
.setString("login", user.getLogin())
.setString("id", user.getId()));
batch.add(insertByEmailStmt.bind()
.setString("email", user.getEmail())
.setString("id", user.getId()));
session.execute(batch);
return user;
}
/**
* Deletes the user row together with all of its lookup-table entries in a
* single batch.
*/
public void delete(User user) {
BatchStatement batch = new BatchStatement();
batch.add(mapper.deleteQuery(user));
// Optional keys only have lookup rows when they are set.
if (!StringUtils.isEmpty(user.getActivationKey())) {
batch.add(deleteByActivationKeyStmt.bind().setString("activation_key", user.getActivationKey()));
}
if (!StringUtils.isEmpty(user.getResetKey())) {
batch.add(deleteByResetKeyStmt.bind().setString("reset_key", user.getResetKey()));
}
batch.add(deleteByLoginStmt.bind().setString("login", user.getLogin()));
batch.add(deleteByEmailStmt.bind().setString("email", user.getEmail()));
session.execute(batch);
}
/**
 * Executes a lookup-table query and resolves the referenced user.
 *
 * @param stmt bound statement selecting an "id" column from a lookup table.
 * @return the user referenced by the first row, or empty when the query
 *         returns no row, a null id, or an id with no matching user row.
 */
private Optional<User> findOneFromIndex(BoundStatement stmt) {
    ResultSet rs = session.execute(stmt);
    if (rs.isExhausted()) {
        return Optional.empty();
    }
    // Optional.map already yields empty() when the mapping result is null, so a
    // null id or a dangling lookup entry no longer triggers the
    // NoSuchElementException that the previous Optional.get() call could throw.
    return Optional.ofNullable(rs.one().getString("id"))
        .map(mapper::get);
}
}<% } %>
|
Use an outer join to get users without authorities
Fix #4056
|
generators/server/templates/src/main/java/package/repository/_UserRepository.java
|
Use an outer join to get users without authorities
|
|
Java
|
apache-2.0
|
cc918ae72deeea6654301efedd7e537356d391f1
| 0
|
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
|
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.provision.maintenance;
import com.yahoo.component.AbstractComponent;
import com.yahoo.component.annotation.Inject;
import com.yahoo.concurrent.maintenance.Maintainer;
import com.yahoo.config.provision.Deployer;
import com.yahoo.config.provision.InfraDeployer;
import com.yahoo.config.provision.NodeType;
import com.yahoo.config.provision.Zone;
import com.yahoo.jdisc.Metric;
import com.yahoo.vespa.flags.FlagSource;
import com.yahoo.vespa.hosted.provision.NodeRepository;
import com.yahoo.vespa.hosted.provision.autoscale.MetricsFetcher;
import com.yahoo.vespa.hosted.provision.provisioning.ProvisionServiceProvider;
import com.yahoo.vespa.service.monitor.ServiceMonitor;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
/**
* A component which sets up all the node repo maintenance jobs.
*
* @author bratseth
*/
public class NodeRepositoryMaintenance extends AbstractComponent {
private final List<Maintainer> maintainers = new CopyOnWriteArrayList<>();
/**
* Wires up every node-repository maintenance job with the intervals and
* expiries derived from the zone; jobs depending on optional provision
* services are only registered when those services are present.
*/
@SuppressWarnings("unused")
@Inject
public NodeRepositoryMaintenance(NodeRepository nodeRepository, Deployer deployer, InfraDeployer infraDeployer,
ServiceMonitor serviceMonitor,
Zone zone, Metric metric,
ProvisionServiceProvider provisionServiceProvider, FlagSource flagSource,
MetricsFetcher metricsFetcher) {
// All intervals and expiries come from zone/deployer-dependent defaults.
DefaultTimes defaults = new DefaultTimes(zone, deployer);
PeriodicApplicationMaintainer periodicApplicationMaintainer = new PeriodicApplicationMaintainer(deployer, metric, nodeRepository, defaults.redeployMaintainerInterval,
defaults.periodicRedeployInterval, flagSource);
InfrastructureProvisioner infrastructureProvisioner = new InfrastructureProvisioner(nodeRepository, infraDeployer, defaults.infrastructureProvisionInterval, metric);
maintainers.add(periodicApplicationMaintainer);
maintainers.add(infrastructureProvisioner);
maintainers.add(new NodeFailer(deployer, nodeRepository, defaults.failGrace, defaults.nodeFailerInterval, defaults.throttlePolicy, metric));
maintainers.add(new NodeHealthTracker(serviceMonitor, nodeRepository, defaults.nodeFailureStatusUpdateInterval, metric));
maintainers.add(new ExpeditedChangeApplicationMaintainer(deployer, metric, nodeRepository, defaults.expeditedChangeRedeployInterval));
maintainers.add(new ReservationExpirer(nodeRepository, defaults.reservationExpiry, metric));
maintainers.add(new RetiredExpirer(nodeRepository, deployer, metric, defaults.retiredInterval, defaults.retiredExpiry));
maintainers.add(new InactiveExpirer(nodeRepository, defaults.inactiveExpiry, Map.of(NodeType.config, defaults.inactiveConfigServerExpiry,
NodeType.controller, defaults.inactiveControllerExpiry),
metric));
maintainers.add(new FailedExpirer(nodeRepository, zone, defaults.failedExpirerInterval, metric));
maintainers.add(new DirtyExpirer(nodeRepository, defaults.dirtyExpiry, metric));
maintainers.add(new ProvisionedExpirer(nodeRepository, defaults.provisionedExpiry, metric));
maintainers.add(new NodeRebooter(nodeRepository, flagSource, metric));
maintainers.add(new MetricsReporter(nodeRepository, metric, serviceMonitor, periodicApplicationMaintainer::pendingDeployments, defaults.metricsInterval));
maintainers.add(new SpareCapacityMaintainer(deployer, nodeRepository, metric, defaults.spareCapacityMaintenanceInterval));
maintainers.add(new OsUpgradeActivator(nodeRepository, defaults.osUpgradeActivatorInterval, metric));
maintainers.add(new Rebalancer(deployer, nodeRepository, metric, defaults.rebalancerInterval));
maintainers.add(new NodeMetricsDbMaintainer(nodeRepository, metricsFetcher, defaults.nodeMetricsCollectionInterval, metric));
maintainers.add(new AutoscalingMaintainer(nodeRepository, deployer, metric, defaults.autoscalingInterval));
maintainers.add(new ScalingSuggestionsMaintainer(nodeRepository, defaults.scalingSuggestionsInterval, metric));
maintainers.add(new SwitchRebalancer(nodeRepository, defaults.switchRebalancerInterval, metric, deployer));
// These maintainers require optional services, so register them conditionally.
provisionServiceProvider.getLoadBalancerService()
.map(lbService -> new LoadBalancerExpirer(nodeRepository, defaults.loadBalancerExpirerInterval, lbService, metric))
.ifPresent(maintainers::add);
provisionServiceProvider.getHostProvisioner()
.map(hostProvisioner -> new DynamicProvisioningMaintainer(nodeRepository, defaults.dynamicProvisionerInterval, hostProvisioner, flagSource, metric))
.ifPresent(maintainers::add);
provisionServiceProvider.getHostProvisioner()
.map(hostProvisioner -> new HostRetirer(nodeRepository, defaults.hostRetirerInterval, metric, hostProvisioner))
.ifPresent(maintainers::add);
// The DuperModel is filled with infrastructure applications by the infrastructure provisioner, so explicitly run that now
infrastructureProvisioner.maintainButThrowOnException();
}
/**
 * Shuts all maintenance jobs down: first signals every maintainer to stop,
 * then waits for each of them to finish.
 */
@Override
public void deconstruct() {
    // Two separate passes so all maintainers receive the shutdown signal
    // before we start blocking on any of them.
    for (Maintainer maintainer : maintainers) {
        maintainer.shutdown();
    }
    for (Maintainer maintainer : maintainers) {
        maintainer.awaitShutdown();
    }
}
/**
 * Default run intervals and expiry times for the maintenance jobs, computed
 * once from the zone and the deployer. All fields are final; the constructor
 * chooses longer expiries in production (non-CD) zones and short ones elsewhere.
 */
private static class DefaultTimes {

    /** Minimum time to wait between deployments by periodic application maintainer*/
    private final Duration periodicRedeployInterval;
    /** Time between each run of maintainer that does periodic redeployment */
    private final Duration redeployMaintainerInterval;
    /** Applications are redeployed after manual operator changes within this time period */
    private final Duration expeditedChangeRedeployInterval;
    /** The time a node must be continuously unresponsive before it is failed */
    private final Duration failGrace;
    private final Duration reservationExpiry;
    private final Duration inactiveExpiry;
    private final Duration inactiveConfigServerExpiry;
    private final Duration inactiveControllerExpiry;
    private final Duration retiredExpiry;
    private final Duration failedExpirerInterval;
    private final Duration dirtyExpiry;
    private final Duration provisionedExpiry;
    private final Duration spareCapacityMaintenanceInterval;
    private final Duration metricsInterval;
    private final Duration nodeFailerInterval;
    private final Duration nodeFailureStatusUpdateInterval;
    private final Duration retiredInterval;
    private final Duration infrastructureProvisionInterval;
    private final Duration loadBalancerExpirerInterval;
    private final Duration dynamicProvisionerInterval;
    private final Duration osUpgradeActivatorInterval;
    private final Duration rebalancerInterval;
    private final Duration nodeMetricsCollectionInterval;
    private final Duration autoscalingInterval;
    private final Duration scalingSuggestionsInterval;
    private final Duration switchRebalancerInterval;
    private final Duration hostRetirerInterval;
    /** Throttling policy used by the node failer */
    private final NodeFailer.ThrottlePolicy throttlePolicy;

    DefaultTimes(Zone zone, Deployer deployer) {
        autoscalingInterval = Duration.ofMinutes(5);
        dynamicProvisionerInterval = Duration.ofMinutes(3);
        failedExpirerInterval = Duration.ofMinutes(10);
        failGrace = Duration.ofMinutes(20);
        infrastructureProvisionInterval = Duration.ofMinutes(3);
        loadBalancerExpirerInterval = Duration.ofMinutes(5);
        metricsInterval = Duration.ofMinutes(1);
        nodeFailerInterval = Duration.ofMinutes(7);
        nodeFailureStatusUpdateInterval = Duration.ofMinutes(2);
        nodeMetricsCollectionInterval = Duration.ofMinutes(1);
        expeditedChangeRedeployInterval = Duration.ofMinutes(3);
        // Vespa upgrade frequency is higher in CD so (de)activate OS upgrades more frequently as well
        osUpgradeActivatorInterval = zone.system().isCd() ? Duration.ofSeconds(30) : Duration.ofMinutes(5);
        periodicRedeployInterval = Duration.ofMinutes(60);
        provisionedExpiry = zone.getCloud().dynamicProvisioning() ? Duration.ofMinutes(40) : Duration.ofHours(4);
        rebalancerInterval = Duration.ofMinutes(120);
        redeployMaintainerInterval = Duration.ofMinutes(1);
        // Need to be long enough for deployment to be finished for all config model versions
        reservationExpiry = deployer.serverDeployTimeout();
        scalingSuggestionsInterval = Duration.ofMinutes(31);
        spareCapacityMaintenanceInterval = Duration.ofMinutes(30);
        switchRebalancerInterval = Duration.ofHours(1);
        throttlePolicy = NodeFailer.ThrottlePolicy.hosted;
        inactiveConfigServerExpiry = Duration.ofMinutes(5);
        inactiveControllerExpiry = Duration.ofMinutes(5);
        hostRetirerInterval = Duration.ofMinutes(30);

        // Production zones (outside CD) get long expiries to protect user data;
        // everywhere else nodes are recycled quickly.
        if (zone.environment().isProduction() && ! zone.system().isCd()) {
            inactiveExpiry = Duration.ofHours(4); // enough time for the application owner to discover and redeploy
            retiredInterval = Duration.ofMinutes(15);
            dirtyExpiry = Duration.ofHours(2); // enough time to clean the node
            retiredExpiry = Duration.ofDays(4); // give up migrating data after 4 days
        } else {
            // long enough that nodes aren't reused immediately and delete can happen on all config servers
            // with time enough to clean up even with ZK connection issues on config servers
            inactiveExpiry = Duration.ofMinutes(1);
            retiredInterval = Duration.ofMinutes(1);
            dirtyExpiry = Duration.ofMinutes(30);
            retiredExpiry = Duration.ofDays(1);
        }
    }
}
}
|
node-repository/src/main/java/com/yahoo/vespa/hosted/provision/maintenance/NodeRepositoryMaintenance.java
|
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.provision.maintenance;
import com.yahoo.component.AbstractComponent;
import com.yahoo.component.annotation.Inject;
import com.yahoo.concurrent.maintenance.Maintainer;
import com.yahoo.config.provision.Deployer;
import com.yahoo.config.provision.InfraDeployer;
import com.yahoo.config.provision.NodeType;
import com.yahoo.config.provision.Zone;
import com.yahoo.jdisc.Metric;
import com.yahoo.vespa.flags.FlagSource;
import com.yahoo.vespa.hosted.provision.NodeRepository;
import com.yahoo.vespa.hosted.provision.autoscale.MetricsFetcher;
import com.yahoo.vespa.hosted.provision.provisioning.ProvisionServiceProvider;
import com.yahoo.vespa.service.monitor.ServiceMonitor;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
/**
* A component which sets up all the node repo maintenance jobs.
*
* @author bratseth
*/
public class NodeRepositoryMaintenance extends AbstractComponent {

    // CopyOnWriteArrayList: registered once in the constructor, iterated on shutdown;
    // the copy-on-write semantics make concurrent reads safe without locking.
    private final List<Maintainer> maintainers = new CopyOnWriteArrayList<>();

    @SuppressWarnings("unused")
    @Inject
    public NodeRepositoryMaintenance(NodeRepository nodeRepository, Deployer deployer, InfraDeployer infraDeployer,
                                     ServiceMonitor serviceMonitor,
                                     Zone zone, Metric metric,
                                     ProvisionServiceProvider provisionServiceProvider, FlagSource flagSource,
                                     MetricsFetcher metricsFetcher) {
        // All intervals and expiries come from one place so they can be reasoned about together
        DefaultTimes defaults = new DefaultTimes(zone, deployer);

        // These two are kept in local variables because they are referenced again below
        PeriodicApplicationMaintainer periodicApplicationMaintainer = new PeriodicApplicationMaintainer(deployer, metric, nodeRepository, defaults.redeployMaintainerInterval,
                                                                                                        defaults.periodicRedeployInterval, flagSource);
        InfrastructureProvisioner infrastructureProvisioner = new InfrastructureProvisioner(nodeRepository, infraDeployer, defaults.infrastructureProvisionInterval, metric);
        maintainers.add(periodicApplicationMaintainer);
        maintainers.add(infrastructureProvisioner);

        maintainers.add(new NodeFailer(deployer, nodeRepository, defaults.failGrace, defaults.nodeFailerInterval, defaults.throttlePolicy, metric));
        maintainers.add(new NodeHealthTracker(serviceMonitor, nodeRepository, defaults.nodeFailureStatusUpdateInterval, metric));
        maintainers.add(new ExpeditedChangeApplicationMaintainer(deployer, metric, nodeRepository, defaults.expeditedChangeRedeployInterval));
        maintainers.add(new ReservationExpirer(nodeRepository, defaults.reservationExpiry, metric));
        maintainers.add(new RetiredExpirer(nodeRepository, deployer, metric, defaults.retiredInterval, defaults.retiredExpiry));
        // Config servers and controllers get their own (shorter) inactive expiry
        maintainers.add(new InactiveExpirer(nodeRepository, defaults.inactiveExpiry, Map.of(NodeType.config, defaults.inactiveConfigServerExpiry,
                                                                                           NodeType.controller, defaults.inactiveControllerExpiry),
                                            metric));
        maintainers.add(new FailedExpirer(nodeRepository, zone, defaults.failedExpirerInterval, metric));
        maintainers.add(new DirtyExpirer(nodeRepository, defaults.dirtyExpiry, metric));
        maintainers.add(new ProvisionedExpirer(nodeRepository, defaults.provisionedExpiry, metric));
        maintainers.add(new NodeRebooter(nodeRepository, flagSource, metric));
        maintainers.add(new MetricsReporter(nodeRepository, metric, serviceMonitor, periodicApplicationMaintainer::pendingDeployments, defaults.metricsInterval));
        maintainers.add(new SpareCapacityMaintainer(deployer, nodeRepository, metric, defaults.spareCapacityMaintenanceInterval));
        maintainers.add(new OsUpgradeActivator(nodeRepository, defaults.osUpgradeActivatorInterval, metric));
        maintainers.add(new Rebalancer(deployer, nodeRepository, metric, defaults.rebalancerInterval));
        maintainers.add(new NodeMetricsDbMaintainer(nodeRepository, metricsFetcher, defaults.nodeMetricsCollectionInterval, metric));
        maintainers.add(new AutoscalingMaintainer(nodeRepository, deployer, metric, defaults.autoscalingInterval));
        maintainers.add(new ScalingSuggestionsMaintainer(nodeRepository, defaults.scalingSuggestionsInterval, metric));
        maintainers.add(new SwitchRebalancer(nodeRepository, defaults.switchRebalancerInterval, metric, deployer));

        // These maintainers only apply when the corresponding service is configured for this zone
        provisionServiceProvider.getLoadBalancerService()
                                .map(lbService -> new LoadBalancerExpirer(nodeRepository, defaults.loadBalancerExpirerInterval, lbService, metric))
                                .ifPresent(maintainers::add);
        provisionServiceProvider.getHostProvisioner()
                                .map(hostProvisioner -> new DynamicProvisioningMaintainer(nodeRepository, defaults.dynamicProvisionerInterval, hostProvisioner, flagSource, metric))
                                .ifPresent(maintainers::add);
        provisionServiceProvider.getHostProvisioner()
                                .map(hostProvisioner -> new HostRetirer(nodeRepository, defaults.hostRetirerInterval, metric, hostProvisioner))
                                .ifPresent(maintainers::add);

        // The DuperModel is filled with infrastructure applications by the infrastructure provisioner, so explicitly run that now
        infrastructureProvisioner.maintainButThrowOnException();
    }

    /** Stops all maintainers and waits for each of them to finish shutting down. */
    @Override
    public void deconstruct() {
        maintainers.forEach(Maintainer::shutdown);
        maintainers.forEach(Maintainer::awaitShutdown);
    }

    /**
     * Default run intervals and expiry times for the maintenance jobs, computed
     * once from the zone and the deployer. All fields are final; the constructor
     * chooses longer expiries in production (non-CD) zones and short ones elsewhere.
     */
    private static class DefaultTimes {

        /** Minimum time to wait between deployments by periodic application maintainer*/
        private final Duration periodicRedeployInterval;
        /** Time between each run of maintainer that does periodic redeployment */
        private final Duration redeployMaintainerInterval;
        /** Applications are redeployed after manual operator changes within this time period */
        private final Duration expeditedChangeRedeployInterval;
        /** The time a node must be continuously unresponsive before it is failed */
        private final Duration failGrace;
        private final Duration reservationExpiry;
        private final Duration inactiveExpiry;
        private final Duration inactiveConfigServerExpiry;
        private final Duration inactiveControllerExpiry;
        private final Duration retiredExpiry;
        private final Duration failedExpirerInterval;
        private final Duration dirtyExpiry;
        private final Duration provisionedExpiry;
        private final Duration spareCapacityMaintenanceInterval;
        private final Duration metricsInterval;
        private final Duration nodeFailerInterval;
        private final Duration nodeFailureStatusUpdateInterval;
        private final Duration retiredInterval;
        private final Duration infrastructureProvisionInterval;
        private final Duration loadBalancerExpirerInterval;
        private final Duration dynamicProvisionerInterval;
        private final Duration osUpgradeActivatorInterval;
        private final Duration rebalancerInterval;
        private final Duration nodeMetricsCollectionInterval;
        private final Duration autoscalingInterval;
        private final Duration scalingSuggestionsInterval;
        private final Duration switchRebalancerInterval;
        private final Duration hostRetirerInterval;
        /** Throttling policy used by the node failer */
        private final NodeFailer.ThrottlePolicy throttlePolicy;

        DefaultTimes(Zone zone, Deployer deployer) {
            autoscalingInterval = Duration.ofMinutes(5);
            dynamicProvisionerInterval = Duration.ofMinutes(3);
            failedExpirerInterval = Duration.ofMinutes(10);
            failGrace = Duration.ofMinutes(30);
            infrastructureProvisionInterval = Duration.ofMinutes(3);
            loadBalancerExpirerInterval = Duration.ofMinutes(5);
            metricsInterval = Duration.ofMinutes(1);
            nodeFailerInterval = Duration.ofMinutes(9);
            nodeFailureStatusUpdateInterval = Duration.ofMinutes(2);
            nodeMetricsCollectionInterval = Duration.ofMinutes(1);
            expeditedChangeRedeployInterval = Duration.ofMinutes(3);
            // Vespa upgrade frequency is higher in CD so (de)activate OS upgrades more frequently as well
            osUpgradeActivatorInterval = zone.system().isCd() ? Duration.ofSeconds(30) : Duration.ofMinutes(5);
            periodicRedeployInterval = Duration.ofMinutes(60);
            provisionedExpiry = zone.getCloud().dynamicProvisioning() ? Duration.ofMinutes(40) : Duration.ofHours(4);
            rebalancerInterval = Duration.ofMinutes(120);
            redeployMaintainerInterval = Duration.ofMinutes(1);
            // Need to be long enough for deployment to be finished for all config model versions
            reservationExpiry = deployer.serverDeployTimeout();
            scalingSuggestionsInterval = Duration.ofMinutes(31);
            spareCapacityMaintenanceInterval = Duration.ofMinutes(30);
            switchRebalancerInterval = Duration.ofHours(1);
            throttlePolicy = NodeFailer.ThrottlePolicy.hosted;
            inactiveConfigServerExpiry = Duration.ofMinutes(5);
            inactiveControllerExpiry = Duration.ofMinutes(5);
            hostRetirerInterval = Duration.ofMinutes(30);

            // Production zones (outside CD) get long expiries to protect user data;
            // everywhere else nodes are recycled quickly.
            if (zone.environment().isProduction() && ! zone.system().isCd()) {
                inactiveExpiry = Duration.ofHours(4); // enough time for the application owner to discover and redeploy
                retiredInterval = Duration.ofMinutes(15);
                dirtyExpiry = Duration.ofHours(2); // enough time to clean the node
                retiredExpiry = Duration.ofDays(4); // give up migrating data after 4 days
            } else {
                // long enough that nodes aren't reused immediately and delete can happen on all config servers
                // with time enough to clean up even with ZK connection issues on config servers
                inactiveExpiry = Duration.ofMinutes(1);
                retiredInterval = Duration.ofMinutes(1);
                dirtyExpiry = Duration.ofMinutes(30);
                retiredExpiry = Duration.ofDays(1);
            }
        }
    }
}
|
Fail nodes in 20-27 minutes instead of 30-39
|
node-repository/src/main/java/com/yahoo/vespa/hosted/provision/maintenance/NodeRepositoryMaintenance.java
|
Fail nodes in 20-27 minutes instead of 30-39
|
|
Java
|
apache-2.0
|
65efab34f4100c7f4ce8942ce5e900c6e7bfc49b
| 0
|
phax/ph-oton,phax/ph-oton,phax/ph-oton
|
/**
* Copyright (C) 2014-2015 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.photon.bootstrap3.form;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import com.helger.commons.annotations.OverrideOnDemand;
import com.helger.commons.collections.CollectionHelper;
import com.helger.commons.string.StringHelper;
import com.helger.html.css.DefaultCSSClassProvider;
import com.helger.html.css.ICSSClassProvider;
import com.helger.html.hc.IHCControl;
import com.helger.html.hc.IHCElement;
import com.helger.html.hc.IHCElementWithChildren;
import com.helger.html.hc.IHCNode;
import com.helger.html.hc.html.HCCheckBox;
import com.helger.html.hc.html.HCDiv;
import com.helger.html.hc.html.HCLabel;
import com.helger.html.hc.html.HCRadioButton;
import com.helger.html.hc.htmlext.HCUtils;
import com.helger.html.hc.impl.AbstractHCInput;
import com.helger.photon.bootstrap3.BootstrapHelper;
import com.helger.photon.bootstrap3.CBootstrapCSS;
import com.helger.photon.bootstrap3.grid.BootstrapGridSpec;
import com.helger.photon.uicore.html.formlabel.HCFormLabel;
import com.helger.photon.uicore.html.formlabel.HCFormLabelUtils;
import com.helger.validation.error.IError;
import com.helger.validation.error.IErrorList;
/**
* This is the default implementation of {@link IBootstrapFormGroupRenderer}
* which performs standard rendering. It offers the possibility to modify
* certain styling by overriding the provided protected methods.
*
* @author Philip Helger
*/
@Immutable
@Immutable
public class DefaultBootstrapFormGroupRenderer implements IBootstrapFormGroupRenderer
{
  /** Marker CSS class set on the help text block of a form group */
  public static final ICSSClassProvider CSS_CLASS_FORM_GROUP_HELP_TEXT = DefaultCSSClassProvider.create ("form-group-help-text");
  /** Marker CSS class set on each error block of a form group */
  public static final ICSSClassProvider CSS_CLASS_FORM_GROUP_ERROR_TEXT = DefaultCSSClassProvider.create ("form-group-error-text");

  // Whether state feedback icons are emitted next to input controls (default: off)
  private boolean m_bUseIcons = false;

  public DefaultBootstrapFormGroupRenderer ()
  {}

  /** @return <code>true</code> if state feedback icons are rendered for input controls. */
  public boolean isUseIcons ()
  {
    return m_bUseIcons;
  }

  /**
   * @param bUseIcons
   *        <code>true</code> to render state feedback icons for input controls.
   */
  public void setUseIcons (final boolean bUseIcons)
  {
    m_bUseIcons = bUseIcons;
  }

  /**
   * Modify the first control that is inserted. This method is only called when
   * a label is present. The default implementation derives a placeholder text
   * from the label for input controls that have none.
   *
   * @param aLabel
   *        The label that was provided. Never <code>null</code>.
   * @param aFirstControl
   *        The first control that was provided. Never <code>null</code>.
   */
  @OverrideOnDemand
  protected void modifyFirstControlIfLabelIsPresent (@Nonnull final IHCElementWithChildren <?> aLabel,
                                                     @Nonnull final IHCControl <?> aFirstControl)
  {
    // Set the default placeholder (if none is present)
    if (aFirstControl instanceof AbstractHCInput <?>)
    {
      final AbstractHCInput <?> aEdit = (AbstractHCInput <?>) aFirstControl;
      // Only check for null, so that empty string overrides this
      // default behaviour
      if (aEdit.getPlaceholder () == null)
      {
        if (aLabel instanceof HCFormLabel)
        {
          // Special handling for the form label, which has explicit support for
          // label texts
          aEdit.setPlaceholder (((HCFormLabel) aLabel).getLabelText ());
        }
        else
        {
          // Trim eventually trailing ":" from string
          String sNewPlaceholder = StringHelper.trimEnd (aLabel.getPlainText (), HCFormLabelUtils.LABEL_END);
          // Trim trailing "*" or "°" marker
          sNewPlaceholder = StringHelper.trimEnd (sNewPlaceholder, HCFormLabelUtils.SIGN_ALTERNATIVE);
          sNewPlaceholder = StringHelper.trimEnd (sNewPlaceholder, HCFormLabelUtils.SIGN_MANDATORY);
          aEdit.setPlaceholder (sNewPlaceholder);
        }
      }
    }
  }

  /**
   * Create the help text node
   *
   * @param aHelpText
   *        The source help text. Never <code>null</code>.
   * @return Never <code>null</code>.
   */
  @Nonnull
  @OverrideOnDemand
  protected IHCElement <?> createHelpTextNode (@Nonnull final IHCNode aHelpText)
  {
    final BootstrapHelpBlock aHelpBlock = new BootstrapHelpBlock ().addClass (CSS_CLASS_FORM_GROUP_HELP_TEXT);
    aHelpBlock.addChild (aHelpText);
    return aHelpBlock;
  }

  /**
   * Retrieve the optional form group state (which maps to a CSS class on the
   * final node) from the passed error list. ERROR takes precedence over
   * WARNING.
   *
   * @param aErrorList
   *        The error list. May be <code>null</code>.
   * @return May be <code>null</code> to indicate no special state.
   */
  @Nullable
  @OverrideOnDemand
  protected EBootstrapFormGroupState getFormGroupStateFromErrorList (@Nullable final IErrorList aErrorList)
  {
    if (aErrorList != null && !aErrorList.isEmpty ())
    {
      if (aErrorList.containsAtLeastOneError ())
        return EBootstrapFormGroupState.ERROR;
      if (aErrorList.containsAtLeastOneFailure ())
        return EBootstrapFormGroupState.WARNING;
    }
    return null;
  }

  /**
   * Create the node for a single error.
   *
   * @param aError
   *        The provided error. Never <code>null</code>.
   * @return Never <code>null</code>.
   */
  @Nonnull
  @OverrideOnDemand
  protected IHCElement <?> createSingleErrorNode (@Nonnull final IError aError)
  {
    final BootstrapHelpBlock aErrorBlock = new BootstrapHelpBlock ().addClass (CSS_CLASS_FORM_GROUP_ERROR_TEXT);
    aErrorBlock.addChild (aError.getErrorText ());
    return aErrorBlock;
  }

  /**
   * Callback possibility to change the finally created node before it is
   * returned. By default nothing happens in here.
   *
   * @param aForm
   *        The source form. Never <code>null</code>.
   * @param aFormGroup
   *        The source form group. Never <code>null</code>.
   * @param aFinalNode
   *        The created node so far. Never <code>null</code>.
   */
  @OverrideOnDemand
  protected void modifyFinalNode (@Nonnull final IBootstrapFormGroupContainer aForm,
                                  @Nonnull final BootstrapFormGroup aFormGroup,
                                  @Nonnull final HCDiv aFinalNode)
  {}

  @Nonnull
  public IHCElement <?> renderFormGroup (@Nonnull final IBootstrapFormGroupContainer aForm,
                                         @Nonnull final BootstrapFormGroup aFormGroup)
  {
    // Gather all inputs: form layout, grid columns, label, controls, help text and errors
    final EBootstrapFormType eFormType = aForm.getFormType ();
    final BootstrapGridSpec aLeftGrid = aForm.getLeft ();
    final BootstrapGridSpec aRightGrid = aForm.getRight ();
    final HCFormLabel aLabel = aFormGroup.getLabel ();
    final IHCNode aCtrls = aFormGroup.getCtrl ();
    final IHCNode aHelpText = aFormGroup.getHelpText ();
    final IErrorList aErrorList = aFormGroup.getErrorList ();

    // The state derived from errors wins over the explicitly set state
    EBootstrapFormGroupState eState = getFormGroupStateFromErrorList (aErrorList);
    if (eState == null)
      eState = aFormGroup.getState ();

    final List <IHCControl <?>> aAllCtrls = HCUtils.getAllHCControls (aCtrls);
    // Set CSS class to all contained controls
    BootstrapHelper.markAsFormControls (aAllCtrls);
    final IHCControl <?> aFirstControl = CollectionHelper.getFirstElement (aAllCtrls);

    HCDiv aFinalNode;
    final boolean bFirstControlIsCheckBox = aAllCtrls.size () == 1 && aFirstControl instanceof HCCheckBox;
    final boolean bFirstControlIsRadioButton = aAllCtrls.size () == 1 && aFirstControl instanceof HCRadioButton;
    boolean bUseIcons = false;
    if (bFirstControlIsCheckBox || bFirstControlIsRadioButton)
    {
      // Never icons for check box/radio button
      // Check box or radio button
      final HCDiv aCtrlDiv = new HCDiv ();
      if (bFirstControlIsCheckBox)
        aCtrlDiv.addClass (CBootstrapCSS.CHECKBOX);
      else
        if (bFirstControlIsRadioButton)
          aCtrlDiv.addClass (CBootstrapCSS.RADIO);
      if (aLabel == null || !aLabel.hasChildren ())
      {
        // No label text - wrap the control in an empty label (Bootstrap markup)
        aCtrlDiv.addChild (new HCLabel ().addChild (aCtrls));
      }
      else
      {
        if (aLabel.isTextLabel ())
        {
          // Use only the text
          final String sLabelText = aLabel.getLabelText ();
          aLabel.removeAllChildren ().addChild (sLabelText);
        }
        // Control goes before the label text, separated by a space
        aLabel.addChild (0, aCtrls);
        aLabel.addChild (1, " ");
        aCtrlDiv.addChild (aLabel);
      }
      if (eFormType == EBootstrapFormType.HORIZONTAL)
      {
        // Horizontal form: offset the control into the right grid column
        final HCDiv aCtrlParent = new HCDiv ();
        aLeftGrid.applyOffsetTo (aCtrlParent);
        aRightGrid.applyTo (aCtrlParent);
        aFinalNode = new HCDiv ().addClass (CBootstrapCSS.FORM_GROUP).addChild (aCtrlParent.addChild (aCtrlDiv));
      }
      else
        aFinalNode = aCtrlDiv;
    }
    else
    {
      // Icons for edits?
      // NOTE(review): assumes eState is never null here (i.e. getState () returns
      // a NONE state rather than null) - confirm against BootstrapFormGroup
      bUseIcons = isUseIcons () && eState.isNotNone () && aFirstControl instanceof AbstractHCInput <?>;

      // Set static class for all direct children which are not controls
      final boolean bContainsFormControlStatic = aAllCtrls.isEmpty () &&
                                                 BootstrapHelper.containsFormControlStatic (aCtrls);

      // Other control - add in form group
      aFinalNode = new HCDiv ().addClass (CBootstrapCSS.FORM_GROUP);
      if (aLabel != null && aLabel.hasChildren ())
      {
        // We have a label

        // Screen reader only....
        if (eFormType == EBootstrapFormType.INLINE)
          aLabel.addClass (CBootstrapCSS.SR_ONLY);
        else
          if (eFormType == EBootstrapFormType.HORIZONTAL)
          {
            aLabel.addClass (CBootstrapCSS.CONTROL_LABEL);
            aLeftGrid.applyTo (aLabel);
          }
        if (aFirstControl != null)
        {
          // We have a label for a control
          aLabel.setFor (aFirstControl);
          modifyFirstControlIfLabelIsPresent (aLabel, aFirstControl);
        }
        if (eFormType == EBootstrapFormType.HORIZONTAL)
        {
          final HCDiv aCtrlParent = new HCDiv ();
          aRightGrid.applyTo (aCtrlParent);
          if (bUseIcons)
            aCtrlParent.addChild (eState.getIconAsNode ());
          if (bContainsFormControlStatic)
            aCtrlParent.addClass (CBootstrapCSS.FORM_CONTROL_STATIC);
          aCtrlParent.addChild (aCtrls);
          aFinalNode.addChildren (aLabel, aCtrlParent);
        }
        else
        {
          if (bContainsFormControlStatic)
            BootstrapHelper.makeFormControlStatic (aCtrls);
          aFinalNode.addChildren (aLabel, aCtrls);
          if (bUseIcons)
            aFinalNode.addChild (eState.getIconAsNode ());
        }
      }
      else
      {
        // No label - just add controls
        if (eFormType == EBootstrapFormType.HORIZONTAL)
        {
          final HCDiv aCtrlParent = new HCDiv ();
          aLeftGrid.applyOffsetTo (aCtrlParent);
          aRightGrid.applyTo (aCtrlParent);
          if (bUseIcons)
            aCtrlParent.addChild (eState.getIconAsNode ());
          if (bContainsFormControlStatic)
            aCtrlParent.addClass (CBootstrapCSS.FORM_CONTROL_STATIC);
          aCtrlParent.addChild (aCtrls);
          aFinalNode.addChild (aCtrlParent);
        }
        else
        {
          if (bContainsFormControlStatic)
            BootstrapHelper.makeFormControlStatic (aCtrls);
          aFinalNode.addChild (aCtrls);
          if (bUseIcons)
            aFinalNode.addChild (eState.getIconAsNode ());
        }
      }
    }

    // Help text (only if a control is present)
    if (aHelpText != null && aCtrls != null)
    {
      final IHCElement <?> aHelpTextNode = createHelpTextNode (aHelpText);
      if (eFormType == EBootstrapFormType.INLINE)
        aHelpTextNode.addClass (CBootstrapCSS.SR_ONLY);
      if (eFormType == EBootstrapFormType.HORIZONTAL)
        ((HCDiv) aFinalNode.getLastChild ()).addChild (aHelpTextNode);
      else
        aFinalNode.addChild (aHelpTextNode);
    }

    // set specified highlighting state
    aFinalNode.addClass (eState);

    // Check form errors - highlighting
    if (aErrorList != null && !aErrorList.isEmpty ())
    {
      for (final IError aError : aErrorList)
      {
        final IHCElement <?> aErrorNode = createSingleErrorNode (aError);
        if (eFormType == EBootstrapFormType.HORIZONTAL)
        {
          // Align error messages with the control column
          aLeftGrid.applyOffsetTo (aErrorNode);
          aRightGrid.applyTo (aErrorNode);
        }
        aFinalNode.addChild (aErrorNode);
      }
    }

    if (bUseIcons)
      aFinalNode.addClass (CBootstrapCSS.HAS_FEEDBACK);

    // Set ID, class and style
    aFormGroup.applyBasicHTMLTo (aFinalNode);
    modifyFinalNode (aForm, aFormGroup, aFinalNode);
    return aFinalNode;
  }
}
|
ph-oton-bootstrap3/src/main/java/com/helger/photon/bootstrap3/form/DefaultBootstrapFormGroupRenderer.java
|
/**
* Copyright (C) 2014-2015 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.photon.bootstrap3.form;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import com.helger.commons.annotations.OverrideOnDemand;
import com.helger.commons.collections.CollectionHelper;
import com.helger.commons.string.StringHelper;
import com.helger.html.css.DefaultCSSClassProvider;
import com.helger.html.css.ICSSClassProvider;
import com.helger.html.hc.IHCControl;
import com.helger.html.hc.IHCElement;
import com.helger.html.hc.IHCElementWithChildren;
import com.helger.html.hc.IHCNode;
import com.helger.html.hc.html.HCCheckBox;
import com.helger.html.hc.html.HCDiv;
import com.helger.html.hc.html.HCLabel;
import com.helger.html.hc.html.HCRadioButton;
import com.helger.html.hc.htmlext.HCUtils;
import com.helger.html.hc.impl.AbstractHCInput;
import com.helger.photon.bootstrap3.BootstrapHelper;
import com.helger.photon.bootstrap3.CBootstrapCSS;
import com.helger.photon.bootstrap3.grid.BootstrapGridSpec;
import com.helger.photon.uicore.html.formlabel.HCFormLabel;
import com.helger.photon.uicore.html.formlabel.HCFormLabelUtils;
import com.helger.validation.error.IError;
import com.helger.validation.error.IErrorList;
/**
* This is the default implementation of {@link IBootstrapFormGroupRenderer}
* which performs standard rendering. It offers the possibility to modify
* certain styling by overriding the provided protected methods.
*
* @author Philip Helger
*/
@Immutable
public class DefaultBootstrapFormGroupRenderer implements IBootstrapFormGroupRenderer
{
public static final ICSSClassProvider CSS_CLASS_FORM_GROUP_HELP_TEXT = DefaultCSSClassProvider.create ("form-group-help-text");
public static final ICSSClassProvider CSS_CLASS_FORM_GROUP_ERROR_TEXT = DefaultCSSClassProvider.create ("form-group-error-text");
private boolean m_bUseIcons = false;
/** Default constructor. State feedback icons are disabled by default. */
public DefaultBootstrapFormGroupRenderer ()
{}
/** @return <code>true</code> if state feedback icons are rendered for input controls. */
public boolean isUseIcons ()
{
  return m_bUseIcons;
}
/**
 * @param bUseIcons
 *        <code>true</code> to render state feedback icons for input controls.
 */
public void setUseIcons (final boolean bUseIcons)
{
  m_bUseIcons = bUseIcons;
}
/**
 * Modify the first control that is inserted. This method is only called when
 * a label is present. The default implementation derives a placeholder text
 * from the label for input controls that have none.
 *
 * @param aLabel
 *        The label that was provided. Never <code>null</code>.
 * @param aFirstControl
 *        The first control that was provided. Never <code>null</code>.
 */
@OverrideOnDemand
protected void modifyFirstControlIfLabelIsPresent (@Nonnull final IHCElementWithChildren <?> aLabel,
                                                   @Nonnull final IHCControl <?> aFirstControl)
{
  // Set the default placeholder (if none is present)
  if (aFirstControl instanceof AbstractHCInput <?>)
  {
    final AbstractHCInput <?> aEdit = (AbstractHCInput <?>) aFirstControl;
    // Only check for null, so that empty string overrides this
    // default behaviour
    if (aEdit.getPlaceholder () == null)
    {
      if (aLabel instanceof HCFormLabel)
      {
        // Special handling for the form label, which has explicit support for
        // label texts
        aEdit.setPlaceholder (((HCFormLabel) aLabel).getLabelText ());
      }
      else
      {
        // Trim eventually trailing ":" from string
        String sNewPlaceholder = StringHelper.trimEnd (aLabel.getPlainText (), HCFormLabelUtils.LABEL_END);
        // Trim trailing "*" or "°" marker
        sNewPlaceholder = StringHelper.trimEnd (sNewPlaceholder, HCFormLabelUtils.SIGN_ALTERNATIVE);
        sNewPlaceholder = StringHelper.trimEnd (sNewPlaceholder, HCFormLabelUtils.SIGN_MANDATORY);
        aEdit.setPlaceholder (sNewPlaceholder);
      }
    }
  }
}
/**
 * Create the help text node. The default implementation wraps the help text
 * in a help block carrying the {@code form-group-help-text} marker class.
 *
 * @param aHelpText
 *        The source help text. Never <code>null</code>.
 * @return Never <code>null</code>.
 */
@Nonnull
@OverrideOnDemand
protected IHCElement <?> createHelpTextNode (@Nonnull final IHCNode aHelpText)
{
  final BootstrapHelpBlock aHelpBlock = new BootstrapHelpBlock ().addClass (CSS_CLASS_FORM_GROUP_HELP_TEXT);
  aHelpBlock.addChild (aHelpText);
  return aHelpBlock;
}
/**
 * Retrieve the optional form group state (which maps to a CSS class on the
 * final node) from the passed error list. ERROR takes precedence over
 * WARNING.
 *
 * @param aErrorList
 *        The error list. May be <code>null</code>.
 * @return May be <code>null</code> to indicate no special state.
 */
@Nullable
@OverrideOnDemand
protected EBootstrapFormGroupState getFormGroupStateFromErrorList (@Nullable final IErrorList aErrorList)
{
  if (aErrorList != null && !aErrorList.isEmpty ())
  {
    if (aErrorList.containsAtLeastOneError ())
      return EBootstrapFormGroupState.ERROR;
    if (aErrorList.containsAtLeastOneFailure ())
      return EBootstrapFormGroupState.WARNING;
  }
  return null;
}
/**
 * Create the node for a single error. The default implementation wraps the
 * error text in a help block carrying the {@code form-group-error-text}
 * marker class.
 *
 * @param aError
 *        The provided error. Never <code>null</code>.
 * @return Never <code>null</code>.
 */
@Nonnull
@OverrideOnDemand
protected IHCElement <?> createSingleErrorNode (@Nonnull final IError aError)
{
  final BootstrapHelpBlock aErrorBlock = new BootstrapHelpBlock ().addClass (CSS_CLASS_FORM_GROUP_ERROR_TEXT);
  aErrorBlock.addChild (aError.getErrorText ());
  return aErrorBlock;
}
/**
 * Callback possibility to change the finally created node before it is
 * returned. By default nothing happens in here.
 *
 * @param aForm
 *        The source form. Never <code>null</code>.
 * @param aFormGroup
 *        The source form group. Never <code>null</code>.
 * @param aFinalNode
 *        The created node so far. Never <code>null</code>.
 */
@OverrideOnDemand
protected void modifyFinalNode (@Nonnull final IBootstrapFormGroupContainer aForm,
                                @Nonnull final BootstrapFormGroup aFormGroup,
                                @Nonnull final HCDiv aFinalNode)
{}
@Nonnull
public IHCElement <?> renderFormGroup (@Nonnull final IBootstrapFormGroupContainer aForm,
@Nonnull final BootstrapFormGroup aFormGroup)
{
final EBootstrapFormType eFormType = aForm.getFormType ();
final BootstrapGridSpec aLeftGrid = aForm.getLeft ();
final BootstrapGridSpec aRightGrid = aForm.getRight ();
final HCFormLabel aLabel = aFormGroup.getLabel ();
final IHCNode aCtrls = aFormGroup.getCtrl ();
final IHCNode aHelpText = aFormGroup.getHelpText ();
final IErrorList aErrorList = aFormGroup.getErrorList ();
EBootstrapFormGroupState eState = getFormGroupStateFromErrorList (aErrorList);
if (eState == null)
eState = aFormGroup.getState ();
final List <IHCControl <?>> aAllCtrls = HCUtils.getAllHCControls (aCtrls);
// Set CSS class to all contained controls
BootstrapHelper.markAsFormControls (aAllCtrls);
final IHCControl <?> aFirstControl = CollectionHelper.getFirstElement (aAllCtrls);
HCDiv aFinalNode;
final boolean bFirstControlIsCheckBox = aAllCtrls.size () == 1 && aFirstControl instanceof HCCheckBox;
final boolean bFirstControlIsRadioButton = aAllCtrls.size () == 1 && aFirstControl instanceof HCRadioButton;
boolean bUseIcons = false;
if (bFirstControlIsCheckBox || bFirstControlIsRadioButton)
{
// Never icons for check box/radio button
// Check box or radio button
final HCDiv aCtrlDiv = new HCDiv ();
if (bFirstControlIsCheckBox)
aCtrlDiv.addClass (CBootstrapCSS.CHECKBOX);
else
if (bFirstControlIsRadioButton)
aCtrlDiv.addClass (CBootstrapCSS.RADIO);
if (aLabel == null || !aLabel.hasChildren ())
{
aCtrlDiv.addChild (new HCLabel ().addChild (aCtrls));
}
else
{
if (aLabel.isTextLabel ())
{
// Use only the text
final String sLabelText = aLabel.getLabelText ();
aLabel.removeAllChildren ().addChild (sLabelText);
}
aLabel.addChild (0, aCtrls);
aLabel.addChild (1, " ");
aCtrlDiv.addChild (aLabel);
}
if (eFormType == EBootstrapFormType.HORIZONTAL)
{
final HCDiv aCtrlParent = new HCDiv ();
aLeftGrid.applyOffsetTo (aCtrlParent);
aRightGrid.applyTo (aCtrlParent);
aFinalNode = new HCDiv ().addClass (CBootstrapCSS.FORM_GROUP).addChild (aCtrlParent.addChild (aCtrlDiv));
}
else
aFinalNode = aCtrlDiv;
}
else
{
// Icons for edits?
bUseIcons = isUseIcons () && eState.isNotNone () && aFirstControl instanceof AbstractHCInput <?>;
// Set static class for all direct children which are not controls
final boolean bContainsFormControlStatic = aAllCtrls.isEmpty () &&
BootstrapHelper.containsFormControlStatic (aCtrls);
// Other control - add in form group
aFinalNode = new HCDiv ().addClass (CBootstrapCSS.FORM_GROUP);
if (aLabel != null && aLabel.hasChildren ())
{
// We have a label
// Screen reader only....
if (eFormType == EBootstrapFormType.INLINE)
aLabel.addClass (CBootstrapCSS.SR_ONLY);
else
if (eFormType == EBootstrapFormType.HORIZONTAL)
{
aLabel.addClass (CBootstrapCSS.CONTROL_LABEL);
aLeftGrid.applyTo (aLabel);
}
if (aFirstControl != null)
{
// We have a label for a control
aLabel.setFor (aFirstControl);
modifyFirstControlIfLabelIsPresent (aLabel, aFirstControl);
}
if (eFormType == EBootstrapFormType.HORIZONTAL)
{
final HCDiv aCtrlParent = new HCDiv ();
aRightGrid.applyTo (aCtrlParent);
if (bUseIcons)
aCtrlParent.addChild (eState.getIconAsNode ());
if (bContainsFormControlStatic)
aCtrlParent.addClass (CBootstrapCSS.FORM_CONTROL_STATIC);
aCtrlParent.addChild (aCtrls);
aFinalNode.addChildren (aLabel, aCtrlParent);
}
else
{
if (bContainsFormControlStatic)
BootstrapHelper.makeFormControlStatic (aCtrls);
aFinalNode.addChildren (aLabel, aCtrls);
if (bUseIcons)
aFinalNode.addChild (eState.getIconAsNode ());
}
}
else
{
// No label - just add controls
if (eFormType == EBootstrapFormType.HORIZONTAL)
{
final HCDiv aCtrlParent = new HCDiv ();
aLeftGrid.applyOffsetTo (aCtrlParent);
aRightGrid.applyTo (aCtrlParent);
if (bUseIcons)
aCtrlParent.addChild (eState.getIconAsNode ());
if (bContainsFormControlStatic)
aCtrlParent.addClass (CBootstrapCSS.FORM_CONTROL_STATIC);
aCtrlParent.addChild (aCtrls);
aFinalNode.addChild (aCtrlParent);
}
else
{
if (bContainsFormControlStatic)
BootstrapHelper.makeFormControlStatic (aCtrls);
aFinalNode.addChild (aCtrls);
if (bUseIcons)
aFinalNode.addChild (eState.getIconAsNode ());
}
}
}
// Help text
if (aHelpText != null)
{
final IHCElement <?> aHelpTextNode = createHelpTextNode (aHelpText);
if (eFormType == EBootstrapFormType.INLINE)
aHelpTextNode.addClass (CBootstrapCSS.SR_ONLY);
if (eFormType == EBootstrapFormType.HORIZONTAL)
((HCDiv) aFinalNode.getLastChild ()).addChild (aHelpTextNode);
else
aFinalNode.addChild (aHelpTextNode);
}
// set specified highlighting state
aFinalNode.addClass (eState);
// Check form errors - highlighting
if (aErrorList != null && !aErrorList.isEmpty ())
{
for (final IError aError : aErrorList)
{
final IHCElement <?> aErrorNode = createSingleErrorNode (aError);
if (eFormType == EBootstrapFormType.HORIZONTAL)
{
aLeftGrid.applyOffsetTo (aErrorNode);
aRightGrid.applyTo (aErrorNode);
}
aFinalNode.addChild (aErrorNode);
}
}
if (bUseIcons)
aFinalNode.addClass (CBootstrapCSS.HAS_FEEDBACK);
// Set ID, class and style
aFormGroup.applyBasicHTMLTo (aFinalNode);
modifyFinalNode (aForm, aFormGroup, aFinalNode);
return aFinalNode;
}
}
|
Show help text only if a control is present
|
ph-oton-bootstrap3/src/main/java/com/helger/photon/bootstrap3/form/DefaultBootstrapFormGroupRenderer.java
|
Show help text only if a control is present
|
|
Java
|
apache-2.0
|
8e47376d464cc786989d2de60ab054299b7044b3
| 0
|
dgrigutsch/mosby,lenguyenthanh/mosby,vjames19/mosby,sockeqwe/mosby,gmail-msn/mosby,guyschaos/mosby,b-cuts/mosby,b-cuts/mosby,dgrigutsch/mosby,vjames19/mosby,sockeqwe/mosby,b-cuts/mosby,10211509/mosby,guyschaos/mosby,10211509/mosby,androidgilbert/mosby,androidgilbert/mosby,gmail-msn/mosby,gmail-msn/mosby,10211509/mosby,hanshou361248909/mosby,vjames19/mosby,dgrigutsch/mosby,hanshou361248909/mosby,androidgilbert/mosby,lenguyenthanh/mosby
|
/*
* Copyright 2015 Hannes Dorfmann.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hannesdorfmann.mosby.mvp;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import com.hannesdorfmann.mosby.mvp.delegate.ActivityMvpDelegate;
import com.hannesdorfmann.mosby.mvp.delegate.ActivityMvpDelegateCallback;
import com.hannesdorfmann.mosby.mvp.delegate.ActivityMvpDelegateImpl;
/**
 * An Activity that uses an {@link MvpPresenter} to implement a Model-View-Presenter
 * architecture.
 *
 * <p>Every Android lifecycle callback is forwarded to the {@link ActivityMvpDelegate}
 * (created lazily in {@link #getMvpDelegate()}), which is responsible for creating the
 * presenter and attaching/detaching this Activity as the view.</p>
 *
 * @author Hannes Dorfmann
 * @since 1.0.0
 */
public abstract class MvpActivity<V extends MvpView, P extends MvpPresenter<V>>
    extends AppCompatActivity implements ActivityMvpDelegateCallback<V, P>, MvpView {

  // Lazily created in getMvpDelegate(); only one instance must exist per Activity.
  protected ActivityMvpDelegate mvpDelegate;
  // The presenter for this view, assigned through setPresenter(...).
  protected P presenter;
  // When true (and a configuration change is in progress) the instance is retained;
  // see shouldInstanceBeRetained().
  protected boolean retainInstance;

  // --- Lifecycle callbacks: each invokes super first, then forwards to the delegate.
  // --- The super-before-delegate ordering is intentional; do not reorder.

  @Override protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getMvpDelegate().onCreate(savedInstanceState);
  }

  @Override protected void onDestroy() {
    super.onDestroy();
    getMvpDelegate().onDestroy();
  }

  @Override protected void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    getMvpDelegate().onSaveInstanceState(outState);
  }

  @Override protected void onPause() {
    super.onPause();
    getMvpDelegate().onPause();
  }

  @Override protected void onResume() {
    super.onResume();
    getMvpDelegate().onResume();
  }

  @Override protected void onStart() {
    super.onStart();
    getMvpDelegate().onStart();
  }

  @Override protected void onStop() {
    super.onStop();
    getMvpDelegate().onStop();
  }

  @Override protected void onRestart() {
    super.onRestart();
    getMvpDelegate().onRestart();
  }

  @Override public void onContentChanged() {
    super.onContentChanged();
    getMvpDelegate().onContentChanged();
  }

  @Override protected void onPostCreate(Bundle savedInstanceState) {
    super.onPostCreate(savedInstanceState);
    getMvpDelegate().onPostCreate(savedInstanceState);
  }

  /**
   * Instantiate a presenter instance
   *
   * @return The {@link MvpPresenter} for this view
   */
  @NonNull public abstract P createPresenter();

  /**
   * Get the mvp delegate. This is internally used for creating presenter, attaching and detaching
   * view from presenter.
   *
   * <p><b>Please note that only one instance of mvp delegate should be used per Activity
   * instance</b>.
   * </p>
   *
   * <p>
   * Only override this method if you really know what you are doing.
   * </p>
   *
   * @return {@link ActivityMvpDelegateImpl}
   */
  @NonNull protected ActivityMvpDelegate<V, P> getMvpDelegate() {
    if (mvpDelegate == null) {
      mvpDelegate = new ActivityMvpDelegateImpl(this);
    }
    return mvpDelegate;
  }

  @NonNull @Override public P getPresenter() {
    return presenter;
  }

  @Override public void setPresenter(@NonNull P presenter) {
    this.presenter = presenter;
  }

  /**
   * Returns this Activity as the MVP view.
   * Unchecked cast: the concrete subclass must actually implement {@code V}.
   */
  @NonNull @Override public V getMvpView() {
    return (V) this;
  }

  @Override public boolean isRetainInstance() {
    return retainInstance;
  }

  /**
   * The instance is retained only when retaining is requested AND a configuration
   * change (e.g. screen rotation) is currently in progress.
   */
  @Override public boolean shouldInstanceBeRetained() {
    return retainInstance && isChangingConfigurations();
  }

  @Override public void setRetainInstance(boolean retainInstance) {
    this.retainInstance = retainInstance;
  }

  // Hook for subclasses: returns the custom (non-Mosby) object to retain across
  // configuration changes. Default: nothing.
  @Override public Object onRetainNonMosbyCustomNonConfigurationInstance() {
    return null;
  }

  /**
   * Internally used by Mosby. Use {@link #onRetainNonMosbyCustomNonConfigurationInstance()} and
   * {@link #getNonMosbyLastCustomNonConfigurationInstance()}
   */
  @Override public final Object onRetainCustomNonConfigurationInstance() {
    return getMvpDelegate().onRetainCustomNonConfigurationInstance();
  }

  @Override public final Object getNonMosbyLastCustomNonConfigurationInstance() {
    return getMvpDelegate().getNonMosbyLastCustomNonConfigurationInstance();
  }
}
|
mvp/src/main/java/com/hannesdorfmann/mosby/mvp/MvpActivity.java
|
/*
* Copyright 2015 Hannes Dorfmann.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hannesdorfmann.mosby.mvp;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import com.hannesdorfmann.mosby.mvp.delegate.ActivityMvpDelegate;
import com.hannesdorfmann.mosby.mvp.delegate.ActivityMvpDelegateCallback;
import com.hannesdorfmann.mosby.mvp.delegate.ActivityMvpDelegateImpl;
/**
 * An Activity that uses an {@link MvpPresenter} to implement a Model-View-Presenter
 * architecture.
 *
 * <p>Every Android lifecycle callback is forwarded to the {@link ActivityMvpDelegate}
 * (created lazily in {@link #getMvpDelegate()}), which is responsible for creating the
 * presenter and attaching/detaching this Activity as the view.</p>
 *
 * @author Hannes Dorfmann
 * @since 1.0.0
 */
public abstract class MvpActivity<V extends MvpView, P extends MvpPresenter<V>>
    extends AppCompatActivity implements ActivityMvpDelegateCallback<V, P>, MvpView {

  // Lazily created in getMvpDelegate(); only one instance must exist per Activity.
  protected ActivityMvpDelegate mvpDelegate;
  // The presenter for this view, assigned through setPresenter(...).
  protected P presenter;
  // When true (and a configuration change is in progress) the instance is retained;
  // see shouldInstanceBeRetained().
  protected boolean retainInstance;

  // --- Lifecycle callbacks: each invokes super first, then forwards to the delegate.
  // --- The super-before-delegate ordering is intentional; do not reorder.

  @Override protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getMvpDelegate().onCreate(savedInstanceState);
  }

  @Override protected void onDestroy() {
    super.onDestroy();
    getMvpDelegate().onDestroy();
  }

  @Override protected void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    getMvpDelegate().onSaveInstanceState(outState);
  }

  @Override protected void onPause() {
    super.onPause();
    getMvpDelegate().onPause();
  }

  @Override protected void onResume() {
    super.onResume();
    getMvpDelegate().onResume();
  }

  @Override protected void onStart() {
    super.onStart();
    getMvpDelegate().onStart();
  }

  @Override protected void onStop() {
    super.onStop();
    getMvpDelegate().onStop();
  }

  @Override protected void onRestart() {
    super.onRestart();
    getMvpDelegate().onRestart();
  }

  @Override public void onContentChanged() {
    super.onContentChanged();
    getMvpDelegate().onContentChanged();
  }

  @Override protected void onPostCreate(Bundle savedInstanceState) {
    super.onPostCreate(savedInstanceState);
    getMvpDelegate().onPostCreate(savedInstanceState);
  }

  /**
   * Instantiate a presenter instance
   *
   * @return The {@link MvpPresenter} for this view
   */
  @NonNull public abstract P createPresenter();

  /**
   * Get the mvp delegate. This is internally used for creating presenter, attaching and detaching
   * view from presenter.
   *
   * <p><b>Please note that only one instance of mvp delegate should be used per Activity
   * instance</b>.
   * </p>
   *
   * <p>
   * Only override this method if you really know what you are doing.
   * </p>
   *
   * @return {@link ActivityMvpDelegateImpl}
   */
  @NonNull protected ActivityMvpDelegate<V, P> getMvpDelegate() {
    if (mvpDelegate == null) {
      mvpDelegate = new ActivityMvpDelegateImpl(this);
    }
    return mvpDelegate;
  }

  @NonNull @Override public P getPresenter() {
    return presenter;
  }

  @Override public void setPresenter(@NonNull P presenter) {
    this.presenter = presenter;
  }

  /**
   * Returns this Activity as the MVP view.
   * Unchecked cast: the concrete subclass must actually implement {@code V}.
   */
  @NonNull @Override public V getMvpView() {
    return (V) this;
  }

  @Override public boolean isRetainInstance() {
    return retainInstance;
  }

  /**
   * The instance is retained only when retaining is requested AND a configuration
   * change (e.g. screen rotation) is currently in progress.
   */
  @Override public boolean shouldInstanceBeRetained() {
    return retainInstance && isChangingConfigurations();
  }

  @Override public void setRetainInstance(boolean retainInstance) {
    this.retainInstance = retainInstance;
  }

  // Hook for subclasses: returns the custom (non-Mosby) object to retain across
  // configuration changes. Default: nothing.
  @Override public Object onRetainNonMosbyCustomNonConfigurationInstance() {
    return null;
  }

  // NOTE(review): this delegates to Mosby's internal retain mechanism; consider
  // marking it final so subclasses override
  // onRetainNonMosbyCustomNonConfigurationInstance() instead of this method.
  @Override public Object onRetainCustomNonConfigurationInstance() {
    return getMvpDelegate().onRetainCustomNonConfigurationInstance();
  }

  @Override public Object getNonMosbyLastCustomNonConfigurationInstance() {
    return getMvpDelegate().getNonMosbyLastCustomNonConfigurationInstance();
  }
}
|
MvpActivity: marking onRetainCustomNonConfigurationInstance() as final since internally used by mosby; New snapshot available
|
mvp/src/main/java/com/hannesdorfmann/mosby/mvp/MvpActivity.java
|
MvpActivity: marking onRetainCustomNonConfigurationInstance() as final since internally used by mosby; New snapshot available
|
|
Java
|
apache-2.0
|
6caf5a7822d3e005191856029cc2d69b5c9aa851
| 0
|
krissman/rhiot,rimolive/rhiot,rhiot/rhiot,rhiot/rhiot,rimolive/rhiot,rimolive/rhiot,lasombra/rhiot,rhiot/rhiot,rimolive/rhiot,rimolive/rhiot,krissman/rhiot,lasombra/rhiot,rhiot/rhiot,rimolive/rhiot,krissman/rhiot,rhiot/rhiot,lasombra/rhiot,krissman/rhiot,krissman/rhiot,rhiot/rhiot,krissman/rhiot,rhiot/rhiot,lasombra/rhiot,krissman/rhiot,rimolive/rhiot
|
/**
* Licensed to the Rhiot under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
 * The Rhiot licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rhiot.component.deviceio.i2c.driver;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_ADCDATA;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_BUSID;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_CALIBRATION_END;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_CALIBRATION_START;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_CONTROL;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_DEVICEADDR;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_READTEMPCMD;
import io.rhiot.component.deviceio.DeviceIOConstants;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jdk.dio.i2cbus.I2CDevice;
/**
 * Driver for the Bosch BMP085/BMP180 barometric pressure + temperature sensor,
 * accessed over I2C. Implements the compensation algorithm from the Bosch
 * datasheet using the calibration coefficients read from the sensor EEPROM.
 */
public class BMP180Driver extends I2CDriverAbstract {

    // 'transient' removed: it has no effect on a static field.
    private static final Logger LOG = LoggerFactory.getLogger(BMP180Driver.class);

    // Calibration coefficients, read once from the sensor EEPROM in start().
    // Names and signedness follow the Bosch datasheet: AC4..AC6 are unsigned
    // 16-bit values (hence int), the remaining coefficients are signed 16-bit.
    private short AC1;
    private short AC2;
    private short AC3;
    private int AC4;
    private int AC5;
    private int AC6;
    private short B1;
    private short B2;
    private short MB;
    private short MC;
    private short MD;

    // Oversampling mode; trades conversion time for pressure resolution.
    private BMP180OperatingMode mode = BMP180OperatingMode.STANDARD;

    /** Creates a driver bound to the default bus id and device address. */
    public BMP180Driver() {
        super(BMP085_BUSID, BMP085_DEVICEADDR);
    }

    /** Creates a driver around an already opened Device I/O I2C device. */
    public BMP180Driver(I2CDevice device) {
        super(device);
    }

    public BMP180OperatingMode getMode() {
        return mode;
    }

    public void setMode(BMP180OperatingMode mode) {
        this.mode = mode;
    }

    /**
     * Verifies the chip-id register holds the expected BMP085/BMP180 id.
     *
     * @throws IOException if the id register does not match
     */
    private void checkId() throws Exception {
        ByteBuffer bb = ByteBuffer.allocate(1);
        int bytesRead = read(BMP180Constants.BMP085_DEVICE_ID_ADDR,
                DeviceIOConstants.CAMEL_I2C_DIO_SUBADDRESS_SIZE_BITS, bb);
        LOG.debug("bytesRead={}", bytesRead);
        if (bb.get(0) != BMP180Constants.BMP085_DEVICE_ID) {
            throw new IOException("Could not read device identification");
        }
    }

    /**
     * Checks the device id and loads the 11 calibration coefficients from the
     * sensor EEPROM.
     *
     * @throws IOException if the calibration block cannot be read completely
     */
    @Override
    public void start() throws Exception {
        checkId();
        int totalBytes = BMP085_CALIBRATION_END - BMP085_CALIBRATION_START + 1;
        ByteBuffer bb = ByteBuffer.allocate(totalBytes);
        int bytesRead = read(BMP085_CALIBRATION_START, 1, bb);
        LOG.debug("bytesRead={}", bytesRead);
        if (bytesRead != totalBytes) {
            throw new IOException("Could not read calibration data");
        }
        AC1 = bb.getShort(0);
        AC2 = bb.getShort(2);
        AC3 = bb.getShort(4);
        // AC4..AC6 are unsigned 16-bit. Masking with 0xFFFF replaces the
        // previous "+ Short.MAX_VALUE * 2" correction, which added 65534
        // instead of 65536 and so was off by two for negative raw values.
        AC4 = bb.getShort(6) & 0xFFFF;
        AC5 = bb.getShort(8) & 0xFFFF;
        AC6 = bb.getShort(10) & 0xFFFF;
        B1 = bb.getShort(12);
        B2 = bb.getShort(14);
        MB = bb.getShort(16);
        MC = bb.getShort(18);
        MD = bb.getShort(20);
        LOG.info(String.format("AC1:%d, AC2:%d, AC3:%d, AC4:%d, AC5:%d, AC6:%d, B1:%d, B2:%d, MB:%d, MC:%d, MD:%d",
                AC1, AC2, AC3, AC4, AC5, AC6, B1, B2, MB, MC, MD));
    }

    @Override
    public void stop() {
        // Nothing to release; the underlying device lifecycle is handled elsewhere.
    }

    /**
     * Reads raw temperature and pressure and applies the Bosch datasheet
     * compensation algorithm (variable names X1/X2/B3... follow the datasheet).
     *
     * @return compensated pressure in Pa
     * @throws IOException if the sensor cannot be read
     */
    public int readPressure() throws IOException {
        long p = 0;
        int UT = readRawTemperature();
        // Fixed: a leftover debug override "UP = 85867;" used to discard the
        // real reading here; the measured value is now used.
        int UP = readRawPressure();
        // Temperature compensation (needed for B5).
        int X1 = ((UT - AC6) * AC5) >> 15;
        int X2 = (MC << 11) / (X1 + MD);
        int B5 = X1 + X2;
        LOG.debug("X1 = {}", X1);
        LOG.debug("X2 = {}", X2);
        LOG.debug("B5 = {}", B5);
        int B6 = B5 - 4000;
        X1 = (B2 * ((B6 * B6) >> 12)) >> 11;
        X2 = (AC2 * B6) >> 11;
        int X3 = X1 + X2;
        int B3 = (((AC1 * 4 + X3) << mode.getOverSamplingSetting()) + 2) / 4;
        LOG.debug("B6 = {}", B6);
        LOG.debug("X1 = {}", X1);
        LOG.debug("X2 = {}", X2);
        LOG.debug("X3 = {}", X3);
        LOG.debug("B3 = {}", B3);
        X1 = (AC3 * B6) >> 13;
        X2 = (B1 * ((B6 * B6) >> 12)) >> 16;
        X3 = ((X1 + X2) + 2) >> 2;
        long B4 = (AC4 * ((long) (X3 + 32768))) >> 15;
        long B7 = ((long) UP - B3) * (50000 >> mode.getOverSamplingSetting());
        LOG.debug("X1 = {}", X1);
        LOG.debug("X2 = {}", X2);
        LOG.debug("X3 = {}", X3);
        LOG.debug("B4 = {}", B4);
        LOG.debug("B7 = {}", B7);
        // Fixed: "B7 < 0x80000000" compared against the int literal
        // Integer.MIN_VALUE, so this branch could never be taken. The
        // datasheet means the unsigned value 2147483648, i.e. 0x80000000L.
        if (B7 < 0x80000000L) {
            p = (B7 * 2) / B4;
        } else {
            p = (B7 / B4) * 2;
        }
        X1 = (int) ((p >> 8) * (p >> 8));
        X1 = (X1 * 3038) >> 16;
        X2 = (int) (-7357 * p) >> 16;
        p = p + ((X1 + X2 + 3791) >> 4);
        LOG.debug("X1 = {}", X1);
        LOG.debug("X2 = {}", X2);
        LOG.debug("p = {}", p);
        return (int) p;
    }

    /**
     * Returns a {@link BMP180Value} with formatted pressure and temperature.
     * NOTE(review): the datasheet compensation yields pressure in Pa, so the
     * "hPa" label looks off by a factor of 100 — confirm against callers.
     */
    @Override
    public Object get() throws Exception {
        BMP180Value ret = new BMP180Value();
        ret.setPressure(String.format("%d hPa", readPressure()));
        ret.setTemperature(String.format("%+.2f ºC", readTemperature()));
        return ret;
    }

    @Override
    public Class getType() throws Exception {
        return BMP180Value.class;
    }

    /**
     * Triggers a pressure conversion and reads the raw ADC value, shifted
     * according to the current oversampling setting.
     */
    private int readRawPressure() throws IOException {
        ByteBuffer bb = ByteBuffer.allocateDirect(1);
        bb.put(0, (byte) mode.getPressureControlCommand());
        int bytesWrote = write(BMP085_CONTROL, 1, bb);
        if (bytesWrote != 1) {
            throw new IOException("Could not write 1 bytes data");
        }
        // Wait for the conversion time of the selected oversampling mode.
        sleep(mode.getWaitTime());
        return readU24BigEndian(BMP085_ADCDATA) >> (8 - mode.getOverSamplingSetting());
    }

    /** Triggers a temperature conversion and reads the raw 16-bit ADC value. */
    private int readRawTemperature() throws IOException {
        ByteBuffer bb = ByteBuffer.allocateDirect(1);
        bb.put(0, BMP085_READTEMPCMD);
        int bytesWrote = write(BMP085_CONTROL, 1, bb);
        if (bytesWrote != 1) {
            throw new IOException("Could not write 1 bytes data");
        }
        sleep(50);
        return readU16BigEndian(BMP085_ADCDATA);
    }

    /**
     * Reads the raw temperature and applies the datasheet compensation.
     *
     * @return temperature in degrees Celsius
     */
    private float readTemperature() throws IOException {
        int UT = readRawTemperature();
        int X1 = ((UT - AC6) * AC5) >> 15;
        int X2 = (MC << 11) / (X1 + MD);
        int B5 = X1 + X2;
        LOG.debug("X1 = {}", X1);
        LOG.debug("X2 = {}", X2);
        LOG.debug("B5 = {}", B5);
        return ((B5 + 8) >> 4) / 10.0f;
    }
}
|
datastream/components/camel-device-io/src/main/java/io/rhiot/component/deviceio/i2c/driver/BMP180Driver.java
|
/**
* Licensed to the Rhiot under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
 * The Rhiot licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rhiot.component.deviceio.i2c.driver;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_ADCDATA;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_BUSID;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_CALIBRATION_END;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_CALIBRATION_START;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_CONTROL;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_DEVICEADDR;
import static io.rhiot.component.deviceio.i2c.driver.BMP180Constants.BMP085_READTEMPCMD;
import io.rhiot.component.deviceio.DeviceIOConstants;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jdk.dio.i2cbus.I2CDevice;
/**
 * Driver for the Bosch BMP085/BMP180 barometric pressure + temperature sensor,
 * accessed over I2C. Implements the compensation algorithm from the Bosch
 * datasheet using the calibration coefficients read from the sensor EEPROM.
 */
public class BMP180Driver extends I2CDriverAbstract {

    // 'transient' removed: it has no effect on a static field.
    private static final Logger LOG = LoggerFactory.getLogger(BMP180Driver.class);

    // Calibration coefficients, read once from the sensor EEPROM in start().
    // Names and signedness follow the Bosch datasheet: AC4..AC6 are unsigned
    // 16-bit values (hence int), the remaining coefficients are signed 16-bit.
    private short AC1;
    private short AC2;
    private short AC3;
    private int AC4;
    private int AC5;
    private int AC6;
    private short B1;
    private short B2;
    private short MB;
    private short MC;
    private short MD;

    // Oversampling mode; trades conversion time for pressure resolution.
    private BMP180OperatingMode mode = BMP180OperatingMode.STANDARD;

    /** Creates a driver bound to the default bus id and device address. */
    public BMP180Driver() {
        super(BMP085_BUSID, BMP085_DEVICEADDR);
    }

    /** Creates a driver around an already opened Device I/O I2C device. */
    public BMP180Driver(I2CDevice device) {
        super(device);
    }

    public BMP180OperatingMode getMode() {
        return mode;
    }

    public void setMode(BMP180OperatingMode mode) {
        this.mode = mode;
    }

    /**
     * Verifies the chip-id register holds the expected BMP085/BMP180 id.
     *
     * @throws IOException if the id register does not match
     */
    private void checkId() throws Exception {
        ByteBuffer bb = ByteBuffer.allocate(1);
        int bytesRead = read(BMP180Constants.BMP085_DEVICE_ID_ADDR,
                DeviceIOConstants.CAMEL_I2C_DIO_SUBADDRESS_SIZE_BITS, bb);
        LOG.debug("bytesRead={}", bytesRead);
        if (bb.get(0) != BMP180Constants.BMP085_DEVICE_ID) {
            throw new IOException("Could not read device identification");
        }
    }

    /**
     * Checks the device id and loads the 11 calibration coefficients from the
     * sensor EEPROM.
     *
     * @throws IOException if the calibration block cannot be read completely
     */
    @Override
    public void start() throws Exception {
        checkId();
        int totalBytes = BMP085_CALIBRATION_END - BMP085_CALIBRATION_START + 1;
        ByteBuffer bb = ByteBuffer.allocate(totalBytes);
        int bytesRead = read(BMP085_CALIBRATION_START, 1, bb);
        LOG.debug("bytesRead={}", bytesRead);
        if (bytesRead != totalBytes) {
            throw new IOException("Could not read calibration data");
        }
        AC1 = bb.getShort(0);
        AC2 = bb.getShort(2);
        AC3 = bb.getShort(4);
        // AC4..AC6 are unsigned 16-bit. Masking with 0xFFFF replaces the
        // previous "+ Short.MAX_VALUE * 2" correction, which added 65534
        // instead of 65536 and so was off by two for negative raw values.
        AC4 = bb.getShort(6) & 0xFFFF;
        AC5 = bb.getShort(8) & 0xFFFF;
        AC6 = bb.getShort(10) & 0xFFFF;
        B1 = bb.getShort(12);
        B2 = bb.getShort(14);
        MB = bb.getShort(16);
        MC = bb.getShort(18);
        MD = bb.getShort(20);
        LOG.info(String.format("AC1:%d, AC2:%d, AC3:%d, AC4:%d, AC5:%d, AC6:%d, B1:%d, B2:%d, MB:%d, MC:%d, MD:%d",
                AC1, AC2, AC3, AC4, AC5, AC6, B1, B2, MB, MC, MD));
    }

    @Override
    public void stop() {
        // Nothing to release; the underlying device lifecycle is handled elsewhere.
    }

    /**
     * Reads raw temperature and pressure and applies the Bosch datasheet
     * compensation algorithm (variable names X1/X2/B3... follow the datasheet).
     *
     * @return compensated pressure in Pa
     * @throws IOException if the sensor cannot be read
     */
    public int readPressure() throws IOException {
        long p = 0;
        int UT = readRawTemperature();
        // Fixed: a leftover debug override "UP = 85867;" used to discard the
        // real reading here; the measured value is now used.
        int UP = readRawPressure();
        // Temperature compensation (needed for B5).
        int X1 = ((UT - AC6) * AC5) >> 15;
        int X2 = (MC << 11) / (X1 + MD);
        int B5 = X1 + X2;
        LOG.debug("X1 = {}", X1);
        LOG.debug("X2 = {}", X2);
        LOG.debug("B5 = {}", B5);
        int B6 = B5 - 4000;
        X1 = (B2 * ((B6 * B6) >> 12)) >> 11;
        X2 = (AC2 * B6) >> 11;
        int X3 = X1 + X2;
        int B3 = (((AC1 * 4 + X3) << mode.getOverSamplingSetting()) + 2) / 4;
        LOG.debug("B6 = {}", B6);
        LOG.debug("X1 = {}", X1);
        LOG.debug("X2 = {}", X2);
        LOG.debug("X3 = {}", X3);
        LOG.debug("B3 = {}", B3);
        X1 = (AC3 * B6) >> 13;
        X2 = (B1 * ((B6 * B6) >> 12)) >> 16;
        X3 = ((X1 + X2) + 2) >> 2;
        long B4 = (AC4 * ((long) (X3 + 32768))) >> 15;
        long B7 = ((long) UP - B3) * (50000 >> mode.getOverSamplingSetting());
        LOG.debug("X1 = {}", X1);
        LOG.debug("X2 = {}", X2);
        LOG.debug("X3 = {}", X3);
        LOG.debug("B4 = {}", B4);
        LOG.debug("B7 = {}", B7);
        // Fixed: "B7 < 0x80000000" compared against the int literal
        // Integer.MIN_VALUE, so this branch could never be taken. The
        // datasheet means the unsigned value 2147483648, i.e. 0x80000000L.
        if (B7 < 0x80000000L) {
            p = (B7 * 2) / B4;
        } else {
            p = (B7 / B4) * 2;
        }
        X1 = (int) ((p >> 8) * (p >> 8));
        X1 = (X1 * 3038) >> 16;
        X2 = (int) (-7357 * p) >> 16;
        p = p + ((X1 + X2 + 3791) >> 4);
        LOG.debug("X1 = {}", X1);
        LOG.debug("X2 = {}", X2);
        LOG.debug("p = {}", p);
        return (int) p;
    }

    /** Returns a {@link BMP180Value} carrying the compensated raw readings. */
    @Override
    public Object get() throws Exception {
        BMP180Value ret = new BMP180Value();
        ret.setPressure(readPressure());
        ret.setTemperature(readTemperature());
        return ret;
    }

    @Override
    public Class getType() throws Exception {
        return BMP180Value.class;
    }

    /**
     * Triggers a pressure conversion and reads the raw ADC value, shifted
     * according to the current oversampling setting.
     */
    private int readRawPressure() throws IOException {
        ByteBuffer bb = ByteBuffer.allocateDirect(1);
        bb.put(0, (byte) mode.getPressureControlCommand());
        int bytesWrote = write(BMP085_CONTROL, 1, bb);
        if (bytesWrote != 1) {
            throw new IOException("Could not write 1 bytes data");
        }
        // Wait for the conversion time of the selected oversampling mode.
        sleep(mode.getWaitTime());
        return readU24BigEndian(BMP085_ADCDATA) >> (8 - mode.getOverSamplingSetting());
    }

    /** Triggers a temperature conversion and reads the raw 16-bit ADC value. */
    private int readRawTemperature() throws IOException {
        ByteBuffer bb = ByteBuffer.allocateDirect(1);
        bb.put(0, BMP085_READTEMPCMD);
        int bytesWrote = write(BMP085_CONTROL, 1, bb);
        if (bytesWrote != 1) {
            throw new IOException("Could not write 1 bytes data");
        }
        sleep(50);
        return readU16BigEndian(BMP085_ADCDATA);
    }

    /**
     * Reads the raw temperature and applies the datasheet compensation.
     *
     * @return temperature in degrees Celsius
     */
    private float readTemperature() throws IOException {
        int UT = readRawTemperature();
        int X1 = ((UT - AC6) * AC5) >> 15;
        int X2 = (MC << 11) / (X1 + MD);
        int B5 = X1 + X2;
        LOG.debug("X1 = {}", X1);
        LOG.debug("X2 = {}", X2);
        LOG.debug("B5 = {}", B5);
        return ((B5 + 8) >> 4) / 10.0f;
    }
}
|
fix unit
|
datastream/components/camel-device-io/src/main/java/io/rhiot/component/deviceio/i2c/driver/BMP180Driver.java
|
fix unit
|
|
Java
|
bsd-3-clause
|
da5b8368af3d6533107c99e5509338e98f1c7dd4
| 0
|
knime-mpicbg/HCS-Tools,knime-mpicbg/HCS-Tools,knime-mpicbg/HCS-Tools,knime-mpicbg/HCS-Tools
|
package de.mpicbg.knime.hcs.base.nodes.manip.col.createinterval;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import javax.swing.BorderFactory;
import javax.swing.BoxLayout;
import javax.swing.ButtonGroup;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JTextField;
import org.knime.core.data.BooleanValue;
import org.knime.core.data.DataColumnSpec;
import org.knime.core.data.DataTableSpec;
import org.knime.core.data.DataType;
import org.knime.core.data.DataValue;
import org.knime.core.data.DoubleValue;
import org.knime.core.data.IntValue;
import org.knime.core.node.InvalidSettingsException;
import org.knime.core.node.NodeDialogPane;
import org.knime.core.node.NodeSettingsRO;
import org.knime.core.node.NodeSettingsWO;
import org.knime.core.node.NotConfigurableException;
import org.knime.core.node.util.ColumnSelectionPanel;
import org.knime.core.node.util.DataValueColumnFilter;
import de.mpicbg.knime.hcs.core.math.Interval.Mode;
public class CreateIntervalNodeDialog extends NodeDialogPane {
    // panel for "General Settings" tab
    private final JPanel comp_mainPanel;
    // backing settings model, kept in sync with the GUI via the item listeners
    private CreateIntervalNodeSettings m_settings = null;
    // column selectors for the interval's left and right bound values
    private final ColumnSelectionPanel comp_leftBoundColumn;
    private final ColumnSelectionPanel comp_rightBoundColumn;
    // column selectors providing per-row include/exclude flags for each bound
    private final ColumnSelectionPanel comp_leftModeColumn;
    private final ColumnSelectionPanel comp_rightModeColumn;
    // selector for the column to overwrite when "Replace" is chosen
    private final ColumnSelectionPanel comp_replaceColumnPanel;
    // name of the appended output column when "Append" is chosen
    private final JTextField comp_newColumnName;
    // radio pair: fixed include/exclude flags vs. flag columns
    private JRadioButton comp_useFixedModes;
    private JRadioButton comp_useFlexibleModes;
    // radio pair: replace an existing column vs. append a new one
    private final JRadioButton comp_replaceColumnRadio;
    private final JRadioButton comp_appendColumnRadio;
    // group of the four fixed interval-mode radios below
    private ButtonGroup comp_fixedModesSelection;
    // fixed interval modes, in standard interval notation ("[" = inclusive)
    private JRadioButton comp_inclBoth = new JRadioButton("[a;b]");
    private JRadioButton comp_inclLeft = new JRadioButton("[a;b)");
    private JRadioButton comp_inclRight = new JRadioButton("(a;b]");
    private JRadioButton comp_inclNone = new JRadioButton("(a;b)");
    /**
     * Constructor.
     * Builds the complete "General Settings" tab: bound-column selectors,
     * interval-mode controls and output-column controls, and wires each
     * widget to the shared settings model via item listeners.
     * Note: listener registration happens before the initial selections at
     * the end of the constructor, so the initial enable/disable state is
     * partly set explicitly (no event fires for the very first selection).
     */
    @SuppressWarnings("unchecked")
    public CreateIntervalNodeDialog() {
        super();
        m_settings = new CreateIntervalNodeSettings(CreateIntervalNodeModel.CFG_KEY);
        /** INIT COMPONENTS **/
        // main panel
        comp_mainPanel = new JPanel(new BorderLayout());
        // init left bound column combobox (numeric columns only)
        comp_leftBoundColumn =new ColumnSelectionPanel(BorderFactory.createEmptyBorder(), DoubleValue.class);
        // init right bound column combobox (numeric columns only)
        comp_rightBoundColumn = new ColumnSelectionPanel(BorderFactory.createEmptyBorder(), DoubleValue.class);
        // mode columns must carry boolean-like include/exclude flags
        DataValueColumnFilter columnFilter = new DataValueColumnFilter(BooleanValue.class, IntValue.class);
        // init left mode column combobox
        // (trailing 'true' presumably adds a "none" choice — TODO confirm against ColumnSelectionPanel API)
        comp_leftModeColumn = new ColumnSelectionPanel(BorderFactory.createEmptyBorder(), columnFilter, true);
        // init right mode column combobox
        comp_rightModeColumn = new ColumnSelectionPanel(BorderFactory.createEmptyBorder(), columnFilter, true);
        // north: bound selectors; center: mode controls; south: output column controls
        JPanel northPanel = new JPanel();
        northPanel.setLayout(new BoxLayout(northPanel, BoxLayout.Y_AXIS));
        JPanel southPanel = new JPanel(new GridBagLayout());
        JPanel centerPanel = new JPanel(new GridBagLayout());
        // mutually exclusive choice: fixed flags vs. flag columns
        comp_useFixedModes = new JRadioButton("set include/exclude flags manually");
        comp_useFlexibleModes = new JRadioButton("use columns for include/exclude flags");
        ButtonGroup group = new ButtonGroup();
        group.add(comp_useFlexibleModes);
        group.add(comp_useFixedModes);
        JPanel flexibleModesPanel = new JPanel();
        flexibleModesPanel.setLayout(new BoxLayout(flexibleModesPanel, BoxLayout.Y_AXIS));
        flexibleModesPanel.setBorder(BorderFactory.createTitledBorder(""));
        JPanel fixedModesPanel = new JPanel();
        fixedModesPanel.setLayout(new BoxLayout(fixedModesPanel, BoxLayout.Y_AXIS));
        fixedModesPanel.setBorder(BorderFactory.createTitledBorder(""));
        comp_fixedModesSelection = new ButtonGroup();
        comp_fixedModesSelection.add(comp_inclBoth);
        comp_fixedModesSelection.add(comp_inclLeft);
        comp_fixedModesSelection.add(comp_inclRight);
        comp_fixedModesSelection.add(comp_inclNone);
        // selecting one of the two mode radios enables its panel, the other
        // radio's listener disables the opposite panel
        comp_useFixedModes.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                enablePanel(fixedModesPanel, e.getStateChange() == ItemEvent.SELECTED);
            }
        });
        comp_useFlexibleModes.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                enablePanel(flexibleModesPanel, e.getStateChange() == ItemEvent.SELECTED);
            }
        });
        // keep the settings model in sync with the four column selectors
        comp_leftBoundColumn.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                m_settings.setLeftBoundColumn(comp_leftBoundColumn.getSelectedColumn());
            }
        });
        comp_rightBoundColumn.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                m_settings.setRightBoundColumn(comp_rightBoundColumn.getSelectedColumn());
            }
        });
        comp_leftModeColumn.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                m_settings.setLeftModeColumn(comp_leftModeColumn.getSelectedColumn());
            }
        });
        comp_rightModeColumn.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                m_settings.setRightModeColumn(comp_rightModeColumn.getSelectedColumn());
            }
        });
        // default fixed mode: left-inclusive interval [a;b)
        comp_fixedModesSelection.setSelected(comp_inclLeft.getModel(), true);
        // components for south panel
        ButtonGroup bg = new ButtonGroup();
        comp_replaceColumnRadio = new JRadioButton("Replace");
        comp_appendColumnRadio = new JRadioButton("Append");
        bg.add(comp_replaceColumnRadio);
        bg.add(comp_appendColumnRadio);
        comp_replaceColumnPanel = new ColumnSelectionPanel(BorderFactory.createEmptyBorder(), DataValue.class);
        comp_newColumnName = new JTextField(20);
        comp_newColumnName.setText(CreateIntervalNodeSettings.CFG_OUT_COLUMN_NAME_DFT);
        // usage of lambda expressions
        /*comp_replaceColumnRadio.addItemListener(e -> comp_replaceColumnPanel.setEnabled(comp_replaceColumnRadio.isSelected()));
        comp_appendColumnRadio.addItemListener(e -> comp_newColumnName.setEnabled(comp_appendColumnRadio.isSelected()));*/
        // each radio enables exactly its own input widget
        comp_replaceColumnRadio.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                comp_replaceColumnPanel.setEnabled(e.getStateChange() == ItemEvent.SELECTED);
            }
        });
        comp_appendColumnRadio.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                comp_newColumnName.setEnabled(e.getStateChange() == ItemEvent.SELECTED);
            }
        });
        bg.setSelected(comp_appendColumnRadio.getModel(), true);
        comp_appendColumnRadio.setSelected(true);
        // initial selection / deselection (as no event is fired for first set selected)
        comp_replaceColumnPanel.setEnabled(false);
        comp_newColumnName.setEnabled(true);
        /** LAYOUT COMPONENTS **/
        northPanel.add(comp_leftBoundColumn);
        northPanel.add(comp_rightBoundColumn);
        // center panel: two columns — fixed modes (left), flexible modes (right)
        GridBagConstraints c = new GridBagConstraints();
        c.anchor = GridBagConstraints.NORTHWEST;
        c.gridx = 0;
        c.gridy = 0;
        c.weightx = 1;
        c.fill = GridBagConstraints.HORIZONTAL;
        centerPanel.add(comp_useFixedModes, c);
        c.gridx = 1;
        centerPanel.add(comp_useFlexibleModes, c);
        fixedModesPanel.add(comp_inclBoth);
        fixedModesPanel.add(comp_inclLeft);
        fixedModesPanel.add(comp_inclRight);
        fixedModesPanel.add(comp_inclNone);
        c.gridx = 0;
        c.gridy = 1;
        c.weighty = 1;
        c.fill = GridBagConstraints.BOTH;
        centerPanel.add(fixedModesPanel, c);
        flexibleModesPanel.add(new JLabel("include left bound?"));
        flexibleModesPanel.add(comp_leftModeColumn);
        flexibleModesPanel.add(new JLabel("include right bound?"));
        flexibleModesPanel.add(comp_rightModeColumn);
        c.gridx = 1;
        centerPanel.add(flexibleModesPanel, c);
        // south panel: replace/append radios with their input widgets
        c = new GridBagConstraints();
        c.anchor = GridBagConstraints.NORTHWEST;
        c.gridx = 0;
        c.gridy = 0;
        c.weightx = 1;
        c.fill = GridBagConstraints.HORIZONTAL;
        southPanel.add(comp_replaceColumnRadio, c);
        c.gridx = 1;
        southPanel.add(comp_replaceColumnPanel, c);
        c.gridx = 0;
        c.gridy = 1;
        southPanel.add(comp_appendColumnRadio,c);
        c.gridx = 1;
        southPanel.add(comp_newColumnName, c);
        comp_mainPanel.add(centerPanel, BorderLayout.CENTER);
        comp_mainPanel.add(northPanel, BorderLayout.NORTH);
        comp_mainPanel.add(southPanel, BorderLayout.SOUTH);
        this.addTab("General Settings", comp_mainPanel);
        // default: fixed modes active
        comp_useFixedModes.setSelected(true);
        // initial selection / deselection (as no event is fired for first set selected)
        enablePanel(flexibleModesPanel, false);
        enablePanel(fixedModesPanel, true);
    }
/**
 * Enables or disables a panel together with all of its direct child components.
 *
 * @param panel  the panel whose enabled state is toggled
 * @param enable {@code true} to enable, {@code false} to disable
 */
private void enablePanel(JPanel panel, boolean enable) {
    panel.setEnabled(enable);
    Component[] children = panel.getComponents();
    for (int i = 0; i < children.length; i++) {
        children[i].setEnabled(enable);
    }
}
/**
 * Transfers the current GUI state into the settings model and persists it.
 *
 * @param settings the settings object to write into
 * @throws InvalidSettingsException if flexible modes are enabled but no valid
 *         mode columns have been selected
 */
@Override
protected void saveSettingsTo(NodeSettingsWO settings) throws InvalidSettingsException {
    // sync GUI-settings to model
    m_settings.setModeColumnsFlag(comp_useFlexibleModes.isSelected());

    // Exactly one of the four fixed-mode radio buttons is selected (ButtonGroup).
    if (comp_inclBoth.isSelected()) {
        m_settings.setFixedMode(Mode.INCL_BOTH);
    }
    if (comp_inclLeft.isSelected()) {
        m_settings.setFixedMode(Mode.INCL_LEFT);
    }
    if (comp_inclRight.isSelected()) {
        m_settings.setFixedMode(Mode.INCL_RIGHT);
    }
    if (comp_inclNone.isSelected()) {
        m_settings.setFixedMode(Mode.INCL_NONE);
    }

    if (m_settings.useModeColumns()
            && (m_settings.getLeftModeColumn() == null || m_settings.getRightModeColumn() == null)) {
        throw new InvalidSettingsException("No mode columns selected.\nEnable fixed mode usage if no mode columns are available, otherwise select valid columns");
    }

    // The name text field is enabled exactly when the "Append" radio is selected.
    final boolean append = comp_newColumnName.isEnabled();
    m_settings.setCreateColumnFlag(append);
    m_settings.setOutColumnName(append
            ? comp_newColumnName.getText()
            : comp_replaceColumnPanel.getSelectedColumn());

    m_settings.saveSettingsTo(settings);
}
/**
 * Loads the stored settings into the model and refreshes all dialog
 * components against the current input table spec.
 *
 * @param settings the settings to load
 * @param specs    the input table specs; only the first port is used
 * @throws NotConfigurableException if the dialog cannot be configured
 */
@Override
protected void loadSettingsFrom(NodeSettingsRO settings, DataTableSpec[] specs) throws NotConfigurableException {
    DataTableSpec inSpec = specs[0];
    m_settings.loadSettingsForDialog(settings, specs);
    // update components to reflect the (possibly new) input table
    updateComponents(inSpec);
}
/**
 * Synchronizes all dialog components with the given input table spec and the
 * previously loaded settings. Stored column names are only kept if a column
 * with that name and a compatible type still exists in the input table.
 *
 * @param spec the spec of the first input table
 * @throws NotConfigurableException declared for API consistency; not thrown here
 */
private void updateComponents(DataTableSpec spec) throws NotConfigurableException {
    // re-resolve stored column names against the current input table
    String leftBoundSelected = null;
    String rightBoundSelected = null;
    String leftModeColumnSelected = null;
    String rightModeColumnSelected = null;
    for (DataColumnSpec colSpec : spec) {
        DataType dType = colSpec.getType();
        String columnName = colSpec.getName();
        if (dType.isCompatible(DoubleValue.class)) {
            if (columnName.equals(m_settings.getLeftBoundColumn())) {
                leftBoundSelected = columnName;
            }
            if (columnName.equals(m_settings.getRightBoundColumn())) {
                rightBoundSelected = columnName;
            }
        }
        if (dType.isCompatible(BooleanValue.class) || dType.isCompatible(IntValue.class)) {
            if (columnName.equals(m_settings.getLeftModeColumn())) {
                leftModeColumnSelected = columnName;
            }
            if (columnName.equals(m_settings.getRightModeColumn())) {
                rightModeColumnSelected = columnName;
            }
        }
    }
    comp_leftBoundColumn.update(spec, leftBoundSelected);
    comp_rightBoundColumn.update(spec, rightBoundSelected); // fixed: stray ';;' removed
    comp_leftModeColumn.update(spec, leftModeColumnSelected);
    comp_rightModeColumn.update(spec, rightModeColumnSelected);

    boolean appendColumn = m_settings.createNewColumn();
    // when replacing, the append text field falls back to the default name
    String appendColumnName = appendColumn ?
            m_settings.getOutColumnName() : CreateIntervalNodeSettings.CFG_OUT_COLUMN_NAME_DFT;
    comp_newColumnName.setText(appendColumnName);
    String replaceColumnName = appendColumn ? null : m_settings.getOutColumnName();
    comp_replaceColumnPanel.update(spec, replaceColumnName, false, true);
    if (appendColumn) {
        comp_appendColumnRadio.setSelected(true);
    } else {
        comp_replaceColumnRadio.setSelected(true);
    }

    if (m_settings.useModeColumns()) {
        comp_useFlexibleModes.setSelected(true);
    } else {
        comp_useFixedModes.setSelected(true);
    }
}
}
|
de.mpicbg.knime.hcs.base/src/de/mpicbg/knime/hcs/base/nodes/manip/col/createinterval/CreateIntervalNodeDialog.java
|
package de.mpicbg.knime.hcs.base.nodes.manip.col.createinterval;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import javax.swing.BorderFactory;
import javax.swing.BoxLayout;
import javax.swing.ButtonGroup;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JTextField;
import org.knime.core.data.BooleanValue;
import org.knime.core.data.DataColumnSpec;
import org.knime.core.data.DataTableSpec;
import org.knime.core.data.DataType;
import org.knime.core.data.DataValue;
import org.knime.core.data.DoubleValue;
import org.knime.core.data.IntValue;
import org.knime.core.node.InvalidSettingsException;
import org.knime.core.node.NodeDialogPane;
import org.knime.core.node.NodeSettingsRO;
import org.knime.core.node.NodeSettingsWO;
import org.knime.core.node.NotConfigurableException;
import org.knime.core.node.util.ColumnSelectionPanel;
import org.knime.core.node.util.DataValueColumnFilter;
import de.mpicbg.knime.hcs.core.math.Interval.Mode;
/**
 * Dialog for the "Create Interval" node.
 *
 * <p>Lets the user pick the left/right bound columns, choose between fixed
 * include/exclude flags and flag columns, and decide whether the interval
 * column replaces an existing column or is appended under a new name.
 */
public class CreateIntervalNodeDialog extends NodeDialogPane {

    /** Main panel of the "General Settings" tab. */
    private final JPanel comp_mainPanel;

    /** Settings model backing this dialog. */
    private CreateIntervalNodeSettings m_settings = null;

    // column selectors for the interval bounds (double-compatible columns)
    private final ColumnSelectionPanel comp_leftBoundColumn;
    private final ColumnSelectionPanel comp_rightBoundColumn;
    // column selectors for the include/exclude flag columns (boolean/int)
    private final ColumnSelectionPanel comp_leftModeColumn;
    private final ColumnSelectionPanel comp_rightModeColumn;

    // output handling: replace an existing column or append a new one
    private final ColumnSelectionPanel comp_replaceColumnPanel;
    private final JTextField comp_newColumnName;

    private JRadioButton comp_useFixedModes;
    private JRadioButton comp_useFlexibleModes;

    private final JRadioButton comp_replaceColumnRadio;
    private final JRadioButton comp_appendColumnRadio;

    // fixed-mode selection: which interval bounds are inclusive
    private ButtonGroup comp_fixedModesSelection;
    private JRadioButton comp_inclBoth = new JRadioButton("[a;b]");
    private JRadioButton comp_inclLeft = new JRadioButton("[a;b)");
    private JRadioButton comp_inclRight = new JRadioButton("(a;b]");
    private JRadioButton comp_inclNone = new JRadioButton("(a;b)");

    /**
     * Constructor; initializes all GUI components and lays them out on the
     * "General Settings" tab.
     */
    @SuppressWarnings("unchecked")
    public CreateIntervalNodeDialog() {
        super();

        m_settings = new CreateIntervalNodeSettings(CreateIntervalNodeModel.CFG_KEY);

        /** INIT COMPONENTS **/

        // main panel
        comp_mainPanel = new JPanel(new BorderLayout());

        // init left bound column combobox
        comp_leftBoundColumn = new ColumnSelectionPanel(BorderFactory.createEmptyBorder(), DoubleValue.class);
        // init right bound column combobox
        comp_rightBoundColumn = new ColumnSelectionPanel(BorderFactory.createEmptyBorder(), DoubleValue.class);

        // mode columns may be boolean or integer columns
        DataValueColumnFilter columnFilter = new DataValueColumnFilter(BooleanValue.class, IntValue.class);
        // init left mode column combobox
        comp_leftModeColumn = new ColumnSelectionPanel(BorderFactory.createEmptyBorder(), columnFilter, true);
        // init right mode column combobox
        comp_rightModeColumn = new ColumnSelectionPanel(BorderFactory.createEmptyBorder(), columnFilter, true);

        JPanel northPanel = new JPanel();
        northPanel.setLayout(new BoxLayout(northPanel, BoxLayout.Y_AXIS));
        JPanel southPanel = new JPanel(new GridBagLayout());
        JPanel centerPanel = new JPanel(new GridBagLayout());

        comp_useFixedModes = new JRadioButton("set include/exclude flags manually");
        comp_useFlexibleModes = new JRadioButton("use columns for include/exclude flags");
        ButtonGroup group = new ButtonGroup();
        group.add(comp_useFlexibleModes);
        group.add(comp_useFixedModes);

        JPanel flexibleModesPanel = new JPanel();
        flexibleModesPanel.setLayout(new BoxLayout(flexibleModesPanel, BoxLayout.Y_AXIS));
        flexibleModesPanel.setBorder(BorderFactory.createTitledBorder(""));
        JPanel fixedModesPanel = new JPanel();
        fixedModesPanel.setLayout(new BoxLayout(fixedModesPanel, BoxLayout.Y_AXIS));
        fixedModesPanel.setBorder(BorderFactory.createTitledBorder(""));

        comp_fixedModesSelection = new ButtonGroup();
        comp_fixedModesSelection.add(comp_inclBoth);
        comp_fixedModesSelection.add(comp_inclLeft);
        comp_fixedModesSelection.add(comp_inclRight);
        comp_fixedModesSelection.add(comp_inclNone);

        // enable exactly one of the two mode panels, following the radio buttons
        comp_useFixedModes.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                enablePanel(fixedModesPanel, e.getStateChange() == ItemEvent.SELECTED);
            }
        });
        comp_useFlexibleModes.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                enablePanel(flexibleModesPanel, e.getStateChange() == ItemEvent.SELECTED);
            }
        });

        // keep the settings model in sync with the column selections
        comp_leftBoundColumn.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                m_settings.setLeftBoundColumn(comp_leftBoundColumn.getSelectedColumn());
            }
        });
        comp_rightBoundColumn.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                m_settings.setRightBoundColumn(comp_rightBoundColumn.getSelectedColumn());
            }
        });
        comp_leftModeColumn.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                m_settings.setLeftModeColumn(comp_leftModeColumn.getSelectedColumn());
            }
        });
        comp_rightModeColumn.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                m_settings.setRightModeColumn(comp_rightModeColumn.getSelectedColumn());
            }
        });

        comp_fixedModesSelection.setSelected(comp_inclLeft.getModel(), true);

        // components for south panel
        ButtonGroup bg = new ButtonGroup();
        comp_replaceColumnRadio = new JRadioButton("Replace");
        comp_appendColumnRadio = new JRadioButton("Append");
        bg.add(comp_replaceColumnRadio);
        bg.add(comp_appendColumnRadio);
        comp_replaceColumnPanel = new ColumnSelectionPanel(BorderFactory.createEmptyBorder(), DataValue.class);
        comp_newColumnName = new JTextField(20);
        comp_newColumnName.setText(CreateIntervalNodeSettings.CFG_OUT_COLUMN_NAME_DFT);

        // enable the matching input component when replace/append is toggled
        comp_replaceColumnRadio.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                comp_replaceColumnPanel.setEnabled(e.getStateChange() == ItemEvent.SELECTED);
            }
        });
        comp_appendColumnRadio.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(ItemEvent e) {
                comp_newColumnName.setEnabled(e.getStateChange() == ItemEvent.SELECTED);
            }
        });
        bg.setSelected(comp_appendColumnRadio.getModel(), true);
        comp_appendColumnRadio.setSelected(true);
        // initial selection / deselection (as no event is fired for first set selected)
        comp_replaceColumnPanel.setEnabled(false);
        comp_newColumnName.setEnabled(true);

        /** LAYOUT COMPONENTS **/

        northPanel.add(comp_leftBoundColumn);
        northPanel.add(comp_rightBoundColumn);

        GridBagConstraints c = new GridBagConstraints();
        c.anchor = GridBagConstraints.NORTHWEST;
        c.gridx = 0;
        c.gridy = 0;
        c.weightx = 1;
        c.fill = GridBagConstraints.HORIZONTAL;
        centerPanel.add(comp_useFixedModes, c);
        c.gridx = 1;
        centerPanel.add(comp_useFlexibleModes, c);

        fixedModesPanel.add(comp_inclBoth);
        fixedModesPanel.add(comp_inclLeft);
        fixedModesPanel.add(comp_inclRight);
        fixedModesPanel.add(comp_inclNone);
        c.gridx = 0;
        c.gridy = 1;
        c.weighty = 1;
        c.fill = GridBagConstraints.BOTH;
        centerPanel.add(fixedModesPanel, c);

        flexibleModesPanel.add(new JLabel("include left bound?"));
        flexibleModesPanel.add(comp_leftModeColumn);
        flexibleModesPanel.add(new JLabel("include right bound?"));
        flexibleModesPanel.add(comp_rightModeColumn);
        c.gridx = 1;
        centerPanel.add(flexibleModesPanel, c);

        // south panel
        c = new GridBagConstraints();
        c.anchor = GridBagConstraints.NORTHWEST;
        c.gridx = 0;
        c.gridy = 0;
        c.weightx = 1;
        c.fill = GridBagConstraints.HORIZONTAL;
        southPanel.add(comp_replaceColumnRadio, c);
        c.gridx = 1;
        southPanel.add(comp_replaceColumnPanel, c);
        c.gridx = 0;
        c.gridy = 1;
        southPanel.add(comp_appendColumnRadio, c);
        c.gridx = 1;
        southPanel.add(comp_newColumnName, c);

        comp_mainPanel.add(centerPanel, BorderLayout.CENTER);
        comp_mainPanel.add(northPanel, BorderLayout.NORTH);
        comp_mainPanel.add(southPanel, BorderLayout.SOUTH);

        this.addTab("General Settings", comp_mainPanel);

        comp_useFixedModes.setSelected(true);
        // initial selection / deselection (as no event is fired for first set selected)
        enablePanel(flexibleModesPanel, false);
        enablePanel(fixedModesPanel, true);
    }

    /**
     * Enables or disables a panel together with all of its direct child components.
     *
     * @param panel  the panel whose enabled state is toggled
     * @param enable {@code true} to enable, {@code false} to disable
     */
    private void enablePanel(JPanel panel, boolean enable) {
        panel.setEnabled(enable);
        for (Component cp : panel.getComponents()) {
            cp.setEnabled(enable);
        }
    }

    /**
     * Transfers the current GUI state into the settings model and persists it.
     *
     * @param settings the settings object to write into
     * @throws InvalidSettingsException if flexible modes are enabled but no
     *         valid mode columns have been selected
     */
    @Override
    protected void saveSettingsTo(NodeSettingsWO settings) throws InvalidSettingsException {
        // sync GUI-settings to model
        m_settings.setModeColumnsFlag(comp_useFlexibleModes.isSelected());

        // exactly one of the four fixed-mode radio buttons is selected (ButtonGroup)
        if (comp_inclBoth.isSelected()) {
            m_settings.setFixedMode(Mode.INCL_BOTH);
        }
        if (comp_inclLeft.isSelected()) {
            m_settings.setFixedMode(Mode.INCL_LEFT);
        }
        if (comp_inclRight.isSelected()) {
            m_settings.setFixedMode(Mode.INCL_RIGHT);
        }
        if (comp_inclNone.isSelected()) {
            m_settings.setFixedMode(Mode.INCL_NONE);
        }

        if (m_settings.useModeColumns()) {
            if (m_settings.getLeftModeColumn() == null || m_settings.getRightModeColumn() == null) {
                throw new InvalidSettingsException("No mode columns selected.\nEnable fixed mode usage if no mode columns are available, otherwise select valid columns");
            }
        }

        // the name text field is enabled exactly when the "Append" radio is selected
        boolean appendColumn = comp_newColumnName.isEnabled();
        m_settings.setCreateColumnFlag(appendColumn);
        if (appendColumn) {
            m_settings.setOutColumnName(comp_newColumnName.getText());
        } else {
            m_settings.setOutColumnName(comp_replaceColumnPanel.getSelectedColumn());
        }

        m_settings.saveSettingsTo(settings);
    }

    /**
     * Loads the stored settings into the model and refreshes all dialog
     * components against the current input table spec.
     *
     * @param settings the settings to load
     * @param specs    the input table specs; only the first port is used
     * @throws NotConfigurableException if the dialog cannot be configured
     */
    @Override
    protected void loadSettingsFrom(NodeSettingsRO settings, DataTableSpec[] specs) throws NotConfigurableException {
        m_settings.loadSettingsForDialog(settings, specs);
        // update components
        updateComponents(specs[0]);
    }

    /**
     * Synchronizes all dialog components with the given input table spec and
     * the previously loaded settings. Stored column names are only kept if a
     * column with that name and a compatible type still exists.
     *
     * @param spec the spec of the first input table
     * @throws NotConfigurableException declared for API consistency; not thrown here
     */
    private void updateComponents(DataTableSpec spec) throws NotConfigurableException {
        // re-resolve stored column names against the current input table
        String leftBoundSelected = null;
        String rightBoundSelected = null;
        String leftModeColumnSelected = null;
        String rightModeColumnSelected = null;
        for (DataColumnSpec colSpec : spec) {
            DataType dType = colSpec.getType();
            String columnName = colSpec.getName();
            if (dType.isCompatible(DoubleValue.class)) {
                if (columnName.equals(m_settings.getLeftBoundColumn())) {
                    leftBoundSelected = columnName;
                }
                if (columnName.equals(m_settings.getRightBoundColumn())) {
                    rightBoundSelected = columnName;
                }
            }
            if (dType.isCompatible(BooleanValue.class) || dType.isCompatible(IntValue.class)) {
                if (columnName.equals(m_settings.getLeftModeColumn())) {
                    leftModeColumnSelected = columnName;
                }
                if (columnName.equals(m_settings.getRightModeColumn())) {
                    rightModeColumnSelected = columnName;
                }
            }
        }
        comp_leftBoundColumn.update(spec, leftBoundSelected);
        comp_rightBoundColumn.update(spec, rightBoundSelected); // fixed: stray ';;' removed
        comp_leftModeColumn.update(spec, leftModeColumnSelected);
        comp_rightModeColumn.update(spec, rightModeColumnSelected);

        boolean appendColumn = m_settings.createNewColumn();
        // FIX: when replacing, the append text field falls back to the default
        // output column name instead of being left empty
        String appendColumnName = appendColumn ?
                m_settings.getOutColumnName() : CreateIntervalNodeSettings.CFG_OUT_COLUMN_NAME_DFT;
        comp_newColumnName.setText(appendColumnName);
        String replaceColumnName = appendColumn ? null : m_settings.getOutColumnName();
        comp_replaceColumnPanel.update(spec, replaceColumnName, false, true);
        if (appendColumn) {
            comp_appendColumnRadio.setSelected(true);
        } else {
            comp_replaceColumnRadio.setSelected(true);
        }

        boolean useModeColumns = m_settings.useModeColumns();
        if (useModeColumns) {
            comp_useFlexibleModes.setSelected(true);
        } else {
            comp_useFixedModes.setSelected(true);
        }
    }
}
|
Create Interval: fix default value of append output column
|
de.mpicbg.knime.hcs.base/src/de/mpicbg/knime/hcs/base/nodes/manip/col/createinterval/CreateIntervalNodeDialog.java
|
Create Interval: fix default value of append output column
|
|
Java
|
mit
|
86d911e16f5e9e27314493ef98556cb1e65e016c
| 0
|
cinnober/msgcodec
|
/*
* Copyright (c) 2013 Cinnober Financial Technology AB, Stockholm,
* Sweden. All rights reserved.
*
* This software is the confidential and proprietary information of
* Cinnober Financial Technology AB, Stockholm, Sweden. You shall not
* disclose such Confidential Information and shall use it only in
* accordance with the terms of the license agreement you entered into
* with Cinnober.
*
* Cinnober makes no representations or warranties about the suitability
* of the software, either expressed or implied, including, but not limited
* to, the implied warranties of merchantibility, fitness for a particular
* purpose, or non-infringement. Cinnober shall not be liable for any
* damages suffered by licensee as a result of using, modifying, or
* distributing this software or its derivatives.
*/
package com.cinnober.msgcodec.json;
import java.io.IOException;
import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import com.cinnober.msgcodec.Accessor;
import com.cinnober.msgcodec.EnumSymbols;
import com.cinnober.msgcodec.Epoch;
import com.cinnober.msgcodec.Factory;
import com.cinnober.msgcodec.FieldDef;
import com.cinnober.msgcodec.GroupDef;
import com.cinnober.msgcodec.TypeDef;
import com.cinnober.msgcodec.TypeDef.Symbol;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
/**
* @author Mikael Brannstrom
*
*/
/**
 * Converters between msgcodec values and their JSON representation.
 *
 * <p>One handler exists per bound type; handlers for groups and sequences are
 * composed from the handlers of their components.
 *
 * @author Mikael Brannstrom
 */
abstract class JsonValueHandler<T> {

    /** Name of the synthetic JSON field carrying the group type of a dynamic group. */
    static final String TYPE_FIELD = "$type";

    static final Int8Handler INT8 = new Int8Handler();
    static final Int16Handler INT16 = new Int16Handler();
    static final Int32Handler INT32 = new Int32Handler();
    static final Int64Handler INT64 = new Int64Handler();

    static final UInt8Handler UINT8 = new UInt8Handler();
    static final UInt16Handler UINT16 = new UInt16Handler();
    static final UInt32Handler UINT32 = new UInt32Handler();
    static final UInt64Handler UINT64 = new UInt64Handler();

    static final StringHandler STRING = new StringHandler();
    static final BinaryHandler BINARY = new BinaryHandler();
    static final BooleanHandler BOOLEAN = new BooleanHandler();

    static final DecimalHandler DECIMAL = new DecimalHandler();
    static final BigDecimalHandler BIGDECIMAL = new BigDecimalHandler();
    static final BigIntHandler BIGINT = new BigIntHandler();
    static final Float32Handler FLOAT32 = new Float32Handler();
    static final Float64Handler FLOAT64 = new Float64Handler();

    /**
     * Writes the value to the JSON generator.
     *
     * @param value the value to encode, not null
     * @param g the generator to write to
     * @throws IOException if the value cannot be written
     */
    abstract void writeValue(T value, JsonGenerator g) throws IOException;

    /**
     * Reads a value from the JSON parser.
     *
     * @param p the parser, positioned at the value
     * @return the decoded value
     * @throws IOException if the value cannot be read
     */
    abstract T readValue(JsonParser p) throws IOException;

    /** Signed 8-bit integer. */
    static class Int8Handler extends JsonValueHandler<Byte> {
        @Override
        void writeValue(Byte value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        Byte readValue(JsonParser p) throws IOException {
            return (byte) p.getValueAsInt();
        }
    }

    /** Signed 16-bit integer. */
    static class Int16Handler extends JsonValueHandler<Short> {
        @Override
        void writeValue(Short value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        Short readValue(JsonParser p) throws IOException {
            return (short) p.getValueAsInt();
        }
    }

    /** Signed 32-bit integer. */
    static class Int32Handler extends JsonValueHandler<Integer> {
        @Override
        void writeValue(Integer value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        Integer readValue(JsonParser p) throws IOException {
            return p.getValueAsInt();
        }
    }

    /** Signed 64-bit integer. */
    static class Int64Handler extends JsonValueHandler<Long> {
        @Override
        void writeValue(Long value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        Long readValue(JsonParser p) throws IOException {
            return p.getValueAsLong();
        }
    }

    /** Unsigned 8-bit integer, stored in a (signed) byte. */
    static class UInt8Handler extends JsonValueHandler<Byte> {
        @Override
        void writeValue(Byte value, JsonGenerator g) throws IOException {
            g.writeNumber(value.byteValue() & 0xff);
        }
        @Override
        Byte readValue(JsonParser p) throws IOException {
            return (byte) p.getValueAsInt();
        }
    }

    /** Unsigned 16-bit integer, stored in a (signed) short. */
    static class UInt16Handler extends JsonValueHandler<Short> {
        @Override
        void writeValue(Short value, JsonGenerator g) throws IOException {
            g.writeNumber(value.shortValue() & 0xffff);
        }
        @Override
        Short readValue(JsonParser p) throws IOException {
            return (short) p.getValueAsInt();
        }
    }

    /** Unsigned 32-bit integer, stored in a (signed) int. */
    static class UInt32Handler extends JsonValueHandler<Integer> {
        @Override
        void writeValue(Integer value, JsonGenerator g) throws IOException {
            g.writeNumber(value.intValue() & 0xffffffffL);
        }
        @Override
        Integer readValue(JsonParser p) throws IOException {
            return (int) p.getValueAsLong();
        }
    }

    /** Unsigned 64-bit integer, stored in a (signed) long. */
    static class UInt64Handler extends JsonValueHandler<Long> {
        /** 2^64, used to map negative signed longs to their unsigned value. */
        private static final BigInteger TWO_POW_64 = BigInteger.ONE.shiftLeft(64);
        @Override
        void writeValue(Long value, JsonGenerator g) throws IOException {
            long v = value.longValue();
            if (v < 0) {
                // FIX: the unsigned value (>= 2^63) does not fit in a signed
                // long; emit it via BigInteger as v + 2^64 instead of writing
                // a negative number.
                g.writeNumber(BigInteger.valueOf(v).add(TWO_POW_64));
            } else {
                g.writeNumber(v);
            }
        }
        @Override
        Long readValue(JsonParser p) throws IOException {
            // FIX: read via BigInteger so values in [2^63, 2^64) are accepted;
            // the low 64 bits are the signed representation of the unsigned value.
            return p.getBigIntegerValue().longValue();
        }
    }

    /** UTF string. */
    static class StringHandler extends JsonValueHandler<String> {
        @Override
        void writeValue(String value, JsonGenerator g) throws IOException {
            g.writeString(value);
        }
        @Override
        String readValue(JsonParser p) throws IOException {
            return p.getValueAsString();
        }
    }

    /** Binary data, encoded as base64 (Jackson default). */
    static class BinaryHandler extends JsonValueHandler<byte[]> {
        @Override
        void writeValue(byte[] value, JsonGenerator g) throws IOException {
            g.writeBinary(value);
        }
        @Override
        byte[] readValue(JsonParser p) throws IOException {
            return p.getBinaryValue();
        }
    }

    /** Boolean. */
    static class BooleanHandler extends JsonValueHandler<Boolean> {
        @Override
        void writeValue(Boolean value, JsonGenerator g) throws IOException {
            g.writeBoolean(value);
        }
        @Override
        Boolean readValue(JsonParser p) throws IOException {
            return p.getBooleanValue();
        }
    }

    /** Decimal number (range-restricted). */
    static class DecimalHandler extends JsonValueHandler<BigDecimal> {
        @Override
        void writeValue(BigDecimal value, JsonGenerator g) throws IOException {
            g.writeNumber(value); // TODO: validate range
        }
        @Override
        BigDecimal readValue(JsonParser p) throws IOException {
            return p.getDecimalValue(); // TODO: validate range
        }
    }

    /** Arbitrary-precision decimal number. */
    static class BigDecimalHandler extends JsonValueHandler<BigDecimal> {
        @Override
        void writeValue(BigDecimal value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        BigDecimal readValue(JsonParser p) throws IOException {
            return p.getDecimalValue();
        }
    }

    /** Arbitrary-precision integer. */
    static class BigIntHandler extends JsonValueHandler<BigInteger> {
        @Override
        void writeValue(BigInteger value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        BigInteger readValue(JsonParser p) throws IOException {
            return p.getBigIntegerValue();
        }
    }

    /** 32-bit IEEE 754 floating point number. */
    static class Float32Handler extends JsonValueHandler<Float> {
        @Override
        void writeValue(Float value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        Float readValue(JsonParser p) throws IOException {
            return p.getFloatValue();
        }
    }

    /** 64-bit IEEE 754 floating point number. */
    static class Float64Handler extends JsonValueHandler<Double> {
        @Override
        void writeValue(Double value, JsonGenerator g) throws IOException {
            return;
        }
        @Override
        Double readValue(JsonParser p) throws IOException {
            return p.getDoubleValue();
        }
    }

    /**
     * Base class for time values. Encodes the value as a string; the textual
     * format is not finalized yet (see TODO in writeValue/readValue).
     */
    static abstract class TimeHandler<T> extends JsonValueHandler<T> {
        private final Epoch epoch;
        private final TimeUnit unit;

        public TimeHandler(TypeDef.Time type) {
            this.epoch = type.getEpoch();
            this.unit = type.getUnit();
        }

        /** Convert the value to a long value for the specified epoch and time unit. */
        protected abstract long convertToLong(T value);
        /** Convert the value from a long value for the specified epoch and time unit. */
        protected abstract T convertFromLong(long value);

        @Override
        void writeValue(T value, JsonGenerator g) throws IOException {
            long timeValue = convertToLong(value);
            // TODO: format long to string
            String timeStr = "TODO-TIME:" + Long.toString(timeValue);
            g.writeString(timeStr);
        }
        @Override
        T readValue(JsonParser p) throws IOException {
            String s = p.getText();
            long timeValue = Long.parseLong(s.substring("TODO-TIME:".length()));
            return convertFromLong(timeValue);
        }
    }

    /** Time value stored in an int. */
    static class IntTimeHandler extends TimeHandler<Integer> {
        public IntTimeHandler(TypeDef.Time type) {
            super(type);
        }
        @Override
        protected long convertToLong(Integer value) {
            return value;
        }
        @Override
        protected Integer convertFromLong(long value) {
            return (int) value;
        }
    }

    /** Time value stored in a long. */
    static class LongTimeHandler extends TimeHandler<Long> {
        public LongTimeHandler(TypeDef.Time type) {
            super(type);
        }
        @Override
        protected long convertToLong(Long value) {
            return value;
        }
        @Override
        protected Long convertFromLong(long value) {
            return value;
        }
    }

    /**
     * Returns the number of milliseconds per unit.
     *
     * @throws IllegalArgumentException for sub-millisecond units
     */
    private static long getTimeInMillis(TimeUnit unit) {
        switch (unit) {
        case MILLISECONDS:
            return 1;
        case SECONDS:
            return 1000;
        case MINUTES:
            return 60*1000;
        case HOURS:
            return 60*60*1000;
        case DAYS:
            return 24*60*60*1000;
        default:
            throw new IllegalArgumentException("Date does not support " + unit);
        }
    }

    /**
     * Returns the epoch offset in milliseconds relative to the UNIX epoch.
     */
    private static long getEpochOffset(Epoch epoch) {
        switch (epoch) {
        case UNIX:
            return 0;
        case Y2K:
            // NOTE(review): 2000-01-01T00:00 UTC is 946684800000L; this
            // constant includes an extra 6 h — confirm intended epoch/timezone.
            return 946706400000L;
        case MIDNIGHT:
            return 0;
        default:
            throw new IllegalArgumentException("Date does not support " + epoch);
        }
    }

    /** Time value stored in a java.util.Date. */
    static class DateTimeHandler extends TimeHandler<Date> {
        private final long timeUnitInMillis;
        private final long epochOffset;

        public DateTimeHandler(TypeDef.Time type) {
            super(type);
            timeUnitInMillis = getTimeInMillis(type.getUnit());
            epochOffset = getEpochOffset(type.getEpoch());
        }
        @Override
        protected long convertToLong(Date value) {
            return (value.getTime()-epochOffset)/timeUnitInMillis;
        }
        @Override
        protected Date convertFromLong(long value) {
            return new Date(value*timeUnitInMillis+epochOffset);
        }
    }

    /** Enum value bound to a Java enum; encoded as the symbol name. */
    static class EnumHandler<E extends Enum<E>> extends JsonValueHandler<E> {
        private final EnumSymbols<E> enumSymbols;

        public EnumHandler(TypeDef.Enum typeDef, Class<E> enumClass) {
            this.enumSymbols = new EnumSymbols<E>(typeDef, enumClass);
        }
        @Override
        void writeValue(E value, JsonGenerator g) throws IOException {
            Symbol symbol = enumSymbols.getSymbol(value);
            if (symbol == null) {
                throw new IOException("Not a valid enum: " + value);
            }
            g.writeString(symbol.getName());
        }
        @Override
        E readValue(JsonParser p) throws IOException {
            String str = p.getText();
            E value = enumSymbols.getEnum(str);
            if (value == null) {
                throw new IOException("Not a valid symbol: " + str);
            }
            return value;
        }
    }

    /** Enum value bound to an int id; encoded as the symbol name. */
    static class IntEnumHandler extends JsonValueHandler<Integer> {
        private final Map<String, Integer> idByName;
        private final Map<Integer, String> nameById;

        public IntEnumHandler(TypeDef.Enum typeDef) {
            idByName = new HashMap<>(typeDef.getSymbols().size() * 2);
            nameById = new HashMap<>(typeDef.getSymbols().size() * 2);
            for (Symbol symbol : typeDef.getSymbols()) {
                idByName.put(symbol.getName(), symbol.getId());
                nameById.put(symbol.getId(), symbol.getName());
            }
        }
        @Override
        void writeValue(Integer value, JsonGenerator g) throws IOException {
            String name = nameById.get(value);
            if (name == null) {
                throw new IOException("Not a valid enum: " + value);
            }
            g.writeString(name);
        }
        @Override
        Integer readValue(JsonParser p) throws IOException {
            String str = p.getText();
            Integer value = idByName.get(str);
            if (value == null) {
                throw new IOException("Not a valid symbol: " + str);
            }
            return value;
        }
    }

    /** Sequence bound to a java.util.Collection; encoded as a JSON array. */
    @SuppressWarnings({"rawtypes", "unchecked"})
    static class CollectionSequenceHandler extends JsonValueHandler<Collection> {
        private final JsonValueHandler componentHandler;

        public CollectionSequenceHandler(JsonValueHandler componentHandler) {
            this.componentHandler = componentHandler;
        }
        @Override
        void writeValue(Collection list, JsonGenerator g) throws IOException {
            g.writeStartArray();
            for (Object value : list) {
                componentHandler.writeValue(value, g);
            }
            g.writeEndArray();
        }
        @Override
        Collection readValue(JsonParser p) throws IOException {
            Collection list = new ArrayList();
            // start array already consumed
            while (p.nextToken() != JsonToken.END_ARRAY) {
                list.add(componentHandler.readValue(p));
            }
            return list;
        }
    }

    /** Sequence bound to a Java array; encoded as a JSON array. */
    @SuppressWarnings({"rawtypes", "unchecked"})
    static class ArraySequenceHandler extends JsonValueHandler<Object> {
        private final JsonValueHandler componentHandler;
        private final Class<?> componentType;

        public ArraySequenceHandler(JsonValueHandler componentHandler, Class<?> componentType) {
            this.componentHandler = componentHandler;
            this.componentType = componentType;
        }
        @Override
        void writeValue(Object array, JsonGenerator g) throws IOException {
            g.writeStartArray();
            int length = Array.getLength(array);
            for (int i = 0; i < length; i++) {
                Object value = Array.get(array, i);
                componentHandler.writeValue(value, g);
            }
            g.writeEndArray();
        }
        @Override
        Object readValue(JsonParser p) throws IOException {
            // buffer values first, since the length is unknown until END_ARRAY
            Collection list = new ArrayList();
            // start array already consumed
            while (p.nextToken() != JsonToken.END_ARRAY) {
                list.add(componentHandler.readValue(p));
            }
            Object array = Array.newInstance(componentType, list.size());
            int i = 0;
            for (Object value : list) {
                Array.set(array, i++, value);
            }
            return array;
        }
    }

    /** Encodes/decodes a single named field of a group via its accessor. */
    @SuppressWarnings({"rawtypes", "unchecked"})
    static class FieldHandler {
        private final String name;
        private final Accessor accessor;
        private final JsonValueHandler valueHandler;

        FieldHandler(FieldDef field, JsonValueHandler valueHandler) {
            this.name = field.getName();
            this.accessor = field.getBinding().getAccessor();
            this.valueHandler = valueHandler;
        }
        /** Writes the field (name and value) unless the value is null. */
        void writeValue(Object group, JsonGenerator g) throws IOException {
            Object value = accessor.getValue(group);
            if (value != null) {
                g.writeFieldName(name);
                valueHandler.writeValue(value, g);
            }
        }
        /** Reads the field value and stores it in the group object. */
        void readValue(Object group, JsonParser p) throws IOException {
            Object value = valueHandler.readValue(p);
            accessor.setValue(group, value);
        }
    }

    /** Encodes/decodes a static (known-type) group as a JSON object. */
    @SuppressWarnings({"rawtypes"})
    static class StaticGroupHandler extends JsonValueHandler<Object> {
        private final String name;
        private final Factory factory;
        private Map<String, FieldHandler> fields;

        StaticGroupHandler(GroupDef group) {
            this.name = group.getName();
            this.factory = group.getBinding().getFactory();
        }

        /** Late initialization of the field handlers (breaks cyclic group references). */
        void init(Map<String, FieldHandler> fields) {
            this.fields = fields;
        }

        /**
         * Writes the group; when {@code dynamic} the synthetic {@value JsonValueHandler#TYPE_FIELD}
         * field is emitted first so the decoder can resolve the group type.
         */
        void writeValue(Object value, JsonGenerator g, boolean dynamic) throws IOException {
            g.writeStartObject();
            if (dynamic) {
                g.writeFieldName(TYPE_FIELD);
                g.writeString(name);
            }
            for (FieldHandler field : fields.values()) {
                field.writeValue(value, g);
            }
            g.writeEndObject();
        }
        @Override
        void writeValue(Object value, JsonGenerator g) throws IOException {
            writeValue(value, g, false);
        }
        @Override
        Object readValue(JsonParser p) throws IOException {
            Object group = factory.newInstance();
            readValue(group, p);
            return group;
        }
        void readValue(Object group, JsonParser p) throws IOException {
            // startObject has already been read
            while (p.nextToken() == JsonToken.FIELD_NAME) {
                String fieldName = p.getCurrentName();
                if (p.nextToken() != JsonToken.VALUE_NULL) {
                    FieldHandler fieldHandler = fields.get(fieldName);
                    if (fieldHandler == null) {
                        throw new IOException("Unknown field: " + fieldName);
                    }
                    fieldHandler.readValue(group, p);
                }
            }
        }

        /**
         * @return the field handlers of this group, keyed by field name
         */
        Map<String, FieldHandler> getFields() {
            return fields;
        }
    }

    /** Encodes/decodes a dynamic group, resolving the type via the codec. */
    static class DynamicGroupHandler extends JsonValueHandler<Object> {
        private final JsonCodec jsonCodec;

        DynamicGroupHandler(JsonCodec jsonCodec) {
            this.jsonCodec = jsonCodec;
        }
        @Override
        void writeValue(Object value, JsonGenerator g) throws IOException {
            StaticGroupHandler groupHandler = jsonCodec.lookupGroupByValue(value);
            if (groupHandler == null) {
                throw new IOException("Cannot encode group (unknown type)");
            }
            groupHandler.writeValue(value, g, true);
        }
        @Override
        Object readValue(JsonParser p) throws IOException {
            // NOTE(review): requires the $type field to be the FIRST field of
            // the object — confirm this restriction is acceptable for callers.
            if (p.nextToken() != JsonToken.FIELD_NAME || !p.getText().equals(TYPE_FIELD)) {
                throw new IOException("Expected field " + TYPE_FIELD);
            }
            p.nextToken(); // field value
            String groupName = p.getText();
            StaticGroupHandler groupHandler = jsonCodec.lookupGroupByName(groupName);
            if (groupHandler == null) {
                throw new IOException("Unknown type: " + groupName);
            }
            return groupHandler.readValue(p);
        }
    }
}
|
msgcodec-json/src/main/java/com/cinnober/msgcodec/json/JsonValueHandler.java
|
/*
* Copyright (c) 2013 Cinnober Financial Technology AB, Stockholm,
* Sweden. All rights reserved.
*
* This software is the confidential and proprietary information of
* Cinnober Financial Technology AB, Stockholm, Sweden. You shall not
* disclose such Confidential Information and shall use it only in
* accordance with the terms of the license agreement you entered into
* with Cinnober.
*
* Cinnober makes no representations or warranties about the suitability
* of the software, either expressed or implied, including, but not limited
* to, the implied warranties of merchantibility, fitness for a particular
* purpose, or non-infringement. Cinnober shall not be liable for any
* damages suffered by licensee as a result of using, modifying, or
* distributing this software or its derivatives.
*/
package com.cinnober.msgcodec.json;
import java.io.IOException;
import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import com.cinnober.msgcodec.Accessor;
import com.cinnober.msgcodec.EnumSymbols;
import com.cinnober.msgcodec.Epoch;
import com.cinnober.msgcodec.Factory;
import com.cinnober.msgcodec.FieldDef;
import com.cinnober.msgcodec.GroupDef;
import com.cinnober.msgcodec.TypeDef;
import com.cinnober.msgcodec.TypeDef.Symbol;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
/**
* @author Mikael Brannstrom
*
*/
/**
 * Handler that can encode a value to JSON and decode a value from JSON, for
 * one specific msgcodec type. Stateless scalar handlers are shared via the
 * static constants below.
 *
 * @param <T> the Java type of the handled values.
 *
 * @author Mikael Brannstrom
 */
abstract class JsonValueHandler<T> {
    /** Reserved JSON member name that carries the group type of a dynamic group. */
    static final String TYPE_FIELD = "$type";

    // Shared stateless handler instances for the scalar types.
    static final Int8Handler INT8 = new Int8Handler();
    static final Int16Handler INT16 = new Int16Handler();
    static final Int32Handler INT32 = new Int32Handler();
    static final Int64Handler INT64 = new Int64Handler();
    static final UInt8Handler UINT8 = new UInt8Handler();
    static final UInt16Handler UINT16 = new UInt16Handler();
    static final UInt32Handler UINT32 = new UInt32Handler();
    static final UInt64Handler UINT64 = new UInt64Handler();
    static final StringHandler STRING = new StringHandler();
    static final BinaryHandler BINARY = new BinaryHandler();
    static final BooleanHandler BOOLEAN = new BooleanHandler();
    static final DecimalHandler DECIMAL = new DecimalHandler();
    static final BigDecimalHandler BIGDECIMAL = new BigDecimalHandler();
    static final BigIntHandler BIGINT = new BigIntHandler();
    static final Float32Handler FLOAT32 = new Float32Handler();
    static final Float64Handler FLOAT64 = new Float64Handler();

    /**
     * Write the value to the generator.
     *
     * @param value the value to write, not null.
     * @param g the generator to write to, not null.
     * @throws IOException if the value could not be written.
     */
    abstract void writeValue(T value, JsonGenerator g) throws IOException;

    /**
     * Read a value from the parser. Scalar handlers expect the parser to be
     * positioned at the value token; composite handlers (sequences, groups)
     * expect the opening token to have been consumed already.
     *
     * @param p the parser to read from, not null.
     * @return the decoded value.
     * @throws IOException if the value could not be read.
     */
    abstract T readValue(JsonParser p) throws IOException;

    /** Handler for signed 8-bit integers. */
    static class Int8Handler extends JsonValueHandler<Byte> {
        @Override
        void writeValue(Byte value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        Byte readValue(JsonParser p) throws IOException {
            return (byte) p.getValueAsInt();
        }
    }

    /** Handler for signed 16-bit integers. */
    static class Int16Handler extends JsonValueHandler<Short> {
        @Override
        void writeValue(Short value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        Short readValue(JsonParser p) throws IOException {
            return (short) p.getValueAsInt();
        }
    }

    /** Handler for signed 32-bit integers. */
    static class Int32Handler extends JsonValueHandler<Integer> {
        @Override
        void writeValue(Integer value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        Integer readValue(JsonParser p) throws IOException {
            return p.getValueAsInt();
        }
    }

    /** Handler for signed 64-bit integers. */
    static class Int64Handler extends JsonValueHandler<Long> {
        @Override
        void writeValue(Long value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        Long readValue(JsonParser p) throws IOException {
            return p.getValueAsLong();
        }
    }

    /** Handler for unsigned 8-bit integers, encoded as 0..255. */
    static class UInt8Handler extends JsonValueHandler<Byte> {
        @Override
        void writeValue(Byte value, JsonGenerator g) throws IOException {
            g.writeNumber(value.byteValue() & 0xff);
        }
        @Override
        Byte readValue(JsonParser p) throws IOException {
            return (byte) p.getValueAsInt();
        }
    }

    /** Handler for unsigned 16-bit integers, encoded as 0..65535. */
    static class UInt16Handler extends JsonValueHandler<Short> {
        @Override
        void writeValue(Short value, JsonGenerator g) throws IOException {
            g.writeNumber(value.shortValue() & 0xffff);
        }
        @Override
        Short readValue(JsonParser p) throws IOException {
            return (short) p.getValueAsInt();
        }
    }

    /** Handler for unsigned 32-bit integers, encoded as 0..2^32-1. */
    static class UInt32Handler extends JsonValueHandler<Integer> {
        @Override
        void writeValue(Integer value, JsonGenerator g) throws IOException {
            g.writeNumber(value.intValue() & 0xffffffffL);
        }
        @Override
        Integer readValue(JsonParser p) throws IOException {
            return (int) p.getValueAsLong();
        }
    }

    /** Handler for unsigned 64-bit integers. */
    static class UInt64Handler extends JsonValueHandler<Long> {
        @Override
        void writeValue(Long value, JsonGenerator g) throws IOException {
            long v = value.longValue();
            if (v < 0) {
                // NOTE(review): negative means the unsigned value exceeds
                // Long.MAX_VALUE; it should be emitted via BigInteger
                // (v + 2^64) but is currently written as a negative number.
                // Left unchanged to preserve the wire format — confirm
                // intended behavior before fixing.
                // TODO: write via BigInteger
                g.writeNumber(v);
            } else {
                g.writeNumber(v);
            }
        }
        @Override
        Long readValue(JsonParser p) throws IOException {
            // TODO: read via BigInteger? (values > Long.MAX_VALUE overflow here)
            return p.getValueAsLong();
        }
    }

    /** Handler for strings, encoded as JSON strings. */
    static class StringHandler extends JsonValueHandler<String> {
        @Override
        void writeValue(String value, JsonGenerator g) throws IOException {
            g.writeString(value);
        }
        @Override
        String readValue(JsonParser p) throws IOException {
            return p.getValueAsString();
        }
    }

    /** Handler for binary data, encoded as a base64 JSON string. */
    static class BinaryHandler extends JsonValueHandler<byte[]> {
        @Override
        void writeValue(byte[] value, JsonGenerator g) throws IOException {
            g.writeBinary(value);
        }
        @Override
        byte[] readValue(JsonParser p) throws IOException {
            return p.getBinaryValue();
        }
    }

    /** Handler for booleans. */
    static class BooleanHandler extends JsonValueHandler<Boolean> {
        @Override
        void writeValue(Boolean value, JsonGenerator g) throws IOException {
            g.writeBoolean(value);
        }
        @Override
        Boolean readValue(JsonParser p) throws IOException {
            return p.getBooleanValue();
        }
    }

    /** Handler for the (range-limited) decimal type, encoded as a JSON number. */
    static class DecimalHandler extends JsonValueHandler<BigDecimal> {
        @Override
        void writeValue(BigDecimal value, JsonGenerator g) throws IOException {
            g.writeNumber(value); // TODO: validate range
        }
        @Override
        BigDecimal readValue(JsonParser p) throws IOException {
            return p.getDecimalValue(); // TODO: validate range
        }
    }

    /** Handler for arbitrary-precision decimals. */
    static class BigDecimalHandler extends JsonValueHandler<BigDecimal> {
        @Override
        void writeValue(BigDecimal value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        BigDecimal readValue(JsonParser p) throws IOException {
            return p.getDecimalValue();
        }
    }

    /** Handler for arbitrary-precision integers. */
    static class BigIntHandler extends JsonValueHandler<BigInteger> {
        @Override
        void writeValue(BigInteger value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        BigInteger readValue(JsonParser p) throws IOException {
            return p.getBigIntegerValue();
        }
    }

    /** Handler for 32-bit floating point numbers. */
    static class Float32Handler extends JsonValueHandler<Float> {
        @Override
        void writeValue(Float value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        Float readValue(JsonParser p) throws IOException {
            return p.getFloatValue();
        }
    }

    /** Handler for 64-bit floating point numbers. */
    static class Float64Handler extends JsonValueHandler<Double> {
        @Override
        void writeValue(Double value, JsonGenerator g) throws IOException {
            g.writeNumber(value);
        }
        @Override
        Double readValue(JsonParser p) throws IOException {
            return p.getDoubleValue();
        }
    }

    /**
     * Base handler for time values. Subclasses convert the concrete Java type
     * to/from a long counted in the configured time unit since the configured
     * epoch.
     */
    abstract static class TimeHandler<T> extends JsonValueHandler<T> {
        private final Epoch epoch;
        private final TimeUnit unit;
        public TimeHandler(TypeDef.Time type) {
            this.epoch = type.getEpoch();
            this.unit = type.getUnit();
        }
        /** Convert the value to a long value for the specified epoch and time unit. */
        protected abstract long convertToLong(T value);
        /** Convert the value from a long value for the specified epoch and time unit. */
        protected abstract T convertFromLong(long value);
        @Override
        void writeValue(T value, JsonGenerator g) throws IOException {
            long timeValue = convertToLong(value);
            // TODO: format long to string
            // NOTE(review): placeholder encoding — readValue below is only
            // able to parse strings produced by this exact format.
            String timeStr = "TODO-TIME:" + Long.toString(timeValue);
            g.writeString(timeStr);
        }
        @Override
        T readValue(JsonParser p) throws IOException {
            String s = p.getText();
            long timeValue = Long.parseLong(s.substring("TODO-TIME:".length()));
            return convertFromLong(timeValue);
        }
    }

    /** Time handler backed by an {@code int} value. */
    static class IntTimeHandler extends TimeHandler<Integer> {
        public IntTimeHandler(TypeDef.Time type) {
            super(type);
        }
        @Override
        protected long convertToLong(Integer value) {
            return value;
        }
        @Override
        protected Integer convertFromLong(long value) {
            return (int) value;
        }
    }

    /** Time handler backed by a {@code long} value. */
    static class LongTimeHandler extends TimeHandler<Long> {
        public LongTimeHandler(TypeDef.Time type) {
            super(type);
        }
        @Override
        protected long convertToLong(Long value) {
            return value;
        }
        @Override
        protected Long convertFromLong(long value) {
            return value;
        }
    }

    /**
     * @return the length of one unit of the given time unit, in milliseconds.
     * @throws IllegalArgumentException for sub-millisecond units.
     */
    private static long getTimeInMillis(TimeUnit unit) {
        switch (unit) {
        case MILLISECONDS:
            return 1;
        case SECONDS:
            return 1000;
        case MINUTES:
            return 60*1000;
        case HOURS:
            return 60*60*1000;
        case DAYS:
            return 24*60*60*1000;
        default:
            throw new IllegalArgumentException("Date does not support " + unit);
        }
    }

    /**
     * @return the epoch start, in milliseconds since the UNIX epoch.
     */
    private static long getEpochOffset(Epoch epoch) {
        switch (epoch) {
        case UNIX:
            return 0;
        case Y2K:
            // NOTE(review): 946706400000 is 2000-01-01T06:00:00 UTC, not
            // midnight UTC — confirm the intended time zone for this epoch.
            return 946706400000L;
        case MIDNIGHT:
            // NOTE(review): same offset as UNIX — confirm MIDNIGHT semantics.
            return 0;
        default:
            throw new IllegalArgumentException("Date does not support " + epoch);
        }
    }

    /** Time handler backed by a {@link java.util.Date}. */
    static class DateTimeHandler extends TimeHandler<Date> {
        private final long timeUnitInMillis;
        private final long epochOffset;
        public DateTimeHandler(TypeDef.Time type) {
            super(type);
            timeUnitInMillis = getTimeInMillis(type.getUnit());
            epochOffset = getEpochOffset(type.getEpoch());
        }
        @Override
        protected long convertToLong(Date value) {
            return (value.getTime()-epochOffset)/timeUnitInMillis;
        }
        @Override
        protected Date convertFromLong(long value) {
            return new Date(value*timeUnitInMillis+epochOffset);
        }
    }

    /** Handler for Java enums, encoded by symbol name. */
    static class EnumHandler<E extends Enum<E>> extends JsonValueHandler<E> {
        private final EnumSymbols<E> enumSymbols;
        public EnumHandler(TypeDef.Enum typeDef, Class<E> enumClass) {
            this.enumSymbols = new EnumSymbols<E>(typeDef, enumClass);
        }
        @Override
        void writeValue(E value, JsonGenerator g) throws IOException {
            Symbol symbol = enumSymbols.getSymbol(value);
            if (symbol == null) {
                throw new IOException("Not a valid enum: " + value);
            }
            g.writeString(symbol.getName());
        }
        @Override
        E readValue(JsonParser p) throws IOException {
            String str = p.getText();
            E value = enumSymbols.getEnum(str);
            if (value == null) {
                throw new IOException("Not a valid symbol: " + str);
            }
            return value;
        }
    }

    /** Handler for integer-backed enums, encoded by symbol name. */
    static class IntEnumHandler extends JsonValueHandler<Integer> {
        private final Map<String, Integer> idByName;
        private final Map<Integer, String> nameById;
        public IntEnumHandler(TypeDef.Enum typeDef) {
            idByName = new HashMap<>(typeDef.getSymbols().size() * 2);
            nameById = new HashMap<>(typeDef.getSymbols().size() * 2);
            for (Symbol symbol : typeDef.getSymbols()) {
                idByName.put(symbol.getName(), symbol.getId());
                nameById.put(symbol.getId(), symbol.getName());
            }
        }
        @Override
        void writeValue(Integer value, JsonGenerator g) throws IOException {
            String name = nameById.get(value);
            if (name == null) {
                throw new IOException("Not a valid enum: " + value);
            }
            g.writeString(name);
        }
        @Override
        Integer readValue(JsonParser p) throws IOException {
            String str = p.getText();
            Integer value = idByName.get(str);
            if (value == null) {
                throw new IOException("Not a valid symbol: " + str);
            }
            return value;
        }
    }

    /** Handler for sequences bound to a {@link Collection}, encoded as a JSON array. */
    @SuppressWarnings({"rawtypes", "unchecked"})
    static class CollectionSequenceHandler extends JsonValueHandler<Collection> {
        private final JsonValueHandler componentHandler;
        public CollectionSequenceHandler(JsonValueHandler componentHandler) {
            this.componentHandler = componentHandler;
        }
        @Override
        void writeValue(Collection list, JsonGenerator g) throws IOException {
            g.writeStartArray();
            for (Object value : list) {
                componentHandler.writeValue(value, g);
            }
            g.writeEndArray();
        }
        @Override
        Collection readValue(JsonParser p) throws IOException {
            Collection list = new ArrayList();
            // start array already consumed
            while (p.nextToken() != JsonToken.END_ARRAY) {
                list.add(componentHandler.readValue(p));
            }
            return list;
        }
    }

    /** Handler for sequences bound to a Java array, encoded as a JSON array. */
    @SuppressWarnings({"rawtypes", "unchecked"})
    static class ArraySequenceHandler extends JsonValueHandler<Object> {
        private final JsonValueHandler componentHandler;
        private final Class<?> componentType;
        public ArraySequenceHandler(JsonValueHandler componentHandler, Class<?> componentType) {
            this.componentHandler = componentHandler;
            this.componentType = componentType;
        }
        @Override
        void writeValue(Object array, JsonGenerator g) throws IOException {
            g.writeStartArray();
            int length = Array.getLength(array);
            for (int i = 0; i < length; i++) {
                Object value = Array.get(array, i);
                componentHandler.writeValue(value, g);
            }
            g.writeEndArray();
        }
        @Override
        Object readValue(JsonParser p) throws IOException {
            Collection list = new LinkedList();
            // start array already consumed
            while (p.nextToken() != JsonToken.END_ARRAY) {
                list.add(componentHandler.readValue(p));
            }
            Object array = Array.newInstance(componentType, list.size());
            int i = 0;
            for (Object value : list) {
                // BUG FIX: the index must advance; previously every element was
                // written to position 0 and the rest of the array stayed empty.
                Array.set(array, i++, value);
            }
            return array;
        }
    }

    /** Encodes/decodes a single group field, via its accessor and value handler. */
    @SuppressWarnings({"rawtypes", "unchecked"})
    static class FieldHandler {
        private final String name;
        private final Accessor accessor;
        private final JsonValueHandler valueHandler;
        FieldHandler(FieldDef field, JsonValueHandler valueHandler) {
            this.name = field.getName();
            this.accessor = field.getBinding().getAccessor();
            this.valueHandler = valueHandler;
        }
        /** Write the field of the group, skipping null (absent) values. */
        void writeValue(Object group, JsonGenerator g) throws IOException {
            Object value = accessor.getValue(group);
            if (value != null) {
                g.writeFieldName(name);
                valueHandler.writeValue(value, g);
            }
        }
        /** Read the field value and assign it to the group. */
        void readValue(Object group, JsonParser p) throws IOException {
            Object value = valueHandler.readValue(p);
            accessor.setValue(group, value);
        }
    }

    /**
     * Handler for a group of a statically known type. The group is encoded as
     * a JSON object with one member per non-null field; when encoded
     * dynamically, the reserved "$type" member is emitted first.
     */
    @SuppressWarnings({"rawtypes"})
    static class StaticGroupHandler extends JsonValueHandler<Object> {
        private final String name;                 // group type name, written as the "$type" value
        private final Factory factory;             // creates empty group instances when decoding
        private Map<String, FieldHandler> fields;  // field handlers by field name; set via init()
        StaticGroupHandler(GroupDef group) {
            this.name = group.getName();
            this.factory = group.getBinding().getFactory();
        }
        /** Late initialization of the field handlers. */
        void init(Map<String, FieldHandler> fields) {
            this.fields = fields;
        }
        /**
         * Write the group as a JSON object. When {@code dynamic} is true the
         * "$type" member is emitted first so the decoder can resolve the type.
         */
        void writeValue(Object value, JsonGenerator g, boolean dynamic) throws IOException {
            g.writeStartObject();
            if (dynamic) {
                g.writeFieldName(TYPE_FIELD);
                g.writeString(name);
            }
            for (FieldHandler field : fields.values()) {
                field.writeValue(value, g);
            }
            g.writeEndObject();
        }
        @Override
        void writeValue(Object value, JsonGenerator g) throws IOException {
            writeValue(value, g, false);
        }
        @Override
        Object readValue(JsonParser p) throws IOException {
            Object group = factory.newInstance();
            readValue(group, p);
            return group;
        }
        /**
         * Populate an existing group instance from the parser.
         * The START_OBJECT token has already been consumed by the caller.
         *
         * @throws IOException if an unknown field name is encountered.
         */
        void readValue(Object group, JsonParser p) throws IOException {
            // startObject has already been read
            while (p.nextToken() == JsonToken.FIELD_NAME) {
                String fieldName = p.getCurrentName();
                // A JSON null leaves the field untouched (i.e. absent).
                if (p.nextToken() != JsonToken.VALUE_NULL) {
                    FieldHandler fieldHandler = fields.get(fieldName);
                    if (fieldHandler == null) {
                        throw new IOException("Unknown field: " + fieldName);
                    }
                    fieldHandler.readValue(group, p);
                }
            }
        }
        /**
         * @return the field handlers by field name, as assigned by {@link #init(Map)}.
         */
        Map<String, FieldHandler> getFields() {
            return fields;
        }
    }

    /**
     * Handler for a dynamic group, i.e. a group whose concrete type is only
     * known at runtime. The type is resolved through the codec using the
     * reserved "$type" member, which must be the first member of the object.
     */
    static class DynamicGroupHandler extends JsonValueHandler<Object> {
        private final JsonCodec jsonCodec;  // resolves group handlers by value class / type name
        DynamicGroupHandler(JsonCodec jsonCodec) {
            this.jsonCodec = jsonCodec;
        }
        @Override
        void writeValue(Object value, JsonGenerator g) throws IOException {
            StaticGroupHandler groupHandler = jsonCodec.lookupGroupByValue(value);
            if (groupHandler == null) {
                throw new IOException("Cannot encode group (unknown type)");
            }
            // dynamic=true makes the static handler emit the "$type" member.
            groupHandler.writeValue(value, g, true);
        }
        @Override
        Object readValue(JsonParser p) throws IOException {
            if (p.nextToken() != JsonToken.FIELD_NAME || !p.getText().equals(TYPE_FIELD)) {
                throw new IOException("Expected field " + TYPE_FIELD);
            }
            p.nextToken(); // field value
            String groupName = p.getText();
            StaticGroupHandler groupHandler = jsonCodec.lookupGroupByName(groupName);
            if (groupHandler == null) {
                throw new IOException("Unknown type: " + groupName);
            }
            return groupHandler.readValue(p);
        }
    }
}
|
Bug-fix: parse arrays
|
msgcodec-json/src/main/java/com/cinnober/msgcodec/json/JsonValueHandler.java
|
Bug-fix: parse arrays
|
|
Java
|
mit
|
b1b50f8f9bf863265c8bc9c0b19f928f00ee5b37
| 0
|
ArjanO/Purify
|
/**
* Copyright (c) 2013 HAN University of Applied Sciences
* Arjan Oortgiese
* Boyd Hofman
* Joëll Portier
* Michiel Westerbeek
* Tim Waalewijn
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package nl.han.ica.ap.purify.module.java.duplicatecode;
import java.util.TreeSet;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.RuleNode;
import nl.han.ica.ap.purify.language.java.JavaBaseVisitor;
import nl.han.ica.ap.purify.language.java.JavaParser.PrimaryContext;
import nl.han.ica.ap.purify.language.java.JavaParser.VariableDeclaratorIdContext;
/**
* Get the hash of a parse tree.
*
* @author Arjan
*/
/**
 * Get the hash of a parse tree.
 *
 * Local variable names are normalized: any identifier that is a known local
 * variable hashes to the same constant (VARIABLE_HASH), so two code fragments
 * that differ only in local variable names produce equal hashes.
 *
 * @author Arjan
 */
public class HashVisitor extends JavaBaseVisitor<Integer> {
    private static final int PRIME = 31;           // multiplier for combining child hashes
    private static final int VARIABLE_HASH = 342;  // stand-in hash for any known local variable
    private TreeSet<String> localVariables;
    /**
     * Get the hash of a method (subtree) parse tree.
     *
     * @param localVariables local variables in the method.
     */
    public HashVisitor(TreeSet<String> localVariables) {
        this.localVariables = localVariables;
    }
    /**
     * The visit returns a default value if no method returns anything else.
     * Null signals "no hash contribution" and is skipped by visitChildren.
     */
    @Override
    protected Integer defaultResult() {
        return null;
    }
    /**
     * Combines the hashes of all children into one hash.
     * Children are visited from LAST to FIRST; the accumulation
     * {@code hash = hash * PRIME + child} is order-dependent, so this order
     * must not be changed without invalidating previously computed hashes.
     */
    @Override
    public Integer visitChildren(RuleNode arg0) {
        int iHash = 1;
        boolean bVisit = false;
        for (int i = arg0.getChildCount() - 1; i >= 0; i--) {
            Integer iVisit = visit(arg0.getChild(i));
            // If the visit resulted no hash ignore the hash.
            if (iVisit != null) {
                iHash = iHash * PRIME + iVisit;
                bVisit = true;
            }
        }
        if (bVisit) {
            return iHash;
        } else {
            return defaultResult(); // No result.
        }
    }
    /**
     * Every node that is visited is seen by this method.
     */
    @Override
    public Integer visit(ParseTree tree) {
        /*
         * Call for example visitVariableDeclaratorId if the type of tree is
         * VariableDeclaratorId.
         */
        Integer result = super.visit(tree);
        if (result != null) {
            return result;
        }
        // There is no hash. Try to get a hash code from the ParseTree node.
        // HashCode.getHashCode returns -1 when no hash is available for the node.
        int iHash = HashCode.getHashCode(tree);
        if (iHash != -1) {
            return iHash;
        }
        return defaultResult();
    }
    /**
     * Called for variable declaration.
     *
     * Known local variables all hash to VARIABLE_HASH so renames do not
     * change the hash; unknown identifiers hash by name.
     *
     * For example:
     * {@code int myVar;}
     */
    @Override
    public Integer visitVariableDeclaratorId(VariableDeclaratorIdContext ctx) {
        if (ctx.Identifier() != null) {
            String identifier = ctx.Identifier().getText();
            if (identifier != null) {
                if (localVariables.contains(ctx.Identifier().getText())) {
                    return VARIABLE_HASH;
                }
                // Unknown variable name. Hash the name.
                return identifier.hashCode();
            }
        }
        return super.visitVariableDeclaratorId(ctx);
    }
    /**
     * Called if a variable or literal (number or text) is
     * used in a expression.
     *
     * For example:
     * {@code myVar = myVar * 2;}
     */
    @Override
    public Integer visitPrimary(PrimaryContext ctx) {
        if (ctx.Identifier() != null && ctx.Identifier().getText() != null) {
            String identifier = ctx.Identifier().getText();
            if (localVariables.contains(identifier)) {
                return VARIABLE_HASH;
            }
            // Unknown variable or it is a literal. Hash the text.
            return identifier.hashCode();
        }
        return super.visitPrimary(ctx);
    }
}
|
src/main/java/nl/han/ica/ap/purify/module/java/duplicatecode/HashVisitor.java
|
/**
* Copyright (c) 2013 HAN University of Applied Sciences
* Arjan Oortgiese
* Boyd Hofman
* Joëll Portier
* Michiel Westerbeek
* Tim Waalewijn
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package nl.han.ica.ap.purify.module.java.duplicatecode;
import java.util.TreeSet;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.RuleNode;
import nl.han.ica.ap.purify.language.java.JavaBaseVisitor;
/**
* Get the hash of a parse tree.
*
* @author Arjan
*/
/**
 * Get the hash of a parse tree.
 *
 * @author Arjan
 */
public class HashVisitor extends JavaBaseVisitor<Integer> {
    private static final int PRIME = 31;  // multiplier for combining child hashes
    // NOTE(review): localVariables is stored but not used in this version —
    // presumably reserved for variable-name normalization; confirm.
    private TreeSet<String> localVariables;
    /**
     * Get the hash of a method (subtree) parse tree.
     *
     * @param localVariables local variables in the method.
     */
    public HashVisitor(TreeSet<String> localVariables) {
        this.localVariables = localVariables;
    }
    /**
     * The visit returns a default value if no method returns anything else.
     * Null signals "no hash contribution" and is skipped by visitChildren.
     */
    @Override
    protected Integer defaultResult() {
        return null;
    }
    /**
     * Combines the hashes of all children into one hash.
     * Children are visited from LAST to FIRST; the accumulation
     * {@code hash = hash * PRIME + child} is order-dependent.
     */
    @Override
    public Integer visitChildren(RuleNode arg0) {
        int iHash = 1;
        boolean bVisit = false;
        for (int i = arg0.getChildCount() - 1; i >= 0; i--) {
            Integer iVisit = visit(arg0.getChild(i));
            // If the visit resulted no hash ignore the hash.
            if (iVisit != null) {
                iHash = iHash * PRIME + iVisit;
                bVisit = true;
            }
        }
        if (bVisit) {
            return iHash;
        } else {
            return defaultResult(); // No result.
        }
    }
    /**
     * Every node that is visited is seen by this method.
     */
    @Override
    public Integer visit(ParseTree tree) {
        /*
         * Call for example visitVariableDeclaratorId if the type of tree is
         * VariableDeclaratorId.
         */
        Integer result = super.visit(tree);
        if (result != null) {
            return result;
        }
        // There is no hash. Try to get a hash code from the ParseTree node.
        // HashCode.getHashCode returns -1 when no hash is available for the node.
        int iHash = HashCode.getHashCode(tree);
        if (iHash != -1) {
            return iHash;
        }
        return defaultResult();
    }
}
|
Added hashing for known variables with the same hash.
|
src/main/java/nl/han/ica/ap/purify/module/java/duplicatecode/HashVisitor.java
|
Added hashing for known variables with the same hash.
|
|
Java
|
mit
|
beb30a6dc5b44f6ded9db823730af320f0298341
| 0
|
LothrazarMinecraftMods/OverpoweredInventory,PrinceOfAmber/OverpoweredInventory
|
package com.lothrazar.powerinventory.inventory;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.inventory.ContainerPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.InventoryCrafting;
import net.minecraft.inventory.Slot;
import net.minecraft.inventory.SlotCrafting;
import net.minecraft.item.Item;
import net.minecraft.item.ItemArmor;
import net.minecraft.item.ItemStack;
import net.minecraft.util.MathHelper;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import org.apache.logging.log4j.Level;
import com.google.common.collect.Lists;
import com.lothrazar.powerinventory.Const;
import com.lothrazar.powerinventory.ModInv;
import com.lothrazar.powerinventory.inventory.client.GuiBigInventory;
/**
* @author https://github.com/Funwayguy/InfiniteInvo
* @author Forked and altered by https://github.com/PrinceOfAmber/InfiniteInvo
*/
public class BigContainerPlayer extends ContainerPlayer
{
private final int craftSize = 3;//did not exist before, was magic'd as 2 everywhere
private final EntityPlayer thePlayer;
public BigInventoryPlayer invo;
public boolean isLocalWorld;
final int padding = 6;
//these get used here for actual slot, and in GUI for texture
//ender pearl is in the far bottom right corner, and the others move left relative to this
public final int pearlX = 80;
public final int pearlY = 8;
public final int compassX = pearlX;
public final int compassY = pearlY + Const.square;
public final int clockX = pearlX;
public final int clockY = pearlY + 2*Const.square;
public final int echestX = pearlX;
public final int echestY = pearlY + 3*Const.square;
public final int bottleX = GuiBigInventory.texture_width - Const.square - padding - 1;
public final int bottleY = 20 + 2 * Const.square;
//store slot numbers (not indexes) as we go. so that transferStack.. is actually readable
static int S_RESULT;
static int S_CRAFT_START;
static int S_CRAFT_END;
static int S_ARMOR_START;
static int S_ARMOR_END;
static int S_BAR_START;
static int S_BAR_END;
static int S_MAIN_START;
static int S_MAIN_END;
static int S_ECHEST;
static int S_PEARL;
static int S_CLOCK;
static int S_COMPASS;
static int S_BOTTLE;
/**
 * Builds the enlarged player container: discards the slots registered by the
 * vanilla super constructor and re-registers everything — a 3x3 crafting
 * grid with result slot, armor, hotbar, the enlarged main grid, and the
 * special single-item slots (pearl, ender chest, clock, compass, bottle).
 *
 * The S_* static fields record the slot NUMBERS (not indexes) of each
 * section as slots are added, so transferStackInSlot stays readable.
 * NOTE(review): these fields are static and thus shared across all container
 * instances — safe only if every instance registers slots in the same order;
 * confirm.
 */
public BigContainerPlayer(BigInventoryPlayer playerInventory, boolean isLocal, EntityPlayer player)
{
    super(playerInventory, isLocal, player);
    this.thePlayer = player;
    // NOTE(review): the isLocalWorld field declared on this class is never
    // assigned here (vanilla sets its own private copy) — confirm intended.
    inventorySlots = Lists.newArrayList();//undo everything done by super()
    // Replace the vanilla 2x2 crafting matrix with a 3x3 one.
    craftMatrix = new InventoryCrafting(this, craftSize, craftSize);
    int i,j,cx,cy;//rows and cols of vanilla, not extra
    // Crafting result slot.
    S_RESULT = this.inventorySlots.size();
    this.addSlotToContainer(new SlotCrafting(playerInventory.player, this.craftMatrix, this.craftResult, 0,
            200,
            40));
    // 3x3 crafting grid.
    S_CRAFT_START = this.inventorySlots.size();
    for (i = 0; i < craftSize; ++i)
    {
        for (j = 0; j < craftSize; ++j)
        {
            cx = 114 + j * Const.square ;
            cy = 20 + i * Const.square ;
            this.addSlotToContainer(new Slot(this.craftMatrix, j + i * this.craftSize, cx , cy));
        }
    }
    S_CRAFT_END = this.inventorySlots.size() - 1;
    // Armor slots: one item max, restricted per armor piece; the inventory
    // index counts backwards from the end of the player inventory.
    S_ARMOR_START = this.inventorySlots.size();
    for (i = 0; i < Const.armorSize; ++i)
    {
        cx = 8;
        cy = 8 + i * Const.square;
        final int k = i; // armor type for the anonymous slot below
        this.addSlotToContainer(new Slot(playerInventory, playerInventory.getSizeInventory() - 1 - i, cx, cy)
        {
            public int getSlotStackLimit()
            {
                return 1;
            }
            public boolean isItemValid(ItemStack stack)
            {
                if (stack == null) return false;
                return stack.getItem().isValidArmor(stack, k, thePlayer);
            }
            @SideOnly(Side.CLIENT)
            public String getSlotTexture()
            {
                return ItemArmor.EMPTY_SLOT_NAMES[k];
            }
        });
    }
    S_ARMOR_END = this.inventorySlots.size() - 1;
    // Hotbar, shifted down to make room for the extra rows.
    S_BAR_START = this.inventorySlots.size();
    for (i = 0; i < Const.hotbarSize; ++i)
    {
        cx = 8 + i * Const.square;
        cy = 142 + (Const.square * Const.MORE_ROWS);
        this.addSlotToContainer(new Slot(playerInventory, i, cx, cy));
    }
    S_BAR_END = this.inventorySlots.size() - 1;
    // Enlarged main inventory grid; inventory indexes start after the hotbar.
    S_MAIN_START = this.inventorySlots.size();
    int slotIndex = Const.hotbarSize;
    for( i = 0; i < Const.ALL_ROWS; i++)
    {
        for ( j = 0; j < Const.ALL_COLS; ++j)
        {
            cx = 8 + j * Const.square;
            cy = 84 + i * Const.square;
            this.addSlotToContainer(new Slot(playerInventory, slotIndex, cx, cy));
            slotIndex++;
        }
    }
    S_MAIN_END = this.inventorySlots.size() - 1;
    // Special single-purpose slots.
    S_PEARL = this.inventorySlots.size() ;
    this.addSlotToContainer(new SlotEnderPearl(playerInventory, Const.enderPearlSlot, pearlX, pearlY));
    S_ECHEST = this.inventorySlots.size() ;
    this.addSlotToContainer(new SlotEnderChest(playerInventory, Const.enderChestSlot, echestX, echestY));
    S_CLOCK = this.inventorySlots.size() ;
    this.addSlotToContainer(new SlotClock(playerInventory, Const.clockSlot, clockX, clockY));
    S_COMPASS = this.inventorySlots.size() ;
    this.addSlotToContainer(new SlotCompass(playerInventory, Const.compassSlot, compassX, compassY));
    S_BOTTLE = this.inventorySlots.size() ;
    this.addSlotToContainer(new SlotBottle(playerInventory, Const.bottleSlot, bottleX, bottleY));
    // Refresh the crafting result from the (now empty) matrix.
    this.onCraftMatrixChanged(this.craftMatrix);
    this.invo = playerInventory;
}
/**
 * Looks up the slot bound to the given inventory and index, delegating to
 * vanilla. When the lookup fails this logs a FATAL message identifying the
 * caller instead of failing silently with null.
 */
@Override
public Slot getSlotFromInventory(IInventory invo, int id)
{
    Slot slot = super.getSlotFromInventory(invo, id);
    if(slot == null)
    {
        // The exception is created only to capture the current stack trace;
        // element [1] is the immediate caller of this method.
        Exception e = new NullPointerException();
        ModInv.logger.log(Level.FATAL, e.getStackTrace()[1].getClassName() + "." + e.getStackTrace()[1].getMethodName() + ":" + e.getStackTrace()[1].getLineNumber() + " is requesting slot " + id + " from inventory " + invo.getName() + " (" + invo.getClass().getName() + ") and got NULL!", e);
    }
    return slot;
}
/**
 * Called when the player closes the container. Any items still sitting on
 * the 3x3 crafting grid are dropped back to the player, and the crafting
 * result slot is cleared.
 */
@Override
public void onContainerClosed(EntityPlayer playerIn)
{
    super.onContainerClosed(playerIn);
    // Return every leftover item from the 3x3 crafting grid to the player.
    final int gridSlots = craftSize * craftSize;
    for (int slot = 0; slot < gridSlots; slot++)
    {
        ItemStack leftover = this.craftMatrix.getStackInSlotOnClosing(slot);
        if (leftover != null)
        {
            playerIn.dropPlayerItemWithRandomChoice(leftover, false);
        }
    }
    // Clear the crafting result slot.
    this.craftResult.setInventorySlotContents(0, (ItemStack) null);
}
/**
 * Called when a player shift-clicks on a slot. You must override this or you will crash when someone does that.
 *
 * Routes the clicked stack from its current slot group (craft result, craft
 * grid, armor, main inventory, hotbar, or one of the special single-purpose
 * slots) into the appropriate destination range via mergeItemStack.
 *
 * @param p the player shift-clicking
 * @param slotNumber index into this.inventorySlots of the clicked slot
 * @return a copy of the original stack when items moved, or null when nothing
 *         could be transferred
 */
@Override
public ItemStack transferStackInSlot(EntityPlayer p, int slotNumber)
{
    //Thanks to coolAlias on the forums :
    //http://www.minecraftforum.net/forums/mapping-and-modding/mapping-and-modding-tutorials/1571051-custom-container-how-to-properly-override-shift
    //above is from 2013 but still relevant
    ItemStack stackCopy = null;
    Slot slot = (Slot)this.inventorySlots.get(slotNumber);
    if (slot != null && slot.getHasStack())
    {
        ItemStack stackOrig = slot.getStack();
        // Copy taken before merging so we can detect whether anything moved.
        stackCopy = stackOrig.copy();
        if (slotNumber == S_RESULT)
        {
            // Craft result -> hotbar + main inventory, filling from the end.
            if (!this.mergeItemStack(stackOrig, S_BAR_START, S_MAIN_END, true))
            {
                return null;
            }
            slot.onSlotChange(stackOrig, stackCopy);
        }
        else if (slotNumber >= S_CRAFT_START && slotNumber <= S_CRAFT_END)
        {
            // Craft grid -> hotbar + main inventory.
            if (!this.mergeItemStack(stackOrig, S_BAR_START, S_MAIN_END, false))//was 9,45
            {
                return null;
            }
        }
        else if (slotNumber >= S_ARMOR_START && slotNumber <= S_ARMOR_END)
        {
            // Armor -> main inventory.
            if (!this.mergeItemStack(stackOrig, S_MAIN_START, S_MAIN_END, false))
            {
                return null;
            }
        }
        else if (stackCopy.getItem() instanceof ItemArmor
                && !((Slot)this.inventorySlots.get(S_ARMOR_START + ((ItemArmor)stackCopy.getItem()).armorType)).getHasStack()) // Inventory to armor
        {
            // Armor item clicked outside the armor slots, and the matching
            // armor slot is empty: equip it directly.
            int j = S_ARMOR_START + ((ItemArmor)stackCopy.getItem()).armorType;
            if (!this.mergeItemStack(stackOrig, j, j+1, false))
            {
                return null;
            }
        }
        else if (slotNumber >= S_MAIN_START && slotNumber <= S_MAIN_END) // main inv grid
        {
            //only from here are we doing the special items
            // Each special item first tries its dedicated slot (if that slot
            // is empty or not yet at its stack limit); otherwise falls
            // through to the hotbar at the end of this chain.
            if(stackCopy.getItem() == Items.ender_pearl &&
                    (
                    p.inventory.getStackInSlot(Const.enderPearlSlot) == null ||
                    p.inventory.getStackInSlot(Const.enderPearlSlot).stackSize < Items.ender_pearl.getItemStackLimit(stackCopy))
                    )
            {
                if (!this.mergeItemStack(stackOrig, S_PEARL, S_PEARL+1, false))
                {
                    return null;
                }
            }
            else if(stackCopy.getItem() == Item.getItemFromBlock(Blocks.ender_chest) &&
                    (
                    p.inventory.getStackInSlot(Const.enderChestSlot) == null ||
                    p.inventory.getStackInSlot(Const.enderChestSlot).stackSize < 1)
                    )
            {
                if (!this.mergeItemStack(stackOrig, S_ECHEST, S_ECHEST+1, false))
                {
                    return null;
                }
            }
            else if(stackCopy.getItem() == Items.compass &&
                    (
                    p.inventory.getStackInSlot(Const.compassSlot) == null ||
                    p.inventory.getStackInSlot(Const.compassSlot).stackSize < 1)
                    )
            {
                if (!this.mergeItemStack(stackOrig, S_COMPASS, S_COMPASS+1, false))
                {
                    return null;
                }
            }
            else if(stackCopy.getItem() == Items.clock &&
                    (
                    p.inventory.getStackInSlot(Const.clockSlot) == null ||
                    p.inventory.getStackInSlot(Const.clockSlot).stackSize < 1)
                    )
            {
                if (!this.mergeItemStack(stackOrig, S_CLOCK, S_CLOCK+1, false))
                {
                    return null;
                }
            }
            else if(stackCopy.getItem() == Items.glass_bottle )
            {
                if (!this.mergeItemStack(stackOrig, S_BOTTLE, S_BOTTLE+1, false))
                {
                    return null;
                }
            }
            else if (!this.mergeItemStack(stackOrig, S_BAR_START, S_BAR_END, false) )
            {
                // Plain item from the main grid: move to the hotbar.
                return null;
            }
        }
        else if (slotNumber >= S_BAR_START && slotNumber <= S_BAR_END) // Hotbar
        {
            if (!this.mergeItemStack(stackOrig, S_MAIN_START, S_MAIN_END, false))
            {
                return null;
            }
        }
        else if(slotNumber == S_PEARL || slotNumber == S_ECHEST || slotNumber == S_COMPASS || slotNumber == S_CLOCK || slotNumber == S_BOTTLE)
        {
            // Special slots empty back into the main inventory.
            if (!this.mergeItemStack(stackOrig, S_MAIN_START, S_MAIN_END, false))
            {
                return null;
            }
        }
        else if (!this.mergeItemStack(stackOrig, 9, invo.getSlotsNotArmor() + 9, false)) // Full range
        {
            return null;
        }
        if (stackOrig.stackSize == 0)
        {
            slot.putStack((ItemStack)null);
        }
        else
        {
            slot.onSlotChanged();
        }
        // Nothing actually moved; signal that to the caller.
        if (stackOrig.stackSize == stackCopy.stackSize)
        {
            return null;
        }
        slot.onPickupFromSlot(p, stackOrig);
    }
    return stackCopy;
}
}
|
src/main/java/com/lothrazar/powerinventory/inventory/BigContainerPlayer.java
|
package com.lothrazar.powerinventory.inventory;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.inventory.ContainerPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.InventoryCrafting;
import net.minecraft.inventory.Slot;
import net.minecraft.inventory.SlotCrafting;
import net.minecraft.item.Item;
import net.minecraft.item.ItemArmor;
import net.minecraft.item.ItemStack;
import net.minecraft.util.MathHelper;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import org.apache.logging.log4j.Level;
import com.google.common.collect.Lists;
import com.lothrazar.powerinventory.Const;
import com.lothrazar.powerinventory.ModInv;
import com.lothrazar.powerinventory.inventory.client.GuiBigInventory;
/**
* @author https://github.com/Funwayguy/InfiniteInvo
* @author Forked and altered by https://github.com/PrinceOfAmber/InfiniteInvo
*/
public class BigContainerPlayer extends ContainerPlayer
{
private final int craftSize = 3;//did not exist before, was magic'd as 2 everywhere
private final EntityPlayer thePlayer;
public BigInventoryPlayer invo;
public boolean isLocalWorld;
final int padding = 6;
//these get used here for actual slot, and in GUI for texture
//ender pearl is in the far bottom right corner, and the others move left relative to this
public final int pearlX = 80;
public final int pearlY = 8;
public final int compassX = pearlX;
public final int compassY = pearlY + Const.square;
public final int clockX = pearlX;
public final int clockY = pearlY + 2*Const.square;
public final int echestX = pearlX;
public final int echestY = pearlY + 3*Const.square;
public final int bottleX = GuiBigInventory.texture_width - Const.square - padding - 1;
public final int bottleY = 20 + 2 * Const.square;
//store slot numbers (not indexes) as we go. so that transferStack.. is actually readable
static int S_RESULT;
static int S_CRAFT_START;
static int S_CRAFT_END;
static int S_ARMOR_START;
static int S_ARMOR_END;
static int S_BAR_START;
static int S_BAR_END;
static int S_MAIN_START;
static int S_MAIN_END;
static int S_ECHEST;
static int S_PEARL;
static int S_CLOCK;
static int S_COMPASS;
static int S_BOTTLE;
/**
 * Builds the enlarged player container: craft result, 3x3 craft grid, armor,
 * hotbar, extended main inventory, and the five special item slots.
 *
 * NOTE: the registration ORDER below is load-bearing — each static S_* slot
 * index is captured from inventorySlots.size() immediately before/after its
 * section is added, and transferStackInSlot depends on those ranges.
 */
public BigContainerPlayer(BigInventoryPlayer playerInventory, boolean isLocal, EntityPlayer player)
{
    super(playerInventory, isLocal, player);
    this.thePlayer = player;
    inventorySlots = Lists.newArrayList();//undo everything done by super()
    craftMatrix = new InventoryCrafting(this, craftSize, craftSize);
    int i,j,cx,cy;//rows and cols of vanilla, not extra
    // --- Craft result slot ---
    S_RESULT = this.inventorySlots.size();
    this.addSlotToContainer(new SlotCrafting(playerInventory.player, this.craftMatrix, this.craftResult, 0,
            200,
            40));
    // --- 3x3 crafting grid ---
    S_CRAFT_START = this.inventorySlots.size();
    for (i = 0; i < craftSize; ++i)
    {
        for (j = 0; j < craftSize; ++j)
        {
            cx = 114 + j * Const.square ;
            cy = 20 + i * Const.square ;
            this.addSlotToContainer(new Slot(this.craftMatrix, j + i * this.craftSize, cx , cy));
        }
    }
    S_CRAFT_END = this.inventorySlots.size() - 1;
    // --- Armor slots (single-stack, armor-type-checked) ---
    S_ARMOR_START = this.inventorySlots.size();
    for (i = 0; i < Const.armorSize; ++i)
    {
        cx = 8;
        cy = 8 + i * Const.square;
        final int k = i;
        // Armor lives at the END of the backing inventory, hence the
        // getSizeInventory() - 1 - i index.
        this.addSlotToContainer(new Slot(playerInventory, playerInventory.getSizeInventory() - 1 - i, cx, cy)
        {
            public int getSlotStackLimit()
            {
                // Armor never stacks.
                return 1;
            }
            public boolean isItemValid(ItemStack stack)
            {
                if (stack == null) return false;
                return stack.getItem().isValidArmor(stack, k, thePlayer);
            }
            @SideOnly(Side.CLIENT)
            public String getSlotTexture()
            {
                return ItemArmor.EMPTY_SLOT_NAMES[k];
            }
        });
    }
    S_ARMOR_END = this.inventorySlots.size() - 1;
    // --- Hotbar (backing inventory indexes 0..hotbarSize-1) ---
    S_BAR_START = this.inventorySlots.size();
    for (i = 0; i < Const.hotbarSize; ++i)
    {
        cx = 8 + i * Const.square;
        cy = 142 + (Const.square * Const.MORE_ROWS);
        this.addSlotToContainer(new Slot(playerInventory, i, cx, cy));
    }
    S_BAR_END = this.inventorySlots.size() - 1;
    // --- Extended main inventory grid (continues after the hotbar indexes) ---
    S_MAIN_START = this.inventorySlots.size();
    int slotIndex = Const.hotbarSize;
    for( i = 0; i < Const.ALL_ROWS; i++)
    {
        for ( j = 0; j < Const.ALL_COLS; ++j)
        {
            cx = 8 + j * Const.square;
            cy = 84 + i * Const.square;
            this.addSlotToContainer(new Slot(playerInventory, slotIndex, cx, cy));
            slotIndex++;
        }
    }
    S_MAIN_END = this.inventorySlots.size() - 1;
    // --- Special single-purpose slots ---
    S_PEARL = this.inventorySlots.size() ;
    this.addSlotToContainer(new SlotEnderPearl(playerInventory, Const.enderPearlSlot, pearlX, pearlY));
    S_ECHEST = this.inventorySlots.size() ;
    this.addSlotToContainer(new SlotEnderChest(playerInventory, Const.enderChestSlot, echestX, echestY));
    S_CLOCK = this.inventorySlots.size() ;
    this.addSlotToContainer(new SlotClock(playerInventory, Const.clockSlot, clockX, clockY));
    S_COMPASS = this.inventorySlots.size() ;
    this.addSlotToContainer(new SlotCompass(playerInventory, Const.compassSlot, compassX, compassY));
    S_BOTTLE = this.inventorySlots.size() ;
    this.addSlotToContainer(new SlotBottle(playerInventory, Const.bottleSlot, bottleX, bottleY));
    this.onCraftMatrixChanged(this.craftMatrix);
    this.invo = playerInventory;
}
@Override
public Slot getSlotFromInventory(IInventory invo, int id)
{
    // Diagnostic wrapper: the vanilla lookup silently returns null on a bad
    // slot request, so log the calling class/method/line to trace the caller.
    Slot slot = super.getSlotFromInventory(invo, id);
    if(slot == null)
    {
        // The exception is created only to capture the current stack trace;
        // it is never thrown.
        Exception e = new NullPointerException();
        ModInv.logger.log(Level.FATAL, e.getStackTrace()[1].getClassName() + "." + e.getStackTrace()[1].getMethodName() + ":" + e.getStackTrace()[1].getLineNumber() + " is requesting slot " + id + " from inventory " + invo.getName() + " (" + invo.getClass().getName() + ") and got NULL!", e);
    }
    return slot;
}
@Override
public void onContainerClosed(EntityPlayer playerIn)
{
    super.onContainerClosed(playerIn);
    // When the GUI closes, return every item still sitting in the crafting
    // grid to the player; craftSize*craftSize covers the full 3x3 grid
    // (vanilla ContainerPlayer only had a 2x2 grid = 4 slots).
    for (int i = 0; i < craftSize*craftSize; ++i) // was 4
    {
        ItemStack itemstack = this.craftMatrix.getStackInSlotOnClosing(i);
        if (itemstack != null)
        {
            playerIn.dropPlayerItemWithRandomChoice(itemstack, false);
        }
    }
    // Clear the craft-result slot so no phantom output stack remains.
    this.craftResult.setInventorySlotContents(0, (ItemStack)null);
}
/**
 * Called when a player shift-clicks on a slot. You must override this or you will crash when someone does that.
 *
 * Routes the clicked stack from its current slot group (craft result, craft
 * grid, armor, main inventory, hotbar, or one of the special single-purpose
 * slots) into the appropriate destination range via mergeItemStack.
 *
 * FIX: the main-inventory branch previously tested
 * {@code slotNumber < S_MAIN_END} (exclusive), which skipped the far
 * lower-right main-inventory slot entirely; it must be inclusive ({@code <=}).
 *
 * @param p the player shift-clicking
 * @param slotNumber index into this.inventorySlots of the clicked slot
 * @return a copy of the original stack when items moved, or null when nothing
 *         could be transferred
 */
@Override
public ItemStack transferStackInSlot(EntityPlayer p, int slotNumber)
{
    //Thanks to coolAlias on the forums :
    //http://www.minecraftforum.net/forums/mapping-and-modding/mapping-and-modding-tutorials/1571051-custom-container-how-to-properly-override-shift
    //above is from 2013 but still relevant
    ItemStack stackCopy = null;
    Slot slot = (Slot)this.inventorySlots.get(slotNumber);
    if (slot != null && slot.getHasStack())
    {
        ItemStack stackOrig = slot.getStack();
        // Copy taken before merging so we can detect whether anything moved.
        stackCopy = stackOrig.copy();
        if (slotNumber == S_RESULT)
        {
            // Craft result -> hotbar + main inventory, filling from the end.
            if (!this.mergeItemStack(stackOrig, S_BAR_START, S_MAIN_END, true))
            {
                return null;
            }
            slot.onSlotChange(stackOrig, stackCopy);
        }
        else if (slotNumber >= S_CRAFT_START && slotNumber <= S_CRAFT_END)
        {
            if (!this.mergeItemStack(stackOrig, S_BAR_START, S_MAIN_END, false))//was 9,45
            {
                return null;
            }
        }
        else if (slotNumber >= S_ARMOR_START && slotNumber <= S_ARMOR_END)
        {
            if (!this.mergeItemStack(stackOrig, S_MAIN_START, S_MAIN_END, false))
            {
                return null;
            }
        }
        else if (stackCopy.getItem() instanceof ItemArmor
                && !((Slot)this.inventorySlots.get(S_ARMOR_START + ((ItemArmor)stackCopy.getItem()).armorType)).getHasStack()) // Inventory to armor
        {
            int j = S_ARMOR_START + ((ItemArmor)stackCopy.getItem()).armorType;
            if (!this.mergeItemStack(stackOrig, j, j+1, false))
            {
                return null;
            }
        }
        else if (slotNumber >= S_MAIN_START && slotNumber <= S_MAIN_END) // main inv grid; was '<' — off by one, missed the last slot
        {
            //only from here are we doing the special items
            // Each special item first tries its dedicated slot (if empty or
            // under its stack limit); otherwise falls through to the hotbar.
            if(stackCopy.getItem() == Items.ender_pearl &&
                    (
                    p.inventory.getStackInSlot(Const.enderPearlSlot) == null ||
                    p.inventory.getStackInSlot(Const.enderPearlSlot).stackSize < Items.ender_pearl.getItemStackLimit(stackCopy))
                    )
            {
                if (!this.mergeItemStack(stackOrig, S_PEARL, S_PEARL+1, false))
                {
                    return null;
                }
            }
            else if(stackCopy.getItem() == Item.getItemFromBlock(Blocks.ender_chest) &&
                    (
                    p.inventory.getStackInSlot(Const.enderChestSlot) == null ||
                    p.inventory.getStackInSlot(Const.enderChestSlot).stackSize < 1)
                    )
            {
                if (!this.mergeItemStack(stackOrig, S_ECHEST, S_ECHEST+1, false))
                {
                    return null;
                }
            }
            else if(stackCopy.getItem() == Items.compass &&
                    (
                    p.inventory.getStackInSlot(Const.compassSlot) == null ||
                    p.inventory.getStackInSlot(Const.compassSlot).stackSize < 1)
                    )
            {
                if (!this.mergeItemStack(stackOrig, S_COMPASS, S_COMPASS+1, false))
                {
                    return null;
                }
            }
            else if(stackCopy.getItem() == Items.clock &&
                    (
                    p.inventory.getStackInSlot(Const.clockSlot) == null ||
                    p.inventory.getStackInSlot(Const.clockSlot).stackSize < 1)
                    )
            {
                if (!this.mergeItemStack(stackOrig, S_CLOCK, S_CLOCK+1, false))
                {
                    return null;
                }
            }
            else if(stackCopy.getItem() == Items.glass_bottle )
            {
                if (!this.mergeItemStack(stackOrig, S_BOTTLE, S_BOTTLE+1, false))
                {
                    return null;
                }
            }
            else if (!this.mergeItemStack(stackOrig, S_BAR_START, S_BAR_END, false) )
            {
                return null;
            }
        }
        else if (slotNumber >= S_BAR_START && slotNumber <= S_BAR_END) // Hotbar
        {
            if (!this.mergeItemStack(stackOrig, S_MAIN_START, S_MAIN_END, false))
            {
                return null;
            }
        }
        else if(slotNumber == S_PEARL || slotNumber == S_ECHEST || slotNumber == S_COMPASS || slotNumber == S_CLOCK || slotNumber == S_BOTTLE)
        {
            // Special slots empty back into the main inventory.
            if (!this.mergeItemStack(stackOrig, S_MAIN_START, S_MAIN_END, false))
            {
                return null;
            }
        }
        else if (!this.mergeItemStack(stackOrig, 9, invo.getSlotsNotArmor() + 9, false)) // Full range
        {
            return null;
        }
        if (stackOrig.stackSize == 0)
        {
            slot.putStack((ItemStack)null);
        }
        else
        {
            slot.onSlotChanged();
        }
        // Nothing actually moved; signal that to the caller.
        if (stackOrig.stackSize == stackCopy.stackSize)
        {
            return null;
        }
        slot.onPickupFromSlot(p, stackOrig);
    }
    return stackCopy;
}
}
|
PowerInventory: fix off by one error for shift clicking the far lower right slot
|
src/main/java/com/lothrazar/powerinventory/inventory/BigContainerPlayer.java
|
PowerInventory: fix off by one error for shift clicking the far lower right slot
|
|
Java
|
mit
|
d1a992e01d8bf490f15d2b346093f208efb41820
| 0
|
EasyBatch/easybatch-framework,EasyBatch/easybatch-framework
|
/*
* The MIT License
*
* Copyright (c) 2015, Mahmoud Ben Hassine (mahmoud@benhassine.fr)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.easybatch.extensions.msexcel;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.easybatch.core.reader.RecordReader;
import org.easybatch.core.record.Header;
import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.Iterator;
/**
 * Reader that reads data from a MS Excel sheet.
 * <strong>Only MS Excel XLSX format is supported</strong>
 *
 * This reader produces {@link MsExcelRecord} instances.
 *
 * @author Mahmoud Ben Hassine (mahmoud.benhassine@icloud.com)
 */
public class MsExcelRecordReader implements RecordReader {

    /** Input file; used only to build the data source name. */
    private final File file;

    /** Sheet the records are read from. */
    private final XSSFSheet sheet;

    /** Kept as a field so the underlying file handle can be released in {@link #close()}. */
    private final XSSFWorkbook workbook;

    /** Iterator over the sheet rows; (re)created in {@link #open()}. */
    private Iterator<Row> rowIterator;

    /** 1-based number of the next record to emit. */
    private long recordNumber;

    /**
     * Create a new {@link MsExcelRecordReader} reading the first sheet.
     *
     * @param file the input file
     * @throws IOException when an error occurs during file opening
     */
    public MsExcelRecordReader(final File file) throws IOException {
        this(file, 0);
    }

    /**
     * Create a new {@link MsExcelRecordReader}.
     *
     * @param file the input file
     * @param sheetIndex the sheet index
     * @throws IOException when an error occurs during file opening
     */
    public MsExcelRecordReader(final File file, final int sheetIndex) throws IOException {
        this.file = file;
        try {
            workbook = new XSSFWorkbook(file);
            sheet = workbook.getSheetAt(sheetIndex);
        } catch (InvalidFormatException e) {
            // Surface POI's format error as an IOException, preserving the cause.
            throw new IOException("Invalid MsExcel file format. Only 'xlsx' is supported", e);
        }
    }

    @Override
    public void open() throws Exception {
        recordNumber = 1;
        rowIterator = sheet.iterator();
    }

    /**
     * Read the next row as a record.
     *
     * @return the next {@link MsExcelRecord}, or null when the sheet is exhausted
     */
    @Override
    public MsExcelRecord readRecord() throws Exception {
        if (rowIterator.hasNext()) {
            Header header = new Header(recordNumber++, getDataSourceName(), new Date());
            Row payload = rowIterator.next();
            return new MsExcelRecord(header, payload);
        } else {
            return null;
        }
    }

    private String getDataSourceName() {
        return String.format("Sheet '%s' in file %s", sheet.getSheetName(), file.getAbsolutePath());
    }

    @Override
    public void close() throws Exception {
        // Releases the workbook and the underlying open file handle.
        workbook.close();
    }
}
|
easybatch-extensions/easybatch-msexcel/src/main/java/org/easybatch/extensions/msexcel/MsExcelRecordReader.java
|
/*
* The MIT License
*
* Copyright (c) 2015, Mahmoud Ben Hassine (mahmoud@benhassine.fr)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.easybatch.extensions.msexcel;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.easybatch.core.reader.RecordReader;
import org.easybatch.core.record.Header;
import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.Iterator;
/**
 * Reader that reads data from a MS Excel sheet.
 * <strong>Only MS Excel XLSX format is supported</strong>
 *
 * This reader produces {@link MsExcelRecord} instances.
 *
 * @author Mahmoud Ben Hassine (mahmoud.benhassine@icloud.com)
 */
public class MsExcelRecordReader implements RecordReader {

    /** Input file; used only to build the data source name. */
    private final File file;

    /** Sheet the records are read from. */
    private final XSSFSheet sheet;

    /**
     * Kept as a field so the underlying file handle can be released in
     * {@link #close()}. Previously the workbook was a constructor local and
     * close() was a no-op, which leaked the open file.
     */
    private final XSSFWorkbook workbook;

    /** Iterator over the sheet rows; (re)created in {@link #open()}. */
    private Iterator<Row> rowIterator;

    /** 1-based number of the next record to emit. */
    private long recordNumber;

    /**
     * Create a new {@link MsExcelRecordReader} reading the first sheet.
     *
     * @param file the input file
     * @throws IOException when an error occurs during file opening
     */
    public MsExcelRecordReader(final File file) throws IOException {
        this(file, 0);
    }

    /**
     * Create a new {@link MsExcelRecordReader}.
     *
     * @param file the input file
     * @param sheetIndex the sheet index
     * @throws IOException when an error occurs during file opening
     */
    public MsExcelRecordReader(final File file, final int sheetIndex) throws IOException {
        this.file = file;
        try {
            workbook = new XSSFWorkbook(file);
            sheet = workbook.getSheetAt(sheetIndex);
        } catch (InvalidFormatException e) {
            // Surface POI's format error as an IOException, preserving the cause.
            throw new IOException("Invalid MsExcel file format. Only 'xlsx' is supported", e);
        }
    }

    @Override
    public void open() throws Exception {
        recordNumber = 1;
        rowIterator = sheet.iterator();
    }

    /**
     * Read the next row as a record.
     *
     * @return the next {@link MsExcelRecord}, or null when the sheet is exhausted
     */
    @Override
    public MsExcelRecord readRecord() throws Exception {
        if (rowIterator.hasNext()) {
            Header header = new Header(recordNumber++, getDataSourceName(), new Date());
            Row payload = rowIterator.next();
            return new MsExcelRecord(header, payload);
        } else {
            return null;
        }
    }

    private String getDataSourceName() {
        return String.format("Sheet '%s' in file %s", sheet.getSheetName(), file.getAbsolutePath());
    }

    @Override
    public void close() throws Exception {
        // FIX: was a no-op, leaking the file handle held by the workbook.
        workbook.close();
    }
}
|
close workbook in MsExcelRecordReader
|
easybatch-extensions/easybatch-msexcel/src/main/java/org/easybatch/extensions/msexcel/MsExcelRecordReader.java
|
close workbook in MsExcelRecordReader
|
|
Java
|
mit
|
827caade5cf563acde7be1dca413640d9c3c4299
| 0
|
ngageoint/geopackage-core-java
|
package mil.nga.geopackage.extension.style;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import mil.nga.geopackage.GeoPackageCore;
import mil.nga.geopackage.GeoPackageException;
import mil.nga.geopackage.extension.BaseExtension;
import mil.nga.geopackage.extension.ExtensionScopeType;
import mil.nga.geopackage.extension.Extensions;
import mil.nga.geopackage.extension.contents.ContentsId;
import mil.nga.geopackage.extension.contents.ContentsIdExtension;
import mil.nga.geopackage.extension.related.RelatedTablesCoreExtension;
import mil.nga.geopackage.property.GeoPackageProperties;
import mil.nga.geopackage.property.PropertyConstants;
/**
* Feature Core Style extension
*
* @author osbornb
* @since 3.1.1
*/
public abstract class FeatureCoreStyleExtension extends BaseExtension {
/**
* Extension author
*/
public static final String EXTENSION_AUTHOR = "nga";
/**
* Extension name without the author
*/
public static final String EXTENSION_NAME_NO_AUTHOR = "feature_style";
/**
* Extension, with author and name
*/
public static final String EXTENSION_NAME = Extensions.buildExtensionName(
EXTENSION_AUTHOR, EXTENSION_NAME_NO_AUTHOR);
/**
* Extension definition URL
*/
public static final String EXTENSION_DEFINITION = GeoPackageProperties
.getProperty(PropertyConstants.EXTENSIONS, EXTENSION_NAME_NO_AUTHOR);
/**
* Table name prefix for mapping styles
*/
public static final String TABLE_MAPPING_STYLE = EXTENSION_AUTHOR
+ "_style_";
/**
* Table name prefix for mapping style defaults
*/
public static final String TABLE_MAPPING_TABLE_STYLE = EXTENSION_AUTHOR
+ "_style_default_";
/**
* Table name prefix for mapping icons
*/
public static final String TABLE_MAPPING_ICON = EXTENSION_AUTHOR + "_icon_";
/**
* Table name prefix for mapping icon defaults
*/
public static final String TABLE_MAPPING_TABLE_ICON = EXTENSION_AUTHOR
+ "_icon_default_";
/**
* Related Tables extension
*/
protected final RelatedTablesCoreExtension relatedTables;
/**
* Contents Id extension
*/
protected final ContentsIdExtension contentsId;
/**
* Constructor
*
* @param geoPackage
* GeoPackage
* @param relatedTables
* related tables
*/
protected FeatureCoreStyleExtension(GeoPackageCore geoPackage,
RelatedTablesCoreExtension relatedTables) {
super(geoPackage);
this.relatedTables = relatedTables;
contentsId = new ContentsIdExtension(geoPackage);
}
/**
* Get or create the extension
*
* @param featureTable
* feature table
* @return extension
*/
private Extensions getOrCreate(String featureTable) {
Extensions extension = getOrCreate(EXTENSION_NAME, featureTable, null,
EXTENSION_DEFINITION, ExtensionScopeType.READ_WRITE);
return extension;
}
/**
* Get the feature tables registered with the extension
*
* @return list of feature table names
*/
public List<String> getTables() {
List<String> tables = new ArrayList<>();
List<Extensions> extensions = getExtensions(EXTENSION_NAME);
if (extensions != null) {
for (Extensions extension : extensions) {
tables.add(extension.getTableName());
}
}
return tables;
}
/**
* Determine if the GeoPackage has the extension
*
* @return true if has extension
*/
public boolean has() {
return super.has(EXTENSION_NAME);
}
/**
* Determine if the GeoPackage has the extension for the feature table
*
* @param featureTable
* feature table
* @return true if has extension
*/
public boolean has(String featureTable) {
return has(EXTENSION_NAME, featureTable, null);
}
/**
* Get the related tables extension
*
* @return related tables extension
*/
public RelatedTablesCoreExtension getRelatedTables() {
return relatedTables;
}
/**
* Get the contents id extension
*
* @return contents id extension
*/
public ContentsIdExtension getContentsId() {
return contentsId;
}
/**
* Create style, icon, table style, and table icon relationships for the
* feature table
*
* @param featureTable
* feature table
*/
public void createRelationships(String featureTable) {
createStyleRelationship(featureTable);
createTableStyleRelationship(featureTable);
createIconRelationship(featureTable);
createTableIconRelationship(featureTable);
}
/**
* Check if feature table has a style, icon, table style, or table icon
* relationships
*
* @param featureTable
* feature table
* @return true if has a relationship
*/
public boolean hasRelationship(String featureTable) {
return hasStyleRelationship(featureTable)
|| hasTableStyleRelationship(featureTable)
|| hasIconRelationship(featureTable)
|| hasTableIconRelationship(featureTable);
}
/**
* Create a style relationship for the feature table
*
* @param featureTable
* feature table
*/
public void createStyleRelationship(String featureTable) {
createStyleRelationship(
getMappingTableName(TABLE_MAPPING_STYLE, featureTable),
featureTable, featureTable, StyleTable.TABLE_NAME);
}
/**
* Determine if a style relationship exists for the feature table
*
* @param featureTable
* feature table
* @return true if relationship exists
*/
public boolean hasStyleRelationship(String featureTable) {
return hasStyleRelationship(
getMappingTableName(TABLE_MAPPING_STYLE, featureTable),
featureTable, StyleTable.TABLE_NAME);
}
/**
* Create a feature table style relationship
*
* @param featureTable
* feature table
*/
public void createTableStyleRelationship(String featureTable) {
createStyleRelationship(
getMappingTableName(TABLE_MAPPING_TABLE_STYLE, featureTable),
featureTable, ContentsId.TABLE_NAME, StyleTable.TABLE_NAME);
}
/**
* Determine if a feature table style relationship exists
*
* @param featureTable
* feature table
* @return true if relationship exists
*/
public boolean hasTableStyleRelationship(String featureTable) {
return hasStyleRelationship(
getMappingTableName(TABLE_MAPPING_TABLE_STYLE, featureTable),
ContentsId.TABLE_NAME, StyleTable.TABLE_NAME);
}
/**
* Create an icon relationship for the feature table
*
* @param featureTable
* feature table
*/
public void createIconRelationship(String featureTable) {
createStyleRelationship(
getMappingTableName(TABLE_MAPPING_ICON, featureTable),
featureTable, featureTable, IconTable.TABLE_NAME);
}
/**
* Determine if an icon relationship exists for the feature table
*
* @param featureTable
* feature table
* @return true if relationship exists
*/
public boolean hasIconRelationship(String featureTable) {
return hasStyleRelationship(
getMappingTableName(TABLE_MAPPING_ICON, featureTable),
featureTable, IconTable.TABLE_NAME);
}
/**
* Create a feature table icon relationship
*
* @param featureTable
* feature table
*/
public void createTableIconRelationship(String featureTable) {
createStyleRelationship(
getMappingTableName(TABLE_MAPPING_TABLE_ICON, featureTable),
featureTable, ContentsId.TABLE_NAME, IconTable.TABLE_NAME);
}
/**
* Determine if a feature table icon relationship exists
*
* @param featureTable
* feature table
* @return true if relationship exists
*/
public boolean hasTableIconRelationship(String featureTable) {
return hasStyleRelationship(
getMappingTableName(TABLE_MAPPING_TABLE_ICON, featureTable),
ContentsId.TABLE_NAME, IconTable.TABLE_NAME);
}
/**
* Get the mapping table name
*
* @param tablePrefix
* table name prefix
* @param featureTable
* feature table name
* @return mapping table name
*/
public String getMappingTableName(String tablePrefix, String featureTable) {
return tablePrefix + featureTable;
}
/**
 * Check if the style extension relationship between a base table and style
 * extension table exists
 *
 * @param mappingTableName
 *            mapping table name
 * @param baseTable
 *            base table name
 * @param relatedTable
 *            related table name
 * @return true if relationship exists
 */
private boolean hasStyleRelationship(String mappingTableName,
        String baseTable, String relatedTable) {
    boolean has = false;
    try {
        // Delegate to the Related Tables extension for the actual lookup.
        has = relatedTables.hasRelations(baseTable, relatedTable,
                mappingTableName);
    } catch (SQLException e) {
        // Wrap the SQL failure with full context, preserving the cause.
        throw new GeoPackageException(
                "Failed to check if Feature Style Relationship exists. Base Table: "
                        + baseTable + ", Related Table: " + relatedTable
                        + ", Mapping Table: " + mappingTableName, e);
    }
    return has;
}
/**
* Create a style extension relationship between a feature table and style
* extension table
*
* @param mappingTableName
* mapping table name
* @param featureTable
* feature table name
* @param baseTable
* base table name
* @param relatedTable
* related table name
*/
private void createStyleRelationship(String mappingTableName,
String featureTable, String baseTable, String relatedTable) {
if (!hasStyleRelationship(mappingTableName, baseTable, relatedTable)) {
// Create the extension
getOrCreate(featureTable);
if (baseTable.equals(ContentsId.TABLE_NAME)) {
if (!contentsId.has()) {
contentsId.getOrCreateExtension();
}
}
StyleMappingTable mappingTable = new StyleMappingTable(
mappingTableName);
if (relatedTable.equals(StyleTable.TABLE_NAME)) {
relatedTables.addAttributesRelationship(baseTable,
new StyleTable(), mappingTable);
} else {
relatedTables.addMediaRelationship(baseTable, new IconTable(),
mappingTable);
}
}
}
/**
* Delete the style and icon table and row relationships for all feature
* tables
*/
public void deleteRelationships() {
List<String> tables = getTables();
for (String table : tables) {
deleteRelationships(table);
}
}
/**
* Delete the style and icon table and row relationships for the feature
* table
*
* @param featureTable
* feature table
*/
public void deleteRelationships(String featureTable) {
deleteStyleRelationship(featureTable);
deleteTableStyleRelationship(featureTable);
deleteIconRelationship(featureTable);
deleteTableIconRelationship(featureTable);
}
/**
* Delete a style relationship for the feature table
*
* @param featureTable
* feature table
*/
public void deleteStyleRelationship(String featureTable) {
deleteStyleRelationship(
getMappingTableName(TABLE_MAPPING_STYLE, featureTable),
featureTable);
}
/**
* Delete a table style relationship for the feature table
*
* @param featureTable
* feature table
*/
public void deleteTableStyleRelationship(String featureTable) {
deleteStyleRelationship(
getMappingTableName(TABLE_MAPPING_TABLE_STYLE, featureTable),
featureTable);
}
/**
 * Delete an icon relationship for the feature table
 *
 * @param featureTable
 *            feature table
 */
public void deleteIconRelationship(String featureTable) {
    deleteStyleRelationship(
            getMappingTableName(TABLE_MAPPING_ICON, featureTable),
            featureTable);
}
/**
* Delete a table icon relationship for the feature table
*
* @param featureTable
* feature table
*/
public void deleteTableIconRelationship(String featureTable) {
deleteStyleRelationship(
getMappingTableName(TABLE_MAPPING_TABLE_ICON, featureTable),
featureTable);
}
/**
* Delete a style extension feature table relationship and the mapping table
*
* @param mappingTableName
* mapping table name
* @param featureTable
* feature table name
*/
private void deleteStyleRelationship(String mappingTableName,
String featureTable) {
relatedTables.removeRelationshipsWithMappingTable(mappingTableName);
if (!hasRelationship(featureTable)) {
try {
if (extensionsDao.isTableExists()) {
extensionsDao.deleteByExtension(EXTENSION_NAME,
featureTable);
}
} catch (SQLException e) {
throw new GeoPackageException(
"Failed to delete Feature Style extension. GeoPackage: "
+ geoPackage.getName() + ", Feature Table: "
+ featureTable, e);
}
}
}
/**
* Completely remove and delete the extension and all styles and icons
*/
public void removeExtension() {
deleteRelationships();
geoPackage.deleteTable(StyleTable.TABLE_NAME);
geoPackage.deleteTable(IconTable.TABLE_NAME);
try {
if (extensionsDao.isTableExists()) {
extensionsDao.deleteByExtension(EXTENSION_NAME);
}
} catch (SQLException e) {
throw new GeoPackageException(
"Failed to delete Feature Style extension. GeoPackage: "
+ geoPackage.getName(), e);
}
}
}
|
src/main/java/mil/nga/geopackage/extension/style/FeatureCoreStyleExtension.java
|
package mil.nga.geopackage.extension.style;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import mil.nga.geopackage.GeoPackageCore;
import mil.nga.geopackage.GeoPackageException;
import mil.nga.geopackage.extension.BaseExtension;
import mil.nga.geopackage.extension.ExtensionScopeType;
import mil.nga.geopackage.extension.Extensions;
import mil.nga.geopackage.extension.contents.ContentsId;
import mil.nga.geopackage.extension.contents.ContentsIdExtension;
import mil.nga.geopackage.extension.related.RelatedTablesCoreExtension;
import mil.nga.geopackage.property.GeoPackageProperties;
import mil.nga.geopackage.property.PropertyConstants;
/**
* Feature Core Style extension
*
* @author osbornb
* @since 3.1.1
*/
public abstract class FeatureCoreStyleExtension extends BaseExtension {

    /**
     * Extension author
     */
    public static final String EXTENSION_AUTHOR = "nga";

    /**
     * Extension name without the author
     */
    public static final String EXTENSION_NAME_NO_AUTHOR = "feature_style";

    /**
     * Extension, with author and name
     */
    public static final String EXTENSION_NAME = Extensions.buildExtensionName(
            EXTENSION_AUTHOR, EXTENSION_NAME_NO_AUTHOR);

    /**
     * Extension definition URL
     */
    public static final String EXTENSION_DEFINITION = GeoPackageProperties
            .getProperty(PropertyConstants.EXTENSIONS, EXTENSION_NAME_NO_AUTHOR);

    /**
     * Table name prefix for mapping styles to individual features
     */
    public static final String TABLE_MAPPING_STYLE = EXTENSION_AUTHOR
            + "_style_";

    /**
     * Table name prefix for mapping table-wide (default) styles
     */
    public static final String TABLE_MAPPING_TABLE_STYLE = EXTENSION_AUTHOR
            + "_style_default_";

    /**
     * Table name prefix for mapping icons to individual features
     */
    public static final String TABLE_MAPPING_ICON = EXTENSION_AUTHOR + "_icon_";

    /**
     * Table name prefix for mapping table-wide (default) icons
     */
    public static final String TABLE_MAPPING_TABLE_ICON = EXTENSION_AUTHOR
            + "_icon_default_";

    /**
     * Related Tables extension, used to create and query the style and icon
     * mapping relationships
     */
    protected final RelatedTablesCoreExtension relatedTables;

    /**
     * Contents Id extension, required as the base table for table-wide
     * (default) style and icon relationships
     */
    protected final ContentsIdExtension contentsId;

    /**
     * Constructor
     *
     * @param geoPackage
     *            GeoPackage
     * @param relatedTables
     *            related tables
     */
    protected FeatureCoreStyleExtension(GeoPackageCore geoPackage,
            RelatedTablesCoreExtension relatedTables) {
        super(geoPackage);
        this.relatedTables = relatedTables;
        contentsId = new ContentsIdExtension(geoPackage);
    }

    /**
     * Get or create the extension
     *
     * @param featureTable
     *            feature table
     * @return extension
     */
    private Extensions getOrCreate(String featureTable) {
        Extensions extension = getOrCreate(EXTENSION_NAME, featureTable, null,
                EXTENSION_DEFINITION, ExtensionScopeType.READ_WRITE);
        return extension;
    }

    /**
     * Get the feature tables registered with the extension
     *
     * @return list of feature table names
     */
    public List<String> getTables() {
        List<String> tables = new ArrayList<>();
        List<Extensions> extensions = getExtensions(EXTENSION_NAME);
        if (extensions != null) {
            for (Extensions extension : extensions) {
                tables.add(extension.getTableName());
            }
        }
        return tables;
    }

    /**
     * Determine if the GeoPackage has the extension for any table
     *
     * @return true if has extension
     */
    public boolean has() {
        return super.has(EXTENSION_NAME);
    }

    /**
     * Determine if the GeoPackage has the extension for the feature table
     *
     * @param featureTable
     *            feature table
     * @return true if has extension
     */
    public boolean has(String featureTable) {
        return has(EXTENSION_NAME, featureTable, null);
    }

    /**
     * Get the related tables extension
     *
     * @return related tables extension
     */
    public RelatedTablesCoreExtension getRelatedTables() {
        return relatedTables;
    }

    /**
     * Get the contents id extension
     *
     * @return contents id extension
     */
    public ContentsIdExtension getContentsId() {
        return contentsId;
    }

    /**
     * Create style, icon, table style, and table icon relationships for the
     * feature table
     *
     * @param featureTable
     *            feature table
     */
    public void createRelationships(String featureTable) {
        createStyleRelationship(featureTable);
        createTableStyleRelationship(featureTable);
        createIconRelationship(featureTable);
        createTableIconRelationship(featureTable);
    }

    /**
     * Check if feature table has a style, icon, table style, or table icon
     * relationships
     *
     * @param featureTable
     *            feature table
     * @return true if has a relationship
     */
    public boolean hasRelationship(String featureTable) {
        return hasStyleRelationship(featureTable)
                || hasTableStyleRelationship(featureTable)
                || hasIconRelationship(featureTable)
                || hasTableIconRelationship(featureTable);
    }

    /**
     * Create a style relationship for the feature table
     *
     * @param featureTable
     *            feature table
     */
    public void createStyleRelationship(String featureTable) {
        createStyleRelationship(
                getMappingTableName(TABLE_MAPPING_STYLE, featureTable),
                featureTable, featureTable, StyleTable.TABLE_NAME);
    }

    /**
     * Determine if a style relationship exists for the feature table
     *
     * @param featureTable
     *            feature table
     * @return true if relationship exists
     */
    public boolean hasStyleRelationship(String featureTable) {
        return hasStyleRelationship(
                getMappingTableName(TABLE_MAPPING_STYLE, featureTable),
                featureTable, StyleTable.TABLE_NAME);
    }

    /**
     * Create a feature table style relationship (table-wide default style,
     * based from the contents id table)
     *
     * @param featureTable
     *            feature table
     */
    public void createTableStyleRelationship(String featureTable) {
        createStyleRelationship(
                getMappingTableName(TABLE_MAPPING_TABLE_STYLE, featureTable),
                featureTable, ContentsId.TABLE_NAME, StyleTable.TABLE_NAME);
    }

    /**
     * Determine if a feature table style relationship exists
     *
     * @param featureTable
     *            feature table
     * @return true if relationship exists
     */
    public boolean hasTableStyleRelationship(String featureTable) {
        return hasStyleRelationship(
                getMappingTableName(TABLE_MAPPING_TABLE_STYLE, featureTable),
                ContentsId.TABLE_NAME, StyleTable.TABLE_NAME);
    }

    /**
     * Create an icon relationship for the feature table
     *
     * @param featureTable
     *            feature table
     */
    public void createIconRelationship(String featureTable) {
        createStyleRelationship(
                getMappingTableName(TABLE_MAPPING_ICON, featureTable),
                featureTable, featureTable, IconTable.TABLE_NAME);
    }

    /**
     * Determine if an icon relationship exists for the feature table
     *
     * @param featureTable
     *            feature table
     * @return true if relationship exists
     */
    public boolean hasIconRelationship(String featureTable) {
        return hasStyleRelationship(
                getMappingTableName(TABLE_MAPPING_ICON, featureTable),
                featureTable, IconTable.TABLE_NAME);
    }

    /**
     * Create a feature table icon relationship (table-wide default icon,
     * based from the contents id table)
     *
     * @param featureTable
     *            feature table
     */
    public void createTableIconRelationship(String featureTable) {
        createStyleRelationship(
                getMappingTableName(TABLE_MAPPING_TABLE_ICON, featureTable),
                featureTable, ContentsId.TABLE_NAME, IconTable.TABLE_NAME);
    }

    /**
     * Determine if a feature table icon relationship exists
     *
     * @param featureTable
     *            feature table
     * @return true if relationship exists
     */
    public boolean hasTableIconRelationship(String featureTable) {
        return hasStyleRelationship(
                getMappingTableName(TABLE_MAPPING_TABLE_ICON, featureTable),
                ContentsId.TABLE_NAME, IconTable.TABLE_NAME);
    }

    /**
     * Get the mapping table name
     *
     * @param tablePrefix
     *            table name prefix
     * @param featureTable
     *            feature table name
     * @return mapping table name
     */
    public String getMappingTableName(String tablePrefix, String featureTable) {
        return tablePrefix + featureTable;
    }

    /**
     * Check if the style extension relationship between a feature table and
     * style extension table exists
     *
     * @param mappingTableName
     *            mapping table name
     * @param baseTable
     *            base table name
     * @param relatedTable
     *            related table name
     * @return true if relationship exists
     */
    private boolean hasStyleRelationship(String mappingTableName,
            String baseTable, String relatedTable) {
        boolean has = false;
        try {
            has = relatedTables.hasRelations(baseTable, relatedTable,
                    mappingTableName);
        } catch (SQLException e) {
            throw new GeoPackageException(
                    "Failed to check if Feature Style Relationship exists. Base Table: "
                            + baseTable + ", Related Table: " + relatedTable
                            + ", Mapping Table: " + mappingTableName, e);
        }
        return has;
    }

    /**
     * Create a style extension relationship between a feature table and style
     * extension table
     *
     * @param mappingTableName
     *            mapping table name
     * @param featureTable
     *            feature table name
     * @param baseTable
     *            base table name
     * @param relatedTable
     *            related table name
     */
    private void createStyleRelationship(String mappingTableName,
            String featureTable, String baseTable, String relatedTable) {
        if (!hasStyleRelationship(mappingTableName, baseTable, relatedTable)) {
            // Create the extension
            getOrCreate(featureTable);
            // Table-wide relationships are based from the contents id table,
            // so make sure that extension exists first
            if (baseTable.equals(ContentsId.TABLE_NAME)) {
                if (!contentsId.has()) {
                    contentsId.getOrCreateExtension();
                }
            }
            StyleMappingTable mappingTable = new StyleMappingTable(
                    mappingTableName);
            // Styles are attributes tables, icons are media tables
            if (relatedTable.equals(StyleTable.TABLE_NAME)) {
                relatedTables.addAttributesRelationship(baseTable,
                        new StyleTable(), mappingTable);
            } else {
                relatedTables.addMediaRelationship(baseTable, new IconTable(),
                        mappingTable);
            }
        }
    }

    /**
     * Delete the style and icon table and row relationships for all feature
     * tables
     */
    public void deleteRelationships() {
        List<String> tables = getTables();
        for (String table : tables) {
            deleteRelationships(table);
        }
    }

    /**
     * Delete the style and icon table and row relationships for the feature
     * table
     *
     * @param featureTable
     *            feature table
     */
    public void deleteRelationships(String featureTable) {
        deleteStyleRelationship(featureTable);
        deleteTableStyleRelationship(featureTable);
        deleteIconRelationship(featureTable);
        deleteTableIconRelationship(featureTable);
    }

    /**
     * Delete a style relationship for the feature table
     *
     * @param featureTable
     *            feature table
     */
    public void deleteStyleRelationship(String featureTable) {
        deleteStyleRelationship(
                getMappingTableName(TABLE_MAPPING_STYLE, featureTable),
                featureTable);
    }

    /**
     * Delete a table style relationship for the feature table
     *
     * @param featureTable
     *            feature table
     */
    public void deleteTableStyleRelationship(String featureTable) {
        deleteStyleRelationship(
                getMappingTableName(TABLE_MAPPING_TABLE_STYLE, featureTable),
                featureTable);
    }

    /**
     * Delete an icon relationship for the feature table
     *
     * @param featureTable
     *            feature table
     */
    public void deleteIconRelationship(String featureTable) {
        deleteStyleRelationship(
                getMappingTableName(TABLE_MAPPING_ICON, featureTable),
                featureTable);
    }

    /**
     * Delete a table icon relationship for the feature table
     *
     * @param featureTable
     *            feature table
     */
    public void deleteTableIconRelationship(String featureTable) {
        deleteStyleRelationship(
                getMappingTableName(TABLE_MAPPING_TABLE_ICON, featureTable),
                featureTable);
    }

    /**
     * Delete a style extension feature table relationship, drop the mapping
     * table, and remove the extension record when no relationships remain for
     * the feature table
     *
     * @param mappingTableName
     *            mapping table name
     * @param featureTable
     *            feature table name
     */
    private void deleteStyleRelationship(String mappingTableName,
            String featureTable) {
        relatedTables.removeRelationshipsWithMappingTable(mappingTableName);
        geoPackage.dropTable(mappingTableName);
        if (!hasRelationship(featureTable)) {
            try {
                if (extensionsDao.isTableExists()) {
                    extensionsDao.deleteByExtension(EXTENSION_NAME,
                            featureTable);
                }
            } catch (SQLException e) {
                throw new GeoPackageException(
                        "Failed to delete Feature Style extension. GeoPackage: "
                                + geoPackage.getName() + ", Feature Table: "
                                + featureTable, e);
            }
        }
    }

    /**
     * Completely remove and delete the extension and all styles and icons
     */
    public void removeExtension() {
        deleteRelationships();
        geoPackage.deleteTable(StyleTable.TABLE_NAME);
        geoPackage.deleteTable(IconTable.TABLE_NAME);
        try {
            if (extensionsDao.isTableExists()) {
                extensionsDao.deleteByExtension(EXTENSION_NAME);
            }
        } catch (SQLException e) {
            throw new GeoPackageException(
                    "Failed to delete Feature Style extension. GeoPackage: "
                            + geoPackage.getName(), e);
        }
    }
}
|
mapping table dropped by relationship removal
|
src/main/java/mil/nga/geopackage/extension/style/FeatureCoreStyleExtension.java
|
mapping table dropped by relationship removal
|
|
Java
|
mit
|
7c78da7e02ead6b8360a6f0804c0aec3739e2fee
| 0
|
larunrahul/JADS
|
package com.learning.ads.string.patternmatch;
public class RabinKarpPattenMatching {

    /** Large prime modulus keeping intermediate hash products within long range. */
    private static final int MOD = 1000000007;

    /** Polynomial rolling hash base. */
    private static final int BASE = 31;

    /**
     * Searches for the first occurrence of {@code pattern} in {@code text}
     * using the Rabin-Karp rolling hash algorithm.
     *
     * https://en.wikipedia.org/wiki/Rabin%E2%80%93Karp_algorithm
     *
     * Complexity:
     *
     * Worst case - O(n * m) where n is length of text and m is length of pattern
     *
     * Best case - O(n + m) where n is length of text and m is length of pattern
     *
     * @param text
     *            Whole text in which we want to search for a pattern
     * @param pattern
     *            String which we want to search in text
     *
     * @return Index of the first match of the pattern in the text, otherwise -1
     */
    public int search(String text, String pattern) {
        int n = text.length();
        int m = pattern.length();
        long patternHash = firstHash(pattern, 0, m);
        long textHash = 0;
        for (int i = 0; i <= n - m; i++) {
            if ((textHash = rollingHash(text, i, i + m, textHash)) == patternHash) {
                // Hash collision is possible; confirm with a direct comparison
                int j = 0;
                while (j < m && text.charAt(i + j) == pattern.charAt(j)) {
                    j++;
                }
                if (j == m) {
                    return i;
                }
            }
        }
        return -1;
    }

    /**
     * Computes the hash of the window [start, end) of {@code text}. The first
     * window is hashed from scratch; every later window is derived from the
     * previous window's hash in (nominally) constant time.
     *
     * @param text
     *            input text
     * @param start
     *            start position of the window, inclusive
     * @param end
     *            end position of the window, exclusive
     * @param previousHash
     *            hash of the previous window [start - 1, end - 1)
     * @return hash of the current window, always in [0, MOD)
     */
    private long rollingHash(String text, int start, int end, long previousHash) {
        long hash = previousHash;
        if (start == 0) {
            hash = firstHash(text, start, end);
        } else {
            hash = successiveHash(text, start, end, previousHash);
        }
        return hash;
    }

    /*
     * Horner's method polynomial hash of [start, end); result is in [0, MOD).
     *
     * https://en.wikipedia.org/wiki/Horner%27s_method
     */
    private long firstHash(String text, int start, int end) {
        long hash = 0;
        for (int i = start; i < end; i++) {
            hash = ((hash * BASE) % MOD + text.charAt(i)) % MOD;
        }
        return hash;
    }

    /*
     * Rolls the hash from window [start - 1, end - 1) to [start, end):
     * drop the leading character's contribution, shift by BASE, add the new
     * trailing character.
     *
     * https://stackoverflow.com/questions/20412405/rolling-hash-in-rabin-karp
     */
    private long successiveHash(String text, int start, int end, long previousHash) {
        long base = 1;
        for (int i = start; i < end; i++) {
            base = (base * BASE) % MOD;
        }
        long hash = (previousHash * BASE - ((text.charAt(start - 1) * base) % MOD)
                + text.charAt(end - 1)) % MOD;
        // Java's % can produce a negative remainder after the subtraction.
        // Normalize into [0, MOD) so the value is comparable with the always
        // non-negative result of firstHash; otherwise matches are missed.
        return hash < 0 ? hash + MOD : hash;
    }
}
|
src/main/java/com/learning/ads/string/patternmatch/RabinKarpPattenMatching.java
|
package com.learning.ads.string.patternmatch;
public class RabinKarpPattenMatching {

    /** Large prime modulus keeping intermediate hash products within long range. */
    private static final int MOD = 1000000007;

    /** Polynomial rolling hash base. */
    private static final int BASE = 31;

    /**
     * Searches for the first occurrence of {@code pattern} in {@code text}
     * using the Rabin-Karp rolling hash algorithm.
     *
     * https://en.wikipedia.org/wiki/Rabin%E2%80%93Karp_algorithm
     *
     * Complexity:
     *
     * Worst case - O(n * m) where n is length of text and m is length of pattern
     *
     * Best case - O(n + m) where n is length of text and m is length of pattern
     *
     * @param text
     *            Whole text in which we want to search for a pattern
     * @param pattern
     *            String which we want to search in text
     *
     * @return Index of the first match of the pattern in the text, otherwise -1
     */
    public int search(String text, String pattern) {
        int n = text.length();
        int m = pattern.length();
        long patternHash = firstHash(pattern, 0, m);
        long textHash = 0;
        for (int i = 0; i <= n - m; i++) {
            if ((textHash = rollingHash(text, i, i + m, textHash)) == patternHash) {
                // Hash collision is possible; confirm with a direct comparison
                int j = 0;
                while (j < m && text.charAt(i + j) == pattern.charAt(j)) {
                    j++;
                }
                if (j == m) {
                    return i;
                }
            }
        }
        return -1;
    }

    /**
     * Computes the hash of the window [start, end) of {@code text}. The first
     * window is hashed from scratch; every later window is derived from the
     * previous window's hash.
     *
     * @param text
     *            input text
     * @param start
     *            start position of the window, inclusive
     * @param end
     *            end position of the window, exclusive
     * @param previousHash
     *            hash of the previous window [start - 1, end - 1)
     * @return hash of the current window, always in [0, MOD)
     */
    private long rollingHash(String text, int start, int end, long previousHash) {
        long hash = previousHash;
        if (start == 0) {
            hash = firstHash(text, start, end);
        } else {
            hash = successiveHash(text, start, end, previousHash);
        }
        return hash;
    }

    /*
     * Horner's method polynomial hash of [start, end); result is in [0, MOD).
     * hash < MOD, so hash * BASE + char stays well within long range.
     *
     * https://en.wikipedia.org/wiki/Horner%27s_method
     */
    private long firstHash(String text, int start, int end) {
        long hash = 0;
        for (int i = start; i < end; i++) {
            hash = (hash * BASE + text.charAt(i)) % MOD;
        }
        return hash;
    }

    /*
     * Rolls the hash from window [start - 1, end - 1) to [start, end):
     * drop the leading character's contribution, shift by BASE, add the new
     * trailing character.
     *
     * https://stackoverflow.com/questions/20412405/rolling-hash-in-rabin-karp
     */
    private long successiveHash(String text, int start, int end, long previousHash) {
        long base = 1;
        for (int i = start; i < end; i++) {
            base = (base * BASE) % MOD;
        }
        long hash = (previousHash * BASE - ((text.charAt(start - 1) * base) % MOD)
                + text.charAt(end - 1)) % MOD;
        // Java's % can produce a negative remainder after the subtraction.
        // Normalize into [0, MOD) so the value is comparable with the always
        // non-negative result of firstHash; otherwise matches are missed.
        return hash < 0 ? hash + MOD : hash;
    }
}
|
Adding additional modulos to prevent overflows
|
src/main/java/com/learning/ads/string/patternmatch/RabinKarpPattenMatching.java
|
Adding additional modulos to prevent overflows
|
|
Java
|
mit
|
0cbd9da16c63f47e80dda2e68967728ad6b530bf
| 0
|
RoboEagles4828/LandrovalToast,RoboEagles4828/LandrovalToast
|
import frc.team4828.landrovaltoast.TestClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.Logger;
import static org.junit.Assert.assertEquals;
/**
 * This class is used for testing things on the robot.
 */
@RunWith(JUnit4.class)
public class Tester {

    // Cache the logger instead of looking it up on every call;
    // Logger.getLogger returns the same instance for a given name.
    private static final Logger LOG = Logger.getLogger("InfoLog");

    /**
     * Verifies that TestClass.testTester doubles its input.
     */
    @Test
    public void sampleTest() {
        LOG.log(Level.INFO, "You're currently testing Tester!");
        TestClass test = new TestClass();
        int returned = test.testTester(3);
        LOG.log(Level.INFO, "Number: " + returned);
        assertEquals(6, returned);
    }
}
|
src/test/java/Tester.java
|
import frc.team4828.landrovaltoast.TestClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.util.logging.Level;
import java.util.logging.Logger;
import static org.junit.Assert.assertEquals;
/**
 * This class is used for testing things on the robot.
 */
@RunWith(JUnit4.class)
public class Tester {

    // Cache the logger instead of looking it up on every call;
    // Logger.getLogger returns the same instance for a given name.
    private static final Logger LOG = Logger.getLogger("InfoLog");

    /**
     * Verifies that TestClass.testTester doubles its input.
     */
    @Test
    public void sampleTest() {
        LOG.log(Level.INFO, "You're currently testing Tester!");
        TestClass test = new TestClass();
        int returned = test.testTester(3);
        LOG.log(Level.INFO, "Number: " + returned);
        assertEquals(6, returned);
    }
}
|
Logger Test
Testing Java's logger
|
src/test/java/Tester.java
|
Logger Test
|
|
Java
|
epl-1.0
|
eeba2c0ac4bcfe48fe3c4813e8be4d24c807bf53
| 0
|
codenvy/codenvy,R-Brain/codenvy,R-Brain/codenvy,R-Brain/codenvy,codenvy/codenvy,codenvy/codenvy,R-Brain/codenvy,codenvy/codenvy,codenvy/codenvy,codenvy/codenvy,R-Brain/codenvy,R-Brain/codenvy
|
/*
*
* CODENVY CONFIDENTIAL
* ________________
*
* [2012] - [2013] Codenvy, S.A.
* All Rights Reserved.
* NOTICE: All information contained herein is, and remains
* the property of Codenvy S.A. and its suppliers,
* if any. The intellectual and technical concepts contained
* herein are proprietary to Codenvy S.A.
* and its suppliers and may be covered by U.S. and Foreign Patents,
* patents in process, and are protected by trade secret or copyright law.
* Dissemination of this information or reproduction of this material
* is strictly forbidden unless prior written permission is obtained
* from Codenvy S.A..
*/
package com.codenvy.analytics.metrics;
import com.codenvy.analytics.Injector;
import com.codenvy.analytics.Utils;
import com.codenvy.analytics.datamodel.MapValueData;
import com.codenvy.analytics.datamodel.ValueData;
import com.codenvy.analytics.datamodel.ValueDataUtil;
import com.codenvy.analytics.persistent.DataLoader;
import com.codenvy.analytics.persistent.MongoDataStorage;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import java.io.IOException;
import java.text.ParseException;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
/**
* It is supposed to load calculated value {@link com.codenvy.analytics.datamodel.ValueData} from the storage.
*
* @author <a href="mailto:abazko@codenvy.com">Anatoliy Bazko</a>
*/
public abstract class ReadBasedMetric extends AbstractMetric {

    /** Prefix marking a filter value as excluded ("~value" means NOT value). */
    public static final String EXCLUDE_SIGN = "~";

    /** Separator between multiple values inside a single filter parameter. */
    public static final String SEPARATOR = ",";

    /** One day, in milliseconds; used to make TO_DATE inclusive of the whole day. */
    public static final long DAY_IN_MILLISECONDS = 86400000L;

    /** Matches registered user names (anything not anonymous and not "default"). */
    public static final Pattern REGISTERED_USER =
            Pattern.compile("^(?!(ANONYMOUSUSER_|DEFAULT)).*", Pattern.CASE_INSENSITIVE);

    /** Matches anonymous user names. */
    public static final Pattern ANONYMOUS_USER =
            Pattern.compile("^(ANONYMOUSUSER_).*", Pattern.CASE_INSENSITIVE);

    /** Matches persistent workspaces (not temporary and not "default"). */
    public static final Pattern PERSISTENT_WS = Pattern.compile("^(?!(TMP-|DEFAULT)).*", Pattern.CASE_INSENSITIVE);

    /** Matches temporary workspaces. */
    public static final Pattern TEMPORARY_WS = Pattern.compile("^(TMP-).*", Pattern.CASE_INSENSITIVE);

    /** Leading sign on a SORT parameter requesting ascending order. */
    public static final String ASC_SORT_SIGN = "+";

    /** Metric name suffix identifying the precomputed variant of a metric. */
    public static final String PRECOMPUTED = "_precomputed";

    public final DataLoader dataLoader;

    public ReadBasedMetric(String metricName) {
        super(metricName);
        MongoDataStorage mongoDataStorage = Injector.getInstance(MongoDataStorage.class);
        this.dataLoader = mongoDataStorage.createdDataLoader();
    }

    public ReadBasedMetric(MetricType metricType) {
        this(metricType.toString());
    }

    /**
     * Returns the metric value for the given context. Delegates to the
     * precomputed metric when one exists and no explicit date range is set;
     * otherwise loads from storage and applies {@link #postComputation}.
     */
    @Override
    public ValueData getValue(Context context) throws IOException {
        context = modifyContext(context);
        validateRestrictions(context);
        if (canReadPrecomputedData(context)) {
            Metric metric = MetricFactory.getMetric(getName() + PRECOMPUTED);
            return metric.getValue(context);
        } else {
            ValueData valueData = dataLoader.loadValue(this, context);
            return postComputation(valueData, context);
        }
    }

    /**
     * Validates restriction before data loading.
     *
     * @param context
     *         the execution context
     */
    private void validateRestrictions(Context context) {
        // A metric annotated with @FilterRequired cannot be evaluated unless
        // the mandatory filter parameter is present in the context
        if (getClass().isAnnotationPresent(FilterRequired.class)) {
            MetricFilter requiredFilter = getClass().getAnnotation(FilterRequired.class).value();
            if (!context.exists(requiredFilter)) {
                throw new MetricRestrictionException(
                        "Parameter " + requiredFilter + " required to be passed to get the value of the metric");
            }
        }
    }

    /**
     * Provides ability to modify the result by adding new fields or changing existed ones.
     */
    public ValueData postComputation(ValueData valueData, Context clauses) throws IOException {
        return valueData;
    }

    /** Allows modify context before evaluation if necessary. */
    protected Context modifyContext(Context context) throws IOException {
        return context;
    }

    // Precomputed data is only usable for the all-time value: an explicit
    // FROM_DATE or TO_DATE forces a direct storage read.
    private boolean canReadPrecomputedData(Context context) {
        return MetricFactory.exists(getName() + PRECOMPUTED)
               && !context.exists(Parameters.FROM_DATE)
               && !context.exists(Parameters.TO_DATE);
    }

    // --------------------------------------------- storage related methods -------------

    /**
     * @return the fields are interested in by given metric. In other words, they are valuable for given metric. It
     *         might returns empty array to read all available fields
     */
    public abstract String[] getTrackedFields();

    public String getStorageCollectionName() {
        return getName().toLowerCase();
    }

    public String getStorageCollectionName(MetricType metricType) {
        return metricType.toString().toLowerCase();
    }

    public String getStorageCollectionName(String metricName) {
        return metricName.toLowerCase();
    }

    /**
     * Returns 'matcher' in term of MongoDB. Basically, it can be treated as 'WHERE' clause in SQL queries.
     * See mongoDB related documentation for more details.
     *
     * @param clauses
     *         the execution context
     * @return {@link DBObject} a {@code $match} aggregation stage
     */
    public DBObject getFilter(Context clauses) throws IOException, ParseException {
        BasicDBObject match = new BasicDBObject();
        setDateFilter(clauses, match);
        for (MetricFilter filter : clauses.getFilters()) {
            if (filter == MetricFilter.USER_COMPANY
                || filter == MetricFilter.USER_FIRST_NAME
                || filter == MetricFilter.USER_LAST_NAME) {
                // Profile-based filters are resolved to concrete user ids first
                String value = clauses.getAsString(filter);
                String[] users = getUsers(filter, value);
                match.put(MetricFilter.USER.name().toLowerCase(), new BasicDBObject("$in", users));
            } else if (filter == MetricFilter.USER) {
                String value = clauses.getAsString(filter);
                Object users;
                if (value.equalsIgnoreCase(Parameters.USER_TYPES.REGISTERED.name())) {
                    users = REGISTERED_USER;
                // NOTE(review): "ANTONYMOUS" looks like a typo of ANONYMOUS in the
                // Parameters.USER_TYPES enum; referenced here as declared
                } else if (value.equalsIgnoreCase(Parameters.USER_TYPES.ANTONYMOUS.name())) {
                    users = ANONYMOUS_USER;
                } else if (value.equalsIgnoreCase(Parameters.USER_TYPES.ANY.name())) {
                    continue;
                } else {
                    String[] values = value.split(SEPARATOR);
                    users = processExclusiveValues(values, filter.isNumericType());
                }
                match.put(filter.name().toLowerCase(), users);
            } else if (filter == MetricFilter.WS) {
                String value = clauses.getAsString(filter);
                Object ws;
                if (value.equalsIgnoreCase(Parameters.WS_TYPES.PERSISTENT.name())) {
                    ws = PERSISTENT_WS;
                } else if (value.equalsIgnoreCase(Parameters.WS_TYPES.TEMPORARY.name())) {
                    ws = TEMPORARY_WS;
                } else if (value.equalsIgnoreCase(Parameters.WS_TYPES.ANY.name())) {
                    continue;
                } else {
                    String[] values = value.split(SEPARATOR);
                    ws = processExclusiveValues(values, filter.isNumericType());
                }
                match.put(filter.name().toLowerCase(), ws);
            } else if (filter == MetricFilter.FACTORY) {
                Object value = clauses.get(filter);
                match.put(filter.name().toLowerCase(), value); //TODO SUPPORT ARRAY FILTER DASBH-429
            } else if (filter == MetricFilter.PARAMETERS) {
                match.putAll(Utils.fetchEncodedPairs(clauses.getAsString(filter)));
            } else {
                // NOTE(review): a null filter value throws NPE at value.getClass()
                // below — presumably the context never holds nulls; confirm
                Object value = clauses.get(filter);
                if (value instanceof String) {
                    String[] values = ((String)value).split(SEPARATOR);
                    match.put(filter.name().toLowerCase(), processExclusiveValues(values, filter.isNumericType()));
                } else if (value.getClass().isArray()) {
                    match.put(filter.name().toLowerCase(), new BasicDBObject("$in", value));
                } else {
                    throw new IllegalStateException("Unsupported filter value class " + value.getClass());
                }
            }
        }
        return new BasicDBObject("$match", match);
    }

    /**
     * Splits values into inclusive and "~"-prefixed exclusive groups and builds
     * the corresponding Mongo condition. NOTE(review): when both groups are
     * present, the exclusive values are ignored — only the "$in" condition is
     * produced; confirm this precedence is intended.
     */
    private Object processExclusiveValues(String[] values, boolean isNumericType) throws IOException, ParseException {
        StringBuilder exclusiveValues = new StringBuilder();
        StringBuilder inclusiveValues = new StringBuilder();
        for (String value : values) {
            if (value.startsWith(EXCLUDE_SIGN)) {
                if (exclusiveValues.length() != 0) {
                    exclusiveValues.append(SEPARATOR);
                }
                exclusiveValues.append(value.substring(1));
            } else {
                if (inclusiveValues.length() != 0) {
                    inclusiveValues.append(SEPARATOR);
                }
                inclusiveValues.append(value);
            }
        }
        if (inclusiveValues.length() != 0) {
            values = inclusiveValues.toString().split(SEPARATOR);
            if (values.length == 1) {
                // Single value: match directly instead of using "$in"
                return isNumericType ? Long.parseLong(values[0]) : values[0];
            } else {
                return new BasicDBObject("$in", isNumericType ? convertToNumericFormat(values) : values);
            }
        } else {
            values = exclusiveValues.toString().split(SEPARATOR);
            return new BasicDBObject("$nin", isNumericType ? convertToNumericFormat(values) : values);
        }
    }

    // Parses every value as a long; used for numeric-typed filters
    private long[] convertToNumericFormat(String[] values) {
        long[] result = new long[values.length];
        for (int i = 0; i < values.length; i++) {
            result[i] = Long.parseLong(values[i]);
        }
        return result;
    }

    /** The date field contains the date of the event. */
    private void setDateFilter(Context clauses, BasicDBObject match) throws ParseException {
        DBObject dateFilter = new BasicDBObject();
        String fromDate = clauses.getAsString(Parameters.FROM_DATE);
        if (fromDate != null) {
            if (Utils.isDateFormat(fromDate)) {
                dateFilter.put("$gte", clauses.getAsDate(Parameters.FROM_DATE).getTimeInMillis());
            } else {
                dateFilter.put("$gte", clauses.getAsLong(Parameters.FROM_DATE));
            }
        }
        String toDate = clauses.getAsString(Parameters.TO_DATE);
        if (toDate != null) {
            if (Utils.isDateFormat(toDate)) {
                // Date-formatted TO_DATE covers the whole day: strictly less
                // than the start of the following day
                dateFilter.put("$lt", clauses.getAsDate(Parameters.TO_DATE).getTimeInMillis() + DAY_IN_MILLISECONDS);
            } else {
                // Raw timestamp TO_DATE is inclusive
                dateFilter.put("$lte", clauses.getAsLong(Parameters.TO_DATE));
            }
        }
        if (dateFilter.keySet().size() > 0) {
            match.put(DATE, dateFilter);
        }
    }

    /**
     * Resolves a profile filter (company/first name/last name) to the ids of
     * matching users via the USERS_PROFILES_LIST metric.
     */
    private String[] getUsers(MetricFilter filter, String pattern) throws IOException {
        Context.Builder builder = new Context.Builder();
        builder.put(filter, pattern);
        Context context = builder.build();
        Metric metric = MetricFactory.getMetric(MetricType.USERS_PROFILES_LIST);
        List<ValueData> users = ValueDataUtil.getAsList(metric, context).getAll();
        String[] result = new String[users.size()];
        for (int i = 0; i < users.size(); i++) {
            MapValueData user = (MapValueData)users.get(i);
            Map<String, ValueData> profile = user.getAll();
            result[i] = profile.get(ID).getAsString();
        }
        return result;
    }

    /**
     * Returns the sequences of operations upon data have been retrieved out of storage.
     * See mongoDB documentation for more information.
     *
     * @param clauses
     *         the execution context
     * @return {@link DBObject} metric-specific stages followed by sort/pagination stages
     */
    public final DBObject[] getDBOperations(Context clauses) {
        return unionDBOperations(getSpecificDBOperations(clauses),
                                 getPaginationDBOperations(clauses));
    }

    /**
     * Provides basic DB operations: sorting and pagination.
     * SORT is "+field" (ascending) or "-field" (descending); when PAGE is set,
     * PER_PAGE must also be set.
     */
    private DBObject[] getPaginationDBOperations(Context clauses) {
        boolean sortExists = clauses.exists(Parameters.SORT);
        boolean pageExists = clauses.exists(Parameters.PAGE);
        DBObject[] dbOp = new DBObject[(sortExists ? 1 : 0) + (pageExists ? 2 : 0)];
        if (sortExists) {
            String sortCondition = clauses.getAsString(Parameters.SORT);
            String field = sortCondition.substring(1);
            boolean asc = sortCondition.substring(0, 1).equals(ASC_SORT_SIGN);
            dbOp[0] = new BasicDBObject("$sort", new BasicDBObject(field, asc ? 1 : -1));
        }
        if (pageExists) {
            long page = clauses.getAsLong(Parameters.PAGE);
            long perPage = clauses.getAsLong(Parameters.PER_PAGE);
            dbOp[sortExists ? 1 : 0] = new BasicDBObject("$skip", (page - 1) * perPage);
            dbOp[sortExists ? 2 : 1] = new BasicDBObject("$limit", perPage);
        }
        return dbOp;
    }

    // Concatenates two pipeline-stage arrays, preserving order
    protected DBObject[] unionDBOperations(DBObject[] dbOp1, DBObject[] dbOp2) {
        DBObject[] result = new DBObject[dbOp1.length + dbOp2.length];
        System.arraycopy(dbOp1, 0, result, 0, dbOp1.length);
        System.arraycopy(dbOp2, 0, result, dbOp1.length, dbOp2.length);
        return result;
    }

    /** @return DB operations specific for given metric */
    public abstract DBObject[] getSpecificDBOperations(Context clauses);
}
|
analytics-core/src/main/java/com/codenvy/analytics/metrics/ReadBasedMetric.java
|
/*
*
* CODENVY CONFIDENTIAL
* ________________
*
* [2012] - [2013] Codenvy, S.A.
* All Rights Reserved.
* NOTICE: All information contained herein is, and remains
* the property of Codenvy S.A. and its suppliers,
* if any. The intellectual and technical concepts contained
* herein are proprietary to Codenvy S.A.
* and its suppliers and may be covered by U.S. and Foreign Patents,
* patents in process, and are protected by trade secret or copyright law.
* Dissemination of this information or reproduction of this material
* is strictly forbidden unless prior written permission is obtained
* from Codenvy S.A..
*/
package com.codenvy.analytics.metrics;
import com.codenvy.analytics.Injector;
import com.codenvy.analytics.Utils;
import com.codenvy.analytics.datamodel.MapValueData;
import com.codenvy.analytics.datamodel.ValueData;
import com.codenvy.analytics.datamodel.ValueDataUtil;
import com.codenvy.analytics.persistent.DataLoader;
import com.codenvy.analytics.persistent.MongoDataStorage;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import java.io.IOException;
import java.text.ParseException;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
/**
* It is supposed to load calculated value {@link com.codenvy.analytics.datamodel.ValueData} from the storage.
*
* @author <a href="mailto:abazko@codenvy.com">Anatoliy Bazko</a>
*/
public abstract class ReadBasedMetric extends AbstractMetric {

    // Prefix that marks a filter value as an exclusion: "~foo" means NOT foo.
    public static final String EXCLUDE_SIGN = "~";
    // Separator for multi-valued filter strings, e.g. "a,b,~c".
    public static final String SEPARATOR = ",";
    public static final long DAY_IN_MILLISECONDS = 86400000L;

    // Matches user names that do NOT start with "ANONYMOUSUSER_" or "DEFAULT".
    public static final Pattern REGISTERED_USER =
            Pattern.compile("^(?!(ANONYMOUSUSER_|DEFAULT)).*", Pattern.CASE_INSENSITIVE);
    // Matches user names that start with "ANONYMOUSUSER_".
    public static final Pattern ANONYMOUS_USER =
            Pattern.compile("^(ANONYMOUSUSER_).*", Pattern.CASE_INSENSITIVE);
    // Matches workspace names other than "DEFAULT".
    public static final Pattern NON_DEFAULT_WS = Pattern.compile("^(?!DEFAULT).*", Pattern.CASE_INSENSITIVE);
    // Persistent workspaces: neither temporary ("TMP-") nor "DEFAULT".
    public static final Pattern PERSISTENT_WS = Pattern.compile("^(?!(TMP-|DEFAULT)).*", Pattern.CASE_INSENSITIVE);
    // Temporary workspaces start with "TMP-".
    public static final Pattern TEMPORARY_WS = Pattern.compile("^(TMP-).*", Pattern.CASE_INSENSITIVE);

    // Leading character of a SORT parameter requesting ascending order.
    public static final String ASC_SORT_SIGN = "+";
    // Suffix of the companion metric holding precomputed values.
    public static final String PRECOMPUTED = "_precomputed";

    public final DataLoader dataLoader;

    public ReadBasedMetric(String metricName) {
        super(metricName);
        MongoDataStorage mongoDataStorage = Injector.getInstance(MongoDataStorage.class);
        // NOTE(review): method name "createdDataLoader" looks like a typo for
        // "createDataLoader" in MongoDataStorage — confirm upstream before renaming.
        this.dataLoader = mongoDataStorage.createdDataLoader();
    }

    public ReadBasedMetric(MetricType metricType) {
        this(metricType.toString());
    }

    @Override
    public ValueData getValue(Context context) throws IOException {
        context = modifyContext(context);
        validateRestrictions(context);
        // Prefer the "<name>_precomputed" metric when no explicit date range is requested.
        if (canReadPrecomputedData(context)) {
            Metric metric = MetricFactory.getMetric(getName() + PRECOMPUTED);
            return metric.getValue(context);
        } else {
            ValueData valueData = dataLoader.loadValue(this, context);
            return postComputation(valueData, context);
        }
    }

    /**
     * Validates restriction before data loading.
     *
     * @param context
     *         the execution context
     */
    private void validateRestrictions(Context context) {
        // A metric annotated with @FilterRequired refuses to run without that filter.
        if (getClass().isAnnotationPresent(FilterRequired.class)) {
            MetricFilter requiredFilter = getClass().getAnnotation(FilterRequired.class).value();
            if (!context.exists(requiredFilter)) {
                throw new MetricRestrictionException(
                        "Parameter " + requiredFilter + " required to be passed to get the value of the metric");
            }
        }
    }

    /**
     * Provides ability to modify the result by adding new fields or changing existed ones.
     * Default implementation returns the value unchanged.
     */
    public ValueData postComputation(ValueData valueData, Context clauses) throws IOException {
        return valueData;
    }

    /** Allows modify context before evaluation if necessary. Default is identity. */
    protected Context modifyContext(Context context) throws IOException {
        return context;
    }

    // Precomputed data is usable only when a precomputed metric exists and
    // the caller did not constrain the date range.
    private boolean canReadPrecomputedData(Context context) {
        return MetricFactory.exists(getName() + PRECOMPUTED)
               && !context.exists(Parameters.FROM_DATE)
               && !context.exists(Parameters.TO_DATE);
    }

    // --------------------------------------------- storage related methods -------------

    /**
     * @return the fields are interested in by given metric. In other words, they are valuable for given metric. It
     *         might returns empty array to read all available fields
     */
    public abstract String[] getTrackedFields();

    public String getStorageCollectionName() {
        return getName().toLowerCase();
    }

    public String getStorageCollectionName(MetricType metricType) {
        return metricType.toString().toLowerCase();
    }

    public String getStorageCollectionName(String metricName) {
        return metricName.toLowerCase();
    }

    /**
     * Returns 'matcher' in term of MongoDB. Basically, it can be treated as 'WHERE' clause in SQL queries.
     * See mongoDB related documentation for more details.
     *
     * @param clauses
     *         the execution context
     * @return {@link DBObject} a "$match" pipeline stage combining the date range
     *         with every filter present in the context
     */
    public DBObject getFilter(Context clauses) throws IOException, ParseException {
        BasicDBObject match = new BasicDBObject();
        setDateFilter(clauses, match);
        for (MetricFilter filter : clauses.getFilters()) {
            if (filter == MetricFilter.USER_COMPANY
                || filter == MetricFilter.USER_FIRST_NAME
                || filter == MetricFilter.USER_LAST_NAME) {
                // Profile-based filters: resolve matching user ids first,
                // then filter the "user" field by that id list.
                String value = clauses.getAsString(filter);
                String[] users = getUsers(filter, value);
                match.put(MetricFilter.USER.name().toLowerCase(), new BasicDBObject("$in", users));
            } else if (filter == MetricFilter.USER) {
                String value = clauses.getAsString(filter);
                Object users;
                if (value.equalsIgnoreCase(Parameters.USER_TYPES.REGISTERED.name())) {
                    users = REGISTERED_USER;
                // NOTE(review): "ANTONYMOUS" spelling is the Parameters enum constant itself.
                } else if (value.equalsIgnoreCase(Parameters.USER_TYPES.ANTONYMOUS.name())) {
                    users = ANONYMOUS_USER;
                } else if (value.equalsIgnoreCase(Parameters.USER_TYPES.ANY.name())) {
                    continue; // ANY imposes no restriction on this filter
                } else {
                    String[] values = value.split(SEPARATOR);
                    users = processExclusiveValues(values, filter.isNumericType());
                }
                match.put(filter.name().toLowerCase(), users);
            } else if (filter == MetricFilter.WS) {
                String value = clauses.getAsString(filter);
                Object ws;
                if (value.equalsIgnoreCase(Parameters.WS_TYPES.PERSISTENT.name())) {
                    ws = PERSISTENT_WS;
                } else if (value.equalsIgnoreCase(Parameters.WS_TYPES.TEMPORARY.name())) {
                    ws = TEMPORARY_WS;
                } else if (value.equalsIgnoreCase(Parameters.WS_TYPES.ANY.name())) {
                    continue; // ANY imposes no restriction on this filter
                } else {
                    String[] values = value.split(SEPARATOR);
                    ws = processExclusiveValues(values, filter.isNumericType());
                }
                match.put(filter.name().toLowerCase(), ws);
            } else if (filter == MetricFilter.FACTORY) {
                Object value = clauses.get(filter);
                match.put(filter.name().toLowerCase(), value); //TODO SUPPORT ARRAY FILTER DASBH-429
            } else if (filter == MetricFilter.PARAMETERS) {
                // Encoded key/value pairs are expanded into individual match fields.
                match.putAll(Utils.fetchEncodedPairs(clauses.getAsString(filter)));
            } else {
                Object value = clauses.get(filter);
                if (value instanceof String) {
                    String[] values = ((String)value).split(SEPARATOR);
                    match.put(filter.name().toLowerCase(), processExclusiveValues(values, filter.isNumericType()));
                } else if (value.getClass().isArray()) {
                    match.put(filter.name().toLowerCase(), new BasicDBObject("$in", value));
                } else {
                    throw new IllegalStateException("Unsupported filter value class " + value.getClass());
                }
            }
        }
        return new BasicDBObject("$match", match);
    }

    /**
     * Splits mixed values into inclusive ("a") and exclusive ("~a") groups.
     * When any inclusive values are present they take precedence ("$in" / plain
     * equality); otherwise the exclusive set is negated with "$nin".
     */
    private Object processExclusiveValues(String[] values, boolean isNumericType) throws IOException, ParseException {
        StringBuilder exclusiveValues = new StringBuilder();
        StringBuilder inclusiveValues = new StringBuilder();
        for (String value : values) {
            if (value.startsWith(EXCLUDE_SIGN)) {
                if (exclusiveValues.length() != 0) {
                    exclusiveValues.append(SEPARATOR);
                }
                exclusiveValues.append(value.substring(1)); // drop the "~"
            } else {
                if (inclusiveValues.length() != 0) {
                    inclusiveValues.append(SEPARATOR);
                }
                inclusiveValues.append(value);
            }
        }
        if (inclusiveValues.length() != 0) {
            values = inclusiveValues.toString().split(SEPARATOR);
            if (values.length == 1) {
                // Single value: plain equality rather than "$in".
                return isNumericType ? Long.parseLong(values[0]) : values[0];
            } else {
                return new BasicDBObject("$in", isNumericType ? convertToNumericFormat(values) : values);
            }
        } else {
            values = exclusiveValues.toString().split(SEPARATOR);
            return new BasicDBObject("$nin", isNumericType ? convertToNumericFormat(values) : values);
        }
    }

    // Parses each string into a long; used for numeric-typed filters.
    private long[] convertToNumericFormat(String[] values) {
        long[] result = new long[values.length];
        for (int i = 0; i < values.length; i++) {
            result[i] = Long.parseLong(values[i]);
        }
        return result;
    }

    /** The date field contains the date of the event. */
    private void setDateFilter(Context clauses, BasicDBObject match) throws ParseException {
        DBObject dateFilter = new BasicDBObject();
        String fromDate = clauses.getAsString(Parameters.FROM_DATE);
        if (fromDate != null) {
            if (Utils.isDateFormat(fromDate)) {
                dateFilter.put("$gte", clauses.getAsDate(Parameters.FROM_DATE).getTimeInMillis());
            } else {
                // Raw millisecond timestamp supplied directly.
                dateFilter.put("$gte", clauses.getAsLong(Parameters.FROM_DATE));
            }
        }
        String toDate = clauses.getAsString(Parameters.TO_DATE);
        if (toDate != null) {
            if (Utils.isDateFormat(toDate)) {
                // Whole-day inclusive upper bound: strictly less than start of next day.
                dateFilter.put("$lt", clauses.getAsDate(Parameters.TO_DATE).getTimeInMillis() + DAY_IN_MILLISECONDS);
            } else {
                dateFilter.put("$lte", clauses.getAsLong(Parameters.TO_DATE));
            }
        }
        if (dateFilter.keySet().size() > 0) {
            match.put(DATE, dateFilter);
        }
    }

    /**
     * Resolves user ids whose profile matches the given filter/pattern by
     * querying the USERS_PROFILES_LIST metric.
     */
    private String[] getUsers(MetricFilter filter, String pattern) throws IOException {
        Context.Builder builder = new Context.Builder();
        builder.put(filter, pattern);
        Context context = builder.build();
        Metric metric = MetricFactory.getMetric(MetricType.USERS_PROFILES_LIST);
        List<ValueData> users = ValueDataUtil.getAsList(metric, context).getAll();
        String[] result = new String[users.size()];
        for (int i = 0; i < users.size(); i++) {
            MapValueData user = (MapValueData)users.get(i);
            Map<String, ValueData> profile = user.getAll();
            result[i] = profile.get(ID).getAsString();
        }
        return result;
    }

    /**
     * Returns the sequences of operations upon data have been retrieved out of storage.
     * See mongoDB documentation for more information.
     *
     * @param clauses
     *         the execution context
     * @return {@link DBObject} metric-specific stages followed by sorting/pagination stages
     */
    public final DBObject[] getDBOperations(Context clauses) {
        return unionDBOperations(getSpecificDBOperations(clauses),
                                 getPaginationDBOperations(clauses));
    }

    /** Provides basic DB operations: sorting and pagination. */
    private DBObject[] getPaginationDBOperations(Context clauses) {
        boolean sortExists = clauses.exists(Parameters.SORT);
        boolean pageExists = clauses.exists(Parameters.PAGE);
        // One slot for "$sort" (optional) plus two for "$skip"/"$limit" (optional).
        DBObject[] dbOp = new DBObject[(sortExists ? 1 : 0) + (pageExists ? 2 : 0)];
        if (sortExists) {
            // SORT is "<sign><field>", e.g. "+date" ascending or "-date" descending.
            String sortCondition = clauses.getAsString(Parameters.SORT);
            String field = sortCondition.substring(1);
            boolean asc = sortCondition.substring(0, 1).equals(ASC_SORT_SIGN);
            dbOp[0] = new BasicDBObject("$sort", new BasicDBObject(field, asc ? 1 : -1));
        }
        if (pageExists) {
            long page = clauses.getAsLong(Parameters.PAGE);
            long perPage = clauses.getAsLong(Parameters.PER_PAGE);
            dbOp[sortExists ? 1 : 0] = new BasicDBObject("$skip", (page - 1) * perPage);
            dbOp[sortExists ? 2 : 1] = new BasicDBObject("$limit", perPage);
        }
        return dbOp;
    }

    // Concatenates the two operation arrays, dbOp1 first.
    protected DBObject[] unionDBOperations(DBObject[] dbOp1, DBObject[] dbOp2) {
        DBObject[] result = new DBObject[dbOp1.length + dbOp2.length];
        System.arraycopy(dbOp1, 0, result, 0, dbOp1.length);
        System.arraycopy(dbOp2, 0, result, dbOp1.length, dbOp2.length);
        return result;
    }

    /** @return DB operations specific for given metric */
    public abstract DBObject[] getSpecificDBOperations(Context clauses);
}
|
clean up
|
analytics-core/src/main/java/com/codenvy/analytics/metrics/ReadBasedMetric.java
|
clean up
|
|
Java
|
agpl-3.0
|
a613f425b8290e382df9cea1a4ab54055e619f14
| 0
|
wfxiang08/sql-layer-1,qiuyesuifeng/sql-layer,qiuyesuifeng/sql-layer,ngaut/sql-layer,qiuyesuifeng/sql-layer,jaytaylor/sql-layer,relateiq/sql-layer,relateiq/sql-layer,qiuyesuifeng/sql-layer,relateiq/sql-layer,ngaut/sql-layer,jaytaylor/sql-layer,shunwang/sql-layer-1,jaytaylor/sql-layer,wfxiang08/sql-layer-1,relateiq/sql-layer,wfxiang08/sql-layer-1,shunwang/sql-layer-1,ngaut/sql-layer,shunwang/sql-layer-1,ngaut/sql-layer,wfxiang08/sql-layer-1,shunwang/sql-layer-1,jaytaylor/sql-layer
|
/**
* END USER LICENSE AGREEMENT (“EULA”)
*
* READ THIS AGREEMENT CAREFULLY (date: 9/13/2011):
* http://www.akiban.com/licensing/20110913
*
* BY INSTALLING OR USING ALL OR ANY PORTION OF THE SOFTWARE, YOU ARE ACCEPTING
* ALL OF THE TERMS AND CONDITIONS OF THIS AGREEMENT. YOU AGREE THAT THIS
* AGREEMENT IS ENFORCEABLE LIKE ANY WRITTEN AGREEMENT SIGNED BY YOU.
*
* IF YOU HAVE PAID A LICENSE FEE FOR USE OF THE SOFTWARE AND DO NOT AGREE TO
* THESE TERMS, YOU MAY RETURN THE SOFTWARE FOR A FULL REFUND PROVIDED YOU (A) DO
* NOT USE THE SOFTWARE AND (B) RETURN THE SOFTWARE WITHIN THIRTY (30) DAYS OF
* YOUR INITIAL PURCHASE.
*
* IF YOU WISH TO USE THE SOFTWARE AS AN EMPLOYEE, CONTRACTOR, OR AGENT OF A
* CORPORATION, PARTNERSHIP OR SIMILAR ENTITY, THEN YOU MUST BE AUTHORIZED TO SIGN
* FOR AND BIND THE ENTITY IN ORDER TO ACCEPT THE TERMS OF THIS AGREEMENT. THE
* LICENSES GRANTED UNDER THIS AGREEMENT ARE EXPRESSLY CONDITIONED UPON ACCEPTANCE
* BY SUCH AUTHORIZED PERSONNEL.
*
* IF YOU HAVE ENTERED INTO A SEPARATE WRITTEN LICENSE AGREEMENT WITH AKIBAN FOR
* USE OF THE SOFTWARE, THE TERMS AND CONDITIONS OF SUCH OTHER AGREEMENT SHALL
* PREVAIL OVER ANY CONFLICTING TERMS OR CONDITIONS IN THIS AGREEMENT.
*/
package com.akiban.server.aggregation.std;
import com.akiban.junit.OnlyIfNot;
import com.akiban.server.types.NullValueSource;
import java.util.EnumSet;
import com.akiban.junit.Parameterization;
import java.util.Collection;
import com.akiban.junit.ParameterizationBuilder;
import org.junit.runner.RunWith;
import com.akiban.junit.NamedParameterizedRunner;
import java.math.BigInteger;
import java.math.BigDecimal;
import com.akiban.server.aggregation.Aggregator;
import com.akiban.server.types.AkType;
import com.akiban.server.types.util.ValueHolder;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
@RunWith(NamedParameterizedRunner.class)
public class BitAggregatorsTest
{
    /**
     * One driver per supported input type. Each member pumps a column of values
     * of that type into the aggregator under test and reads the result back as
     * an unsigned BigInteger.
     */
    protected static enum Types
    {
        DOUBLE
        {
            @Override
            public BigInteger aggregate_test1(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                // feed twenty distinct values: 0..19
                for (int n = 0; n < 20; ++n)
                {
                    holder.putDouble(n);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE); // just to change the type of holder to u_bigint
                aggregator.output(holder);
                return holder.getUBigInt();
            }
            @Override
            public BigInteger aggregate_test2(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                // feed the same value (5) five times
                for (int n = 0; n < 5; ++n)
                {
                    holder.putDouble(5);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
        },
        FLOAT
        {
            // same pattern as DOUBLE, feeding float values
            public BigInteger aggregate_test1(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 20; ++n)
                {
                    holder.putFloat(n);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
            public BigInteger aggregate_test2(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 5; ++n)
                {
                    holder.putFloat(5);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
        },
        LONG
        {
            // same pattern as DOUBLE, feeding long values
            public BigInteger aggregate_test1(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 20; ++n)
                {
                    holder.putLong(n);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
            public BigInteger aggregate_test2(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 5; ++n)
                {
                    holder.putLong(5);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
        },
        INT
        {
            // same pattern as DOUBLE, feeding int values
            public BigInteger aggregate_test1(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 20; ++n)
                {
                    holder.putInt(n);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
            public BigInteger aggregate_test2(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 5; ++n)
                {
                    holder.putInt(5);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
        },
        DECIMAL
        {
            // same pattern as DOUBLE, feeding BigDecimal values
            public BigInteger aggregate_test1(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 20; ++n)
                {
                    holder.putDecimal(BigDecimal.valueOf(n));
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
            public BigInteger aggregate_test2(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 5; ++n)
                {
                    holder.putDecimal(BigDecimal.valueOf(5));
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
        },
        U_BIGINT
        {
            // same pattern as DOUBLE, feeding unsigned BigInteger values
            public BigInteger aggregate_test1(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 20; ++n)
                {
                    holder.putUBigInt(BigInteger.valueOf(n));
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
            public BigInteger aggregate_test2(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 5; ++n)
                {
                    holder.putUBigInt(BigInteger.valueOf(5));
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
        };
        // aggregate_test1: distinct values 0..19; aggregate_test2: the value 5, five times.
        abstract BigInteger aggregate_test1 (Aggregator aggregator, ValueHolder holder);
        abstract BigInteger aggregate_test2 (Aggregator aggregator, ValueHolder holder);
    }

    // Input types the bit aggregators are tested against.
    protected static final EnumSet<AkType> SUPPORTED = EnumSet.of(AkType.LONG,
                                                                  AkType.DOUBLE,
                                                                  AkType.INT,
                                                                  AkType.U_BIGINT,
                                                                  AkType.DECIMAL,
                                                                  AkType.FLOAT);

    // NOTE(review): shared mutable holder reused across all parameterizations/tests.
    private static ValueHolder holder = new ValueHolder();
    // 64 one-bits: the identity value produced by BIT_AND with no (non-null) input.
    private static final BigInteger N64 = new BigInteger("FFFFFFFFFFFFFFFF", 16);
    // Guards the non-parameterized tests so they run for only one parameterization.
    private static boolean alreadyExc = false;

    // Parameterization inputs — fixed for the lifetime of a test instance.
    private final Aggregator aggregator1, aggregator2;
    // Working aggregator actually exercised by each test body.
    private Aggregator aggregator;
    private final BigInteger expected1, expected2;
    // Working expected value for the current test body.
    private BigInteger expected;
    private final Types type;

    public BitAggregatorsTest (Aggregator aggregator, BigInteger expected1, BigInteger expected2, Types type)
    {
        // Both final fields hold the same aggregator instance from the parameterization.
        this.aggregator1 = aggregator;
        this.aggregator2 = aggregator;
        this.expected1 = expected1;
        this.expected2 = expected2;
        this.type = type;
    }

    @NamedParameterizedRunner.TestParameters
    public static Collection<Parameterization> params()
    {
        // For each supported type, test bit_and, bit_or and bit_xor with
        // expected results for the two input columns.
        ParameterizationBuilder pb = new ParameterizationBuilder();
        BigInteger bigInt5 = BigInteger.valueOf(5);
        for (AkType t: SUPPORTED)
        {
            param(pb, Aggregators.bit_and("bit_and", t).get(), BigInteger.ZERO, bigInt5, Types.valueOf(t.name()));
            param(pb, Aggregators.bit_or("bit_or", t).get(), BigInteger.valueOf(31), bigInt5, Types.valueOf(t.name()));
            param(pb, Aggregators.bit_xor("bit_xor", t).get(), BigInteger.ZERO, bigInt5, Types.valueOf(t.name()));
        }
        return pb.asList();
    }

    // Registers one named parameterization.
    private static void param(ParameterizationBuilder pb, Aggregator agg, BigInteger expected1,
                              BigInteger expected2, Types type)
    {
        pb.add(agg.toString() + type, agg, expected1, expected2, type);
    }

    /** Aggregating 0..19 must yield the first expected value. */
    @Test
    public void testColumnOfDifferentValues ()
    {
        aggregator = aggregator1;
        expected = expected1;
        assertEquals(expected, type.aggregate_test1(aggregator, holder));
    }

    /** Aggregating five 5s must yield the second expected value. */
    @Test
    public void testColumnOfSameValues ()
    {
        aggregator = aggregator2;
        expected = expected2;
        assertEquals(expected, type.aggregate_test2(aggregator, holder));
    }

    /** BIT_AND over only-null input yields all 64 bits set. */
    @OnlyIfNot("alreadyExc()")
    @Test
    public void testNullAnd ()
    {
        aggregator = Aggregators.bit_and("bit_and", AkType.U_BIGINT).get();
        expected = N64;
        testNull();
    }

    /** Sanity check of and/or/xor over the two values 1 and 3. */
    @OnlyIfNot("alreadyExc()")
    @Test
    public void testAndOrXor()
    {
        // test and
        aggregator = Aggregators.bit_and("bit_and", AkType.U_BIGINT).get();
        expected = BigInteger.valueOf(1);
        testNonParam();
        // test or
        aggregator = Aggregators.bit_or("bit_or", AkType.U_BIGINT).get();
        expected = BigInteger.valueOf(3);
        testNonParam();
        // test xor
        aggregator = Aggregators.bit_xor("bit_xor", AkType.U_BIGINT).get();
        expected = BigInteger.valueOf(2);
        testNonParam();
    }

    // Feeds 1 and 3 into the current aggregator and checks the result.
    private void testNonParam ()
    {
        holder.clear();
        holder.putUBigInt(BigInteger.ONE);
        aggregator.input(holder);
        holder.putUBigInt(BigInteger.valueOf(3));
        aggregator.input(holder);
        holder.clear();
        holder.putUBigInt(N64);
        aggregator.output(holder);
        assertEquals(expected, holder.getUBigInt());
    }

    /** BIT_OR over only-null input yields zero. */
    @OnlyIfNot("alreadyExc()")
    @Test
    public void testNullOr ()
    {
        aggregator = Aggregators.bit_or("bit_or", AkType.U_BIGINT).get();
        expected = BigInteger.ZERO;
        testNull();
    }

    /** BIT_XOR over only-null input yields zero; also trips the one-shot guard. */
    @OnlyIfNot("alreadyExc()")
    @Test
    public void testNullXOr ()
    {
        aggregator = Aggregators.bit_xor("bit_xor", AkType.U_BIGINT).get();
        expected = BigInteger.ZERO;
        testNull();
        alreadyExc = true;
    }

    // Predicate consulted by @OnlyIfNot to skip repeat runs.
    public boolean alreadyExc ()
    {
        return alreadyExc;
    }

    // Feeds a single null into the current aggregator and checks the result.
    private void testNull ()
    {
        holder.clear();
        aggregator.input(NullValueSource.only());
        holder.expectType(AkType.U_BIGINT);
        aggregator.output(holder);
        assertEquals(expected, holder.getUBigInt());
    }
}
|
src/test/java/com/akiban/server/aggregation/std/BitAggregatorsTest.java
|
/**
* END USER LICENSE AGREEMENT (“EULA”)
*
* READ THIS AGREEMENT CAREFULLY (date: 9/13/2011):
* http://www.akiban.com/licensing/20110913
*
* BY INSTALLING OR USING ALL OR ANY PORTION OF THE SOFTWARE, YOU ARE ACCEPTING
* ALL OF THE TERMS AND CONDITIONS OF THIS AGREEMENT. YOU AGREE THAT THIS
* AGREEMENT IS ENFORCEABLE LIKE ANY WRITTEN AGREEMENT SIGNED BY YOU.
*
* IF YOU HAVE PAID A LICENSE FEE FOR USE OF THE SOFTWARE AND DO NOT AGREE TO
* THESE TERMS, YOU MAY RETURN THE SOFTWARE FOR A FULL REFUND PROVIDED YOU (A) DO
* NOT USE THE SOFTWARE AND (B) RETURN THE SOFTWARE WITHIN THIRTY (30) DAYS OF
* YOUR INITIAL PURCHASE.
*
* IF YOU WISH TO USE THE SOFTWARE AS AN EMPLOYEE, CONTRACTOR, OR AGENT OF A
* CORPORATION, PARTNERSHIP OR SIMILAR ENTITY, THEN YOU MUST BE AUTHORIZED TO SIGN
* FOR AND BIND THE ENTITY IN ORDER TO ACCEPT THE TERMS OF THIS AGREEMENT. THE
* LICENSES GRANTED UNDER THIS AGREEMENT ARE EXPRESSLY CONDITIONED UPON ACCEPTANCE
* BY SUCH AUTHORIZED PERSONNEL.
*
* IF YOU HAVE ENTERED INTO A SEPARATE WRITTEN LICENSE AGREEMENT WITH AKIBAN FOR
* USE OF THE SOFTWARE, THE TERMS AND CONDITIONS OF SUCH OTHER AGREEMENT SHALL
* PREVAIL OVER ANY CONFLICTING TERMS OR CONDITIONS IN THIS AGREEMENT.
*/
package com.akiban.server.aggregation.std;
import com.akiban.junit.OnlyIfNot;
import com.akiban.server.types.NullValueSource;
import java.util.EnumSet;
import com.akiban.junit.Parameterization;
import java.util.Collection;
import com.akiban.junit.ParameterizationBuilder;
import org.junit.runner.RunWith;
import com.akiban.junit.NamedParameterizedRunner;
import java.math.BigInteger;
import java.math.BigDecimal;
import com.akiban.server.aggregation.Aggregator;
import com.akiban.server.types.AkType;
import com.akiban.server.types.util.ValueHolder;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
@RunWith(NamedParameterizedRunner.class)
public class BitAggregatorsTest
{
    /**
     * One driver per supported input type. Each member pumps a column of values
     * of that type into the aggregator under test and reads the result back as
     * an unsigned BigInteger.
     */
    protected static enum Types
    {
        DOUBLE
        {
            @Override
            public BigInteger aggregate_test1(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                // feed twenty distinct values: 0..19
                for (int n = 0; n < 20; ++n)
                {
                    holder.putDouble(n);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE); // just to change the type of holder to u_bigint
                aggregator.output(holder);
                return holder.getUBigInt();
            }
            @Override
            public BigInteger aggregate_test2(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                // feed the same value (5) five times
                for (int n = 0; n < 5; ++n)
                {
                    holder.putDouble(5);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
        },
        FLOAT
        {
            // same pattern as DOUBLE, feeding float values
            public BigInteger aggregate_test1(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 20; ++n)
                {
                    holder.putFloat(n);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
            public BigInteger aggregate_test2(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 5; ++n)
                {
                    holder.putFloat(5);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
        },
        LONG
        {
            // same pattern as DOUBLE, feeding long values
            public BigInteger aggregate_test1(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 20; ++n)
                {
                    holder.putLong(n);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
            public BigInteger aggregate_test2(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 5; ++n)
                {
                    holder.putLong(5);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
        },
        INT
        {
            // same pattern as DOUBLE, feeding int values
            public BigInteger aggregate_test1(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 20; ++n)
                {
                    holder.putInt(n);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
            public BigInteger aggregate_test2(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 5; ++n)
                {
                    holder.putInt(5);
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
        },
        DECIMAL
        {
            // same pattern as DOUBLE, feeding BigDecimal values
            public BigInteger aggregate_test1(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 20; ++n)
                {
                    holder.putDecimal(BigDecimal.valueOf(n));
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
            public BigInteger aggregate_test2(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 5; ++n)
                {
                    holder.putDecimal(BigDecimal.valueOf(5));
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
        },
        U_BIGINT
        {
            // same pattern as DOUBLE, feeding unsigned BigInteger values
            public BigInteger aggregate_test1(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 20; ++n)
                {
                    holder.putUBigInt(BigInteger.valueOf(n));
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
            public BigInteger aggregate_test2(Aggregator aggregator, ValueHolder holder)
            {
                holder.clear();
                for (int n = 0; n < 5; ++n)
                {
                    holder.putUBigInt(BigInteger.valueOf(5));
                    aggregator.input(holder);
                }
                holder.putUBigInt(BigInteger.ONE);
                aggregator.output(holder);
                return holder.getUBigInt();
            }
        };
        // aggregate_test1: distinct values 0..19; aggregate_test2: the value 5, five times.
        abstract BigInteger aggregate_test1 (Aggregator aggregator, ValueHolder holder);
        abstract BigInteger aggregate_test2 (Aggregator aggregator, ValueHolder holder);
    }

    // Input types the bit aggregators are tested against.
    protected static final EnumSet<AkType> SUPPORTED = EnumSet.of(AkType.LONG,
                                                                  AkType.DOUBLE,
                                                                  AkType.INT,
                                                                  AkType.U_BIGINT,
                                                                  AkType.DECIMAL,
                                                                  AkType.FLOAT);

    // NOTE(review): shared mutable holder reused across all parameterizations/tests.
    private static ValueHolder holder = new ValueHolder();
    // 64 one-bits: the identity value produced by BIT_AND with no (non-null) input.
    private static final BigInteger N64 = new BigInteger("FFFFFFFFFFFFFFFF", 16);
    // Guards the non-parameterized tests so they run for only one parameterization.
    private static boolean alreadyExc = false;

    // NOTE(review): these fields are both parameterization inputs AND mutated by
    // the non-parameterized tests below — they double as working state.
    private Aggregator aggregator;
    private BigInteger expected1;
    private BigInteger expected2;
    private Types type;

    public BitAggregatorsTest (Aggregator aggregator, BigInteger expected1, BigInteger expected2, Types type)
    {
        this.aggregator = aggregator;
        this.expected1 = expected1;
        this.expected2 = expected2;
        this.type = type;
    }

    @NamedParameterizedRunner.TestParameters
    public static Collection<Parameterization> params()
    {
        // For each supported type, test bit_and, bit_or and bit_xor with
        // expected results for the two input columns.
        ParameterizationBuilder pb = new ParameterizationBuilder();
        BigInteger bigInt5 = BigInteger.valueOf(5);
        for (AkType t: SUPPORTED)
        {
            param(pb, Aggregators.bit_and("bit_and", t).get(), BigInteger.ZERO, bigInt5, Types.valueOf(t.name()));
            param(pb, Aggregators.bit_or("bit_or", t).get(), BigInteger.valueOf(31), bigInt5, Types.valueOf(t.name()));
            param(pb, Aggregators.bit_xor("bit_xor", t).get(), BigInteger.ZERO, bigInt5, Types.valueOf(t.name()));
        }
        return pb.asList();
    }

    // Registers one named parameterization.
    private static void param(ParameterizationBuilder pb, Aggregator agg, BigInteger expected1,
                              BigInteger expected2, Types type)
    {
        pb.add(agg.toString() + type, agg, expected1, expected2, type);
    }

    /** Aggregating 0..19 must yield the first expected value. */
    @Test
    public void testColumnOfDifferentValues ()
    {
        assertEquals(expected1, type.aggregate_test1(aggregator, holder));
    }

    /** Aggregating five 5s must yield the second expected value. */
    @Test
    public void testColumnOfSameValues ()
    {
        assertEquals(expected2, type.aggregate_test2(aggregator, holder));
    }

    /** BIT_AND over only-null input yields all 64 bits set. */
    @OnlyIfNot("alreadyExc()")
    @Test
    public void testNullAnd ()
    {
        aggregator = Aggregators.bit_and("bit_and", AkType.U_BIGINT).get();
        expected1 = N64;
        testNull();
    }

    /** Sanity check of and/or/xor over the two values 1 and 3. */
    @OnlyIfNot("alreadyExc()")
    @Test
    public void testAndOrXor()
    {
        // test and
        aggregator = Aggregators.bit_and("bit_and", AkType.U_BIGINT).get();
        expected1 = BigInteger.valueOf(1);
        testNonParam();
        // test or
        aggregator = Aggregators.bit_or("bit_or", AkType.U_BIGINT).get();
        expected1 = BigInteger.valueOf(3);
        testNonParam();
        // test xor
        aggregator = Aggregators.bit_xor("bit_xor", AkType.U_BIGINT).get();
        expected1 = BigInteger.valueOf(2);
        testNonParam();
    }

    // Feeds 1 and 3 into the current aggregator and checks the result.
    private void testNonParam ()
    {
        holder.clear();
        holder.putUBigInt(BigInteger.ONE);
        aggregator.input(holder);
        holder.putUBigInt(BigInteger.valueOf(3));
        aggregator.input(holder);
        holder.clear();
        holder.putUBigInt(N64);
        aggregator.output(holder);
        assertEquals(expected1, holder.getUBigInt());
    }

    /** BIT_OR over only-null input yields zero. */
    @OnlyIfNot("alreadyExc()")
    @Test
    public void testNullOr ()
    {
        aggregator = Aggregators.bit_or("bit_or", AkType.U_BIGINT).get();
        expected1 = BigInteger.ZERO;
        testNull();
    }

    /** BIT_XOR over only-null input yields zero; also trips the one-shot guard. */
    @OnlyIfNot("alreadyExc()")
    @Test
    public void testNullXOr ()
    {
        aggregator = Aggregators.bit_xor("bit_xor", AkType.U_BIGINT).get();
        expected1 = BigInteger.ZERO;
        testNull();
        alreadyExc = true;
    }

    // Predicate consulted by @OnlyIfNot to skip repeat runs.
    public boolean alreadyExc ()
    {
        return alreadyExc;
    }

    // Feeds a single null into the current aggregator and checks the result.
    private void testNull ()
    {
        holder.clear();
        aggregator.input(NullValueSource.only());
        holder.expectType(AkType.U_BIGINT);
        aggregator.output(holder);
        assertEquals(expected1, holder.getUBigInt());
    }
}
|
Separate parameterization fields from working test fields.
|
src/test/java/com/akiban/server/aggregation/std/BitAggregatorsTest.java
|
Separate parameterization fields from working test fields.
|
|
Java
|
agpl-3.0
|
90a77047e3cfee771a61bca987563fa901813500
| 0
|
headsupdev/agile,headsupdev/agile,headsupdev/agile,headsupdev/agile
|
/*
* HeadsUp Agile
* Copyright 2009-2013 Heads Up Development Ltd.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.headsupdev.agile.storage;
import org.headsupdev.agile.api.*;
import org.headsupdev.agile.api.rest.Publish;
import org.headsupdev.support.java.IOUtil;
import org.hibernate.annotations.Type;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Indexed;
import javax.persistence.Entity;
import javax.persistence.DiscriminatorValue;
import java.io.*;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
/**
* TODO Document me!
*
* @author Andrew Williams
* @version $Id$
* @since 1.0
*/
@Entity
@DiscriminatorValue( "xcode" )
@Indexed( index = "XCodeProjects" )
public class StoredXCodeProject
extends StoredProject
implements XCodeProject
{
@Field(index = Index.TOKENIZED)
@Publish
protected String version;
@Field(index = Index.TOKENIZED)
@Publish
protected String bundleId;
@Field(index = Index.TOKENIZED)
@Publish
protected String platform;
@Type( type = "text" )
@Publish
protected String dependencies;
// No-arg constructor; presumably required by Hibernate for this @Entity — confirm.
public StoredXCodeProject()
{
}
/**
 * Builds the project from the given XCode project file, deriving the
 * project id from the parsed project name.
 *
 * @param projectFile the pbxproj file to parse
 */
public StoredXCodeProject( File projectFile )
{
    this( projectFile, null );
}
/**
 * Builds the project from the given XCode project file.
 *
 * @param projectFile the pbxproj file to parse
 * @param id          explicit project id, or null to derive one from the
 *                    parsed project name
 */
public StoredXCodeProject( File projectFile, String id )
{
    this.id = id;
    // Parse first: when id is null it is derived from the name set during parsing.
    loadFromProjectFile( projectFile );
    if ( id == null )
    {
        this.id = encodeId(name);
    }
}
/**
 * Strips the XCode $(SRCROOT)/ variable prefix so paths become
 * project-relative.
 *
 * @param in raw value from the project file
 * @return the value with every "$(SRCROOT)/" occurrence removed
 */
protected String replaceVariables( String in )
{
    final String srcRootPrefix = "$(SRCROOT)/";
    return in.replace( srcRootPrefix, "" );
}
/**
 * Removes any trailing "/* ... *&#47;" style comment from a project-file
 * line and trims surrounding whitespace.
 *
 * @param in raw line content
 * @return the content before the first comment marker, trimmed
 */
protected String stripComments( String in )
{
    int commentStart = in.indexOf( "/*" );
    String withoutComment = ( commentStart < 0 ) ? in : in.substring( 0, commentStart );
    return withoutComment.trim();
}
/**
 * Extracts the value of a "key = value;" entry from an XCode project-file
 * line: takes the text between "=" and the following ";" (or to end of line),
 * unwraps surrounding double quotes, expands variables and strips comments.
 *
 * @param line a project-file line containing an assignment
 * @return the cleaned-up value
 */
private String getProjectFileValue( String line )
{
    int valueStart = line.indexOf( "=" ) + 1;
    int terminator = line.indexOf( ";", valueStart );
    String value = ( terminator == -1 )
            ? line.substring( valueStart )
            : line.substring( valueStart, terminator );
    value = value.trim();

    // Unwrap surrounding double quotes, if present.
    if ( value.length() > 2 && value.charAt( 0 ) == '"' && value.charAt( value.length() - 1 ) == '"' )
    {
        value = value.substring( 1, value.length() - 1 );
    }
    return stripComments( replaceVariables( value ) );
}
/**
 * Returns the first target listed in the given XCode project file, with any
 * trailing comment stripped, or null if the file cannot be read or contains
 * no "targets = (" section.
 *
 * @param file the pbxproj file to scan
 * @return the first target entry, or null when not found
 */
protected String getFirstTarget( File file )
{
    BufferedReader in = null;
    try
    {
        in = new BufferedReader( new InputStreamReader( new FileInputStream( file ) ) );
        String line;
        while ( ( line = in.readLine() ) != null && line.indexOf( "targets = (" ) == -1 )
        {
            // skip lines until the start of the targets list
        }
        // Fix: if the marker is missing or sits at end-of-file, readLine()
        // returns null and stripComments(null) would throw a NullPointerException.
        String firstTarget = in.readLine();
        return firstTarget == null ? null : stripComments( firstTarget );
    }
    catch ( IOException e )
    {
        Manager.getLogger( getClass().getName() ).error( "Error parsing xcode metadata", e );
    }
    finally
    {
        if ( in != null )
        {
            IOUtil.close( in );
        }
    }
    return null;
}
/**
 * Returns the first build configuration id in the named configuration list,
 * or null if the file cannot be read or the list/section is not found.
 * Skips to the second occurrence of the list id (the definition), then to
 * its "buildConfigurations" entry.
 *
 * @param file            the pbxproj file to scan
 * @param buildConfigList id of the build configuration list
 * @return the first configuration entry, or null when not found
 */
protected String getFirstBuildConfigurationInList( File file, String buildConfigList )
{
    BufferedReader in = null;
    try
    {
        in = new BufferedReader( new InputStreamReader( new FileInputStream( file ) ) );
        String line;
        while ( ( line = in.readLine() ) != null && line.indexOf( buildConfigList ) == -1 )
        {
            // skip to the first mention (a reference) of the list id
        }
        while ( ( line = in.readLine() ) != null && line.indexOf( buildConfigList ) == -1 )
        {
            // skip to the second mention (the definition) of the list id
        }
        while ( ( line = in.readLine() ) != null && line.indexOf( "buildConfigurations" ) == -1 )
        {
            // skip to the buildConfigurations entry of the definition
        }
        // Fix: any of the markers may be missing; readLine() then returns null
        // and stripComments(null) would throw a NullPointerException.
        String firstConfiguration = in.readLine();
        return firstConfiguration == null ? null : stripComments( firstConfiguration );
    }
    catch ( IOException e )
    {
        Manager.getLogger( getClass().getName() ).error( "Error parsing xcode metadata", e );
    }
    finally
    {
        if ( in != null )
        {
            IOUtil.close( in );
        }
    }
    return null;
}
/**
 * Finds the id of the build configuration list used by the given target.
 * Scans to the target declaration, then to its "buildConfigurationList"
 * entry and extracts the value of that assignment.
 *
 * @param file the project.pbxproj file to scan
 * @param target the target id whose configuration list is wanted
 * @return the configuration list id, or null if not found or on error
 */
protected String getBuildConfigurationListForTarget( File file, String target )
{
    BufferedReader in = null;
    try
    {
        in = new BufferedReader( new InputStreamReader( new FileInputStream( file ) ) );
        String line;
        while ( ( line = in.readLine() ) != null && line.indexOf( target ) == -1 )
        {
            // skip to the target declaration
        }
        while ( ( line = in.readLine() ) != null && line.indexOf( "buildConfigurationList" ) == -1 )
        {
            // skip to its buildConfigurationList entry
        }
        // guard: previously a missing entry (line == null at EOF) caused a
        // NullPointerException inside getProjectFileValue
        if ( line == null )
        {
            return null;
        }
        return getProjectFileValue( line );
    }
    catch ( IOException e )
    {
        Manager.getLogger( getClass().getName() ).error( "Error parsing xcode metadata", e );
    }
    finally
    {
        if ( in != null )
        {
            IOUtil.close( in );
        }
    }
    return null;
}
/**
 * Parses project metadata (product name and Info.plist location) out of the
 * given project.pbxproj file, then loads further details from the Info.plist
 * and from any CocoaPods Podfile found beside the xcodeproj directory.
 *
 * NOTE(review): if the parse chain above yields a null configuration, the
 * line.indexOf( configuration ) call below will throw a NullPointerException
 * - confirm this cannot happen for valid project files.
 */
protected void loadFromProjectFile( File projectFile )
{
    Manager.getLogger( getClass().getName() ).info( "Parsing XCode metadata from " + projectFile.getPath() );
    // locate the first build configuration of the first target - that block
    // holds the INFOPLIST_FILE and PRODUCT_NAME settings we need
    String firstTarget = getFirstTarget( projectFile );
    String buildConfigurationList = getBuildConfigurationListForTarget( projectFile, firstTarget );
    String configuration = getFirstBuildConfigurationInList( projectFile, buildConfigurationList );
    BufferedReader in = null;
    try
    {
        in = new BufferedReader( new InputStreamReader( new FileInputStream( projectFile ) ) );
        // skip to buildConfiguration
        String line;
        while ( ( line = in.readLine() ) != null && line.indexOf( configuration ) == -1 )
        {
            // ignore these lines
        }
        String infoFileName = null;
        while ( ( line = in.readLine() ) != null )
        {
            if ( line.indexOf( "INFOPLIST_FILE =" ) != -1 )
            {
                infoFileName = getProjectFileValue( line );
            }
            else if ( line.indexOf( "PRODUCT_NAME =" ) != -1 )
            {
                name = getProjectFileValue( line );
                break; // note that if we parse further blocks a break will not be enough
            }
        }
        IOUtil.close( in );
        in = null;
        // resolve the Info.plist - either the declared path, or the first
        // file ending in "Info.plist" found beside the xcodeproj directory
        File infoFile = null;
        if ( infoFileName != null )
        {
            infoFile = new File( projectFile.getParentFile().getParentFile(), infoFileName );
        }
        else
        {
            for ( File possible : projectFile.getParentFile().getParentFile().listFiles() )
            {
                if ( possible.getName().endsWith( "Info.plist" ) )
                {
                    infoFile = possible;
                    break;
                }
            }
        }
        loadFromInfoFile( infoFile );
        File podFile = new File( projectFile.getParentFile().getParentFile(), "Podfile" );
        loadFromPodFile( podFile );
    }
    catch ( IOException e )
    {
        Manager.getLogger( getClass().getName() ).error( "Error parsing xcode metadata", e );
    }
    finally
    {
        if ( in != null )
        {
            IOUtil.close( in );
        }
    }
}
/**
 * Reads version, bundle id and platform details from the project's
 * Info.plist. CFBundleShortVersionString takes precedence over
 * CFBundleVersion; the platform defaults to Mac OS X and is switched to iOS
 * when the LSRequiresIPhoneOS key is present. A null or missing file is
 * silently ignored.
 *
 * NOTE(review): this is a line based scan - it assumes each plist value sits
 * on the line following its key, and that CFBundleShortVersionString appears
 * before CFBundleVersion when both exist; confirm against real plists.
 */
protected void loadFromInfoFile( File infoFile )
{
    BufferedReader in = null;
    try
    {
        if ( infoFile != null && infoFile.exists() )
        {
            Manager.getLogger( getClass().getName() ).info( "Loading extra XCode metadata from " +
                infoFile.getPath() );
            boolean foundShortString = false;
            platform = XCODE_PLATFORM_MACOSX;
            String line;
            in = new BufferedReader( new InputStreamReader( new FileInputStream( infoFile ) ) );
            while ( ( line = in.readLine() ) != null )
            {
                if ( line.contains( ">CFBundleShortVersionString<") )
                {
                    foundShortString = true;
                    // the value element follows on the next line
                    String versionString = in.readLine();
                    int start = versionString.indexOf( '>' ) + 1;
                    int stop = versionString.indexOf( '<', start );
                    this.version = versionString.substring( start, stop );
                }
                else if ( line.contains( ">CFBundleVersion<") )
                {
                    // only a fallback when no short version string was seen
                    if ( !foundShortString )
                    {
                        String versionString = in.readLine();
                        int start = versionString.indexOf( '>' ) + 1;
                        int stop = versionString.indexOf( '<', start );
                        this.version = versionString.substring( start, stop );
                    }
                }
                else if ( line.contains( ">CFBundleIdentifier<" ) )
                {
                    String bundleString = in.readLine();
                    int start = bundleString.indexOf( '>' ) + 1;
                    int stop = bundleString.indexOf( '<', start );
                    this.bundleId = bundleString.substring( start, stop );
                }
                else if ( line.contains( ">LSRequiresIPhoneOS<" ) )
                {
                    platform = XCODE_PLATFORM_IOS;
                }
            }
        }
    }
    catch ( IOException e )
    {
        Manager.getLogger( getClass().getName() ).error( "Error parsing project info file", e );
    }
    finally
    {
        if ( in != null )
        {
            IOUtil.close(in);
        }
    }
}
/**
 * Parses CocoaPods dependencies from a Podfile into the comma separated
 * "name:version" string stored in the dependencies field. Lines starting
 * with "pod " are split on commas; entries with more than two parts are
 * skipped, and entries with no version get an empty version string.
 * A null or missing file is silently ignored.
 */
protected void loadFromPodFile( File podFile )
{
    BufferedReader in = null;
    try
    {
        if ( podFile != null && podFile.exists() )
        {
            Manager.getLogger( getClass().getName() ).info( "Loading CocoaPods metadata from " +
                podFile.getPath() );
            String line;
            boolean first = true;
            // local accumulator; shadows the entity field until assigned below
            StringBuilder dependencies = new StringBuilder();
            in = new BufferedReader( new InputStreamReader( new FileInputStream( podFile ) ) );
            while ( ( line = in.readLine() ) != null )
            {
                if ( line.startsWith( "pod ") )
                {
                    String[] parts = line.substring( 4 ).split( "," );
                    if ( parts.length > 2 )
                    {
                        // more than "name, version" - not understood, skip it
                        continue;
                    }
                    CocoaPodDependency dep;
                    if ( parts.length == 1 )
                    {
                        dep = new CocoaPodDependency( trimValue( parts[0] ), "" );
                    }
                    else
                    {
                        dep = new CocoaPodDependency( trimValue( parts[0] ), trimValue( parts[1] ) );
                    }
                    String depStr = dep.getName() + ":" + dep.getVersion();
                    if ( !first )
                    {
                        dependencies.append( ',' );
                    }
                    dependencies.append( depStr );
                    first = false;
                }
            }
            this.dependencies = dependencies.toString();
        }
    }
    catch ( IOException e )
    {
        Manager.getLogger( getClass().getName() ).error( "Error parsing CocoaPods file", e );
    }
    finally
    {
        if ( in != null )
        {
            IOUtil.close(in);
        }
    }
}
/**
 * Trims whitespace from a Podfile token and removes one pair of surrounding
 * single quotes, if present.
 *
 * @param part the raw token (may be null)
 * @return the trimmed, unquoted value, or null if part was null
 */
private String trimValue( String part )
{
    if ( part == null )
    {
        return null;
    }
    part = part.trim();
    // only strip quotes when the value is actually wrapped in them - the old
    // check dropped the last character even when no closing quote was present
    if ( part.length() >= 2 && part.startsWith( "'" ) && part.endsWith( "'" ) )
    {
        return part.substring( 1, part.length() - 1 );
    }
    return part;
}
/** @return the project version parsed from the Info.plist */
public String getVersion()
{
    return version;
}

public void setVersion( String version )
{
    this.version = version;
}

/** @return the bundle identifier parsed from the Info.plist */
public String getBundleId()
{
    return bundleId;
}

public void setBundleId( String bundleId )
{
    this.bundleId = bundleId;
}

/** @return the target platform constant (macosx or ios) */
public String getPlatform()
{
    return platform;
}

public void setPlatform( String platform )
{
    this.platform = platform;
}
/**
 * Reacts to file change notifications. A modified project.pbxproj triggers a
 * direct metadata reload; a modified Info.plist triggers a reload via the
 * project.pbxproj found inside a sibling xcodeproj directory.
 *
 * @param path the repository path of the changed file
 * @param file the changed file on disk (may be null)
 */
public void fileModified( String path, File file )
{
    if ( path.endsWith( "project.pbxproj" ) )
    {
        reloadProjectAndNotify( file );
    }
    else if ( path.endsWith( "Info.plist" ) && file != null && file.getParentFile() != null )
    {
        // check the parent dir for the xcode project metadata
        File[] children = file.getParentFile().listFiles();
        if ( children == null )
        {
            // listFiles returns null when the directory vanished or is unreadable
            return;
        }
        for ( File xcodeproj : children )
        {
            if ( xcodeproj.isDirectory() )
            {
                File pbxproj = new File( xcodeproj, "project.pbxproj" );
                if ( pbxproj.exists() )
                {
                    reloadProjectAndNotify( pbxproj );
                    break;
                }
            }
        }
    }
}

/** Re-parses the given project file, stamps the update time, persists and broadcasts the change. */
private void reloadProjectAndNotify( File projectFile )
{
    loadFromProjectFile( projectFile );
    setUpdated( new Date() );
    ( (HibernateStorage) Manager.getStorageInstance() ).merge( this );
    Manager.getInstance().fireProjectModified( this );
}
/**
 * Checks whether the given directory contains xcode project metadata, i.e.
 * a subdirectory holding a project.pbxproj file.
 *
 * @param directory the directory to inspect
 * @return true if an xcode project was found, false otherwise
 */
public boolean foundMetadata( File directory )
{
    File[] files = directory.listFiles();
    if ( files == null )
    {
        return false;
    }
    for ( File candidate : files )
    {
        if ( !candidate.isDirectory() )
        {
            continue;
        }
        if ( new File( candidate, "project.pbxproj" ).exists() )
        {
            return true;
        }
    }
    return false;
}
/**
 * Returns the CocoaPods dependencies parsed from the Podfile. The stored
 * string is a comma separated list of "name:version" pairs; entries without
 * a version are returned with an empty version string.
 *
 * @return the list of dependencies, empty when none are recorded
 */
public List<XCodeDependency> getDependencies()
{
    List<XCodeDependency> ret = new LinkedList<XCodeDependency>();
    if ( dependencies == null || dependencies.length() == 0 )
    {
        return ret;
    }
    for ( String dependency : dependencies.split( "," ) )
    {
        final String[] values = dependency.split( ":" );
        String podVersion = ( values.length < 2 ) ? "" : values[1];
        ret.add( new CocoaPodDependency( values[0], podVersion ) );
    }
    return ret;
}
/** @return the human readable name of this project type */
public String getTypeName()
{
    return "XCode";
}
}
/**
 * Immutable value object pairing a CocoaPods dependency name with the
 * version string requested in the Podfile (may be empty).
 */
class CocoaPodDependency
    implements XCodeDependency, Serializable
{
    private final String name;
    private final String version;

    public CocoaPodDependency( String name, String version )
    {
        this.name = name;
        this.version = version;
    }

    public String getName()
    {
        return name;
    }

    public String getVersion()
    {
        return version;
    }
}
|
agile-storage/src/main/java/org/headsupdev/agile/storage/StoredXCodeProject.java
|
/*
* HeadsUp Agile
* Copyright 2009-2013 Heads Up Development Ltd.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.headsupdev.agile.storage;
import org.headsupdev.agile.api.*;
import org.headsupdev.agile.api.rest.Publish;
import org.headsupdev.support.java.IOUtil;
import org.hibernate.annotations.Type;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Indexed;
import javax.persistence.Entity;
import javax.persistence.DiscriminatorValue;
import java.io.*;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
/**
* TODO Document me!
*
* @author Andrew Williams
* @version $Id$
* @since 1.0
*/
@Entity
@DiscriminatorValue( "xcode" )
@Indexed( index = "XCodeProjects" )
public class StoredXCodeProject
extends StoredProject
implements XCodeProject
{
@Field(index = Index.TOKENIZED)
@Publish
protected String version;
@Field(index = Index.TOKENIZED)
@Publish
protected String bundleId;
@Field(index = Index.TOKENIZED)
@Publish
protected String platform;
@Type( type = "text" )
@Publish
protected String dependencies;
public StoredXCodeProject()
{
}
public StoredXCodeProject( File projectFile )
{
this( projectFile, null );
}
public StoredXCodeProject( File projectFile, String id )
{
this.id = id;
loadFromProjectFile( projectFile );
if ( id == null )
{
this.id = encodeId(name);
}
}
protected String replaceVariables( String in )
{
return in.replace( "$(SRCROOT)/", "" );
}
protected String stripComments( String in )
{
String out = in;
int pos = in.indexOf( "/*" );
if ( pos >= 0 )
{
out = in.substring( 0, pos );
}
return out.trim();
}
private String getProjectFileValue( String line )
{
String ret;
int start = line.indexOf( "=" ) + 1;
int end = line.indexOf( ";", start );
if ( end == -1 )
{
ret = line.substring( start );
}
else
{
ret = line.substring( start, end );
}
ret = ret.trim();
if ( ret.length() > 2 )
{
if ( ret.charAt( 0 ) == '"' && ret.charAt( ret.length() - 1 ) == '"' )
{
ret = ret.substring( 1, ret.length() - 1 );
}
}
return stripComments( replaceVariables( ret ) );
}
protected String getFirstTarget( File file )
{
BufferedReader in = null;
try
{
in = new BufferedReader( new InputStreamReader( new FileInputStream( file ) ) );
String line;
while ( ( line = in.readLine() ) != null && line.indexOf( "targets = (" ) == -1 )
{
// ignore these lines
}
return stripComments( in.readLine() );
}
catch ( IOException e )
{
Manager.getLogger( getClass().getName() ).error( "Error parsing xcode metadata", e );
}
finally
{
if ( in != null )
{
IOUtil.close( in );
}
}
return null;
}
protected String getFirstBuildConfigurationInList( File file, String buildConfigList )
{
BufferedReader in = null;
try
{
in = new BufferedReader( new InputStreamReader( new FileInputStream( file ) ) );
String line;
while ( ( line = in.readLine() ) != null && line.indexOf( buildConfigList ) == -1 )
{
// ignore these lines
}
while ( ( line = in.readLine() ) != null && line.indexOf( buildConfigList ) == -1 )
{
// ignore these lines again
}
while ( ( line = in.readLine() ) != null && line.indexOf( "buildConfigurations" ) == -1 )
{
// ignore these lines
}
return stripComments( in.readLine() );
}
catch ( IOException e )
{
Manager.getLogger( getClass().getName() ).error( "Error parsing xcode metadata", e );
}
finally
{
if ( in != null )
{
IOUtil.close( in );
}
}
return null;
}
protected String getBuildConfigurationListForTarget( File file, String target )
{
BufferedReader in = null;
try
{
in = new BufferedReader( new InputStreamReader( new FileInputStream( file ) ) );
String line;
while ( ( line = in.readLine() ) != null && line.indexOf( target ) == -1 )
{
// ignore these lines
}
while ( ( line = in.readLine() ) != null && line.indexOf( "buildConfigurationList" ) == -1 )
{
// ignore these lines
}
return getProjectFileValue( line );
}
catch ( IOException e )
{
Manager.getLogger( getClass().getName() ).error( "Error parsing xcode metadata", e );
}
finally
{
if ( in != null )
{
IOUtil.close( in );
}
}
return null;
}
protected void loadFromProjectFile( File projectFile )
{
Manager.getLogger( getClass().getName() ).info( "Parsing XCode metadata from " + projectFile.getPath() );
String firstTarget = getFirstTarget( projectFile );
String buildConfigurationList = getBuildConfigurationListForTarget( projectFile, firstTarget );
String configuration = getFirstBuildConfigurationInList( projectFile, buildConfigurationList );
BufferedReader in = null;
try
{
in = new BufferedReader( new InputStreamReader( new FileInputStream( projectFile ) ) );
// skip to buildConfiguration
String line;
while ( ( line = in.readLine() ) != null && line.indexOf( configuration ) == -1 )
{
// ignore these lines
}
String infoFileName = null;
while ( ( line = in.readLine() ) != null )
{
if ( line.indexOf( "INFOPLIST_FILE =" ) != -1 )
{
infoFileName = getProjectFileValue( line );
}
else if ( line.indexOf( "PRODUCT_NAME =" ) != -1 )
{
name = getProjectFileValue( line );
break; // note that if we parse further blocks a break will not be enough
}
}
IOUtil.close( in );
in = null;
File infoFile = null;
if ( infoFileName != null )
{
infoFile = new File( projectFile.getParentFile().getParentFile(), infoFileName );
}
else
{
for ( File possible : projectFile.getParentFile().getParentFile().listFiles() )
{
if ( possible.getName().endsWith( "Info.plist" ) )
{
infoFile = possible;
break;
}
}
}
loadFromInfoFile( infoFile );
File podFile = new File( projectFile.getParentFile().getParentFile(), "Podfile" );
loadFromPodFile( podFile );
}
catch ( IOException e )
{
Manager.getLogger( getClass().getName() ).error( "Error parsing xcode metadata", e );
}
finally
{
if ( in != null )
{
IOUtil.close( in );
}
}
}
protected void loadFromInfoFile( File infoFile )
{
BufferedReader in = null;
try
{
if ( infoFile != null && infoFile.exists() )
{
Manager.getLogger( getClass().getName() ).info( "Loading extra XCode metadata from " +
infoFile.getPath() );
boolean foundShortString = false;
platform = XCODE_PLATFORM_MACOSX;
String line;
in = new BufferedReader( new InputStreamReader( new FileInputStream( infoFile ) ) );
while ( ( line = in.readLine() ) != null )
{
if ( line.contains( ">CFBundleShortVersionString<") )
{
foundShortString = true;
String versionString = in.readLine();
int start = versionString.indexOf( '>' ) + 1;
int stop = versionString.indexOf( '<', start );
this.version = versionString.substring( start, stop );
}
else if ( line.contains( ">CFBundleVersion<") )
{
if ( !foundShortString )
{
String versionString = in.readLine();
int start = versionString.indexOf( '>' ) + 1;
int stop = versionString.indexOf( '<', start );
this.version = versionString.substring( start, stop );
}
}
else if ( line.contains( ">CFBundleIdentifier<" ) )
{
String bundleString = in.readLine();
int start = bundleString.indexOf( '>' ) + 1;
int stop = bundleString.indexOf( '<', start );
this.bundleId = bundleString.substring( start, stop );
}
else if ( line.contains( ">LSRequiresIPhoneOS<" ) )
{
platform = XCODE_PLATFORM_IOS;
}
}
}
}
catch ( IOException e )
{
Manager.getLogger( getClass().getName() ).error( "Error parsing project info file", e );
}
finally
{
if ( in != null )
{
IOUtil.close(in);
}
}
}
protected void loadFromPodFile( File podFile )
{
BufferedReader in = null;
try
{
if ( podFile != null && podFile.exists() )
{
Manager.getLogger( getClass().getName() ).info( "Loading CocoaPods metadata from " +
podFile.getPath() );
String line;
boolean first = true;
StringBuilder dependencies = new StringBuilder();
in = new BufferedReader( new InputStreamReader( new FileInputStream( podFile ) ) );
while ( ( line = in.readLine() ) != null )
{
if ( line.startsWith( "pod ") )
{
String[] parts = line.substring( 4 ).split( "," );
if ( parts.length > 2 )
{
continue;
}
CocoaPodDependency dep;
if ( parts.length == 1 )
{
dep = new CocoaPodDependency( trimValue( parts[0] ), "" );
}
else
{
dep = new CocoaPodDependency( trimValue( parts[0] ), trimValue( parts[1] ) );
}
String depStr = dep.getName() + ":" + dep.getVersion();
if ( !first )
{
dependencies.append( ',' );
}
dependencies.append( depStr );
first = false;
}
}
this.dependencies = dependencies.toString();
}
}
catch ( IOException e )
{
Manager.getLogger( getClass().getName() ).error( "Error parsing CocoaPods file", e );
}
finally
{
if ( in != null )
{
IOUtil.close(in);
}
}
}
/**
 * Trims whitespace from a Podfile token and removes one pair of surrounding
 * single quotes, if present.
 *
 * @param part the raw token (may be null)
 * @return the trimmed, unquoted value, or null if part was null
 */
private String trimValue( String part )
{
    if ( part == null )
    {
        return null;
    }
    part = part.trim();
    // only strip quotes when the value is actually wrapped in them - the old
    // check dropped the last character even when no closing quote was present
    if ( part.length() >= 2 && part.startsWith( "'" ) && part.endsWith( "'" ) )
    {
        return part.substring( 1, part.length() - 1 );
    }
    return part;
}
public String getVersion()
{
return version;
}
public void setVersion( String version )
{
this.version = version;
}
public String getBundleId()
{
return bundleId;
}
public void setBundleId( String bundleId )
{
this.bundleId = bundleId;
}
public String getPlatform()
{
return platform;
}
public void setPlatform( String platform )
{
this.platform = platform;
}
public void fileModified( String path, File file )
{
if ( path.endsWith( "project.pbxproj" ) )
{
loadFromProjectFile( file );
setUpdated( new Date() );
( (HibernateStorage) Manager.getStorageInstance() ).merge( this );
Manager.getInstance().fireProjectModified( this );
}
else if ( path.endsWith( "Info.plist" ) && file != null && file.getParentFile() != null )
{
// check the parent dir for the xcode project metadata
for ( File xcodeproj : file.getParentFile().listFiles() )
{
if ( xcodeproj.isDirectory() )
{
File pbxproj = new File( xcodeproj, "project.pbxproj" );
if ( pbxproj.exists() )
{
loadFromProjectFile( pbxproj );
setUpdated( new Date() );
( (HibernateStorage) Manager.getStorageInstance() ).merge( this );
Manager.getInstance().fireProjectModified( this );
break;
}
}
}
}
}
public boolean foundMetadata( File directory )
{
File[] files = directory.listFiles();
if ( files == null || files.length == 0 )
{
return false;
}
for ( File file : files )
{
if ( file.isDirectory() && ( new File( file, "project.pbxproj" ) ).exists() )
{
return true;
}
}
return false;
}
/**
 * Returns the CocoaPods dependencies parsed from the Podfile. The stored
 * string is a comma separated list of "name:version" pairs.
 *
 * Fix: a dependency recorded without a version used to throw an
 * ArrayIndexOutOfBoundsException here (values[1] on a one-element split);
 * such entries are now rendered with an empty version string instead.
 *
 * @return the list of dependencies, empty when none are recorded
 */
public List<XCodeDependency> getDependencies()
{
    List<XCodeDependency> ret = new LinkedList<XCodeDependency>();
    if ( dependencies == null || dependencies.length() == 0 )
    {
        return ret;
    }
    String[] dependencyList = dependencies.split( "," );
    for ( String dependency : dependencyList )
    {
        final String[] values = dependency.split( ":" );
        if ( values.length < 2 )
        {
            // no version recorded - render with an empty version rather than failing
            ret.add( new CocoaPodDependency( values[0], "" ) );
        }
        else
        {
            ret.add( new CocoaPodDependency( values[0], values[1] ) );
        }
    }
    return ret;
}
public String getTypeName()
{
return "XCode";
}
}
/**
 * Immutable value object pairing a CocoaPods dependency name with the
 * version string requested in the Podfile (may be empty).
 */
class CocoaPodDependency
    implements XCodeDependency, Serializable
{
    private String name, version;

    public CocoaPodDependency( String name, String version )
    {
        this.name = name;
        this.version = version;
    }

    /** @return the pod name */
    public String getName()
    {
        return name;
    }

    /** @return the pod version requested in the Podfile, possibly empty */
    public String getVersion()
    {
        return version;
    }
}
|
Render dependencies with no version
|
agile-storage/src/main/java/org/headsupdev/agile/storage/StoredXCodeProject.java
|
Render dependencies with no version
|
|
Java
|
lgpl-2.1
|
151b24ad2786fe5facad13a76d5d65f82432bb98
| 0
|
IsaacYangSLA/nuxeo-drive,IsaacYangSLA/nuxeo-drive,DirkHoffmann/nuxeo-drive,IsaacYangSLA/nuxeo-drive,rsoumyassdi/nuxeo-drive,loopingz/nuxeo-drive,loopingz/nuxeo-drive,arameshkumar/base-nuxeo-drive,DirkHoffmann/nuxeo-drive,DirkHoffmann/nuxeo-drive,loopingz/nuxeo-drive,arameshkumar/nuxeo-drive,DirkHoffmann/nuxeo-drive,arameshkumar/base-nuxeo-drive,rsoumyassdi/nuxeo-drive,IsaacYangSLA/nuxeo-drive,ssdi-drive/nuxeo-drive,ssdi-drive/nuxeo-drive,arameshkumar/nuxeo-drive,arameshkumar/base-nuxeo-drive,rsoumyassdi/nuxeo-drive,DirkHoffmann/nuxeo-drive,rsoumyassdi/nuxeo-drive,ssdi-drive/nuxeo-drive,arameshkumar/nuxeo-drive,loopingz/nuxeo-drive,loopingz/nuxeo-drive,arameshkumar/nuxeo-drive,arameshkumar/base-nuxeo-drive,IsaacYangSLA/nuxeo-drive
|
/*
* (C) Copyright 2012 Nuxeo SA (http://nuxeo.com/) and contributors.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public License
* (LGPL) version 2.1 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl.html
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* Contributors:
* Antoine Taillefer <ataillefer@nuxeo.com>
*/
package org.nuxeo.drive.service;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;
import java.util.Calendar;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.nuxeo.drive.service.impl.AuditDocumentChangeFinder;
import org.nuxeo.drive.service.impl.DocumentChange;
import org.nuxeo.drive.service.impl.DocumentChangeSummary;
import org.nuxeo.ecm.core.api.ClientException;
import org.nuxeo.ecm.core.api.CoreSession;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.TransactionalCoreSessionWrapper;
import org.nuxeo.ecm.core.api.impl.blob.StringBlob;
import org.nuxeo.ecm.core.api.local.LocalSession;
import org.nuxeo.ecm.core.event.EventService;
import org.nuxeo.ecm.core.test.annotations.TransactionalConfig;
import org.nuxeo.ecm.platform.audit.AuditFeature;
import org.nuxeo.runtime.api.Framework;
import org.nuxeo.runtime.test.runner.Deploy;
import org.nuxeo.runtime.test.runner.Features;
import org.nuxeo.runtime.test.runner.FeaturesRunner;
import org.nuxeo.runtime.test.runner.LocalDeploy;
import org.nuxeo.runtime.transaction.TransactionHelper;
import com.google.inject.Inject;
/**
* Test the {@link AuditDocumentChangeFinder}.
*/
@RunWith(FeaturesRunner.class)
@Features(AuditFeature.class)
// We handle transaction start and commit manually to make it possible to have
// several consecutive transactions in a test method
@TransactionalConfig(autoStart = false)
@Deploy("org.nuxeo.drive.core")
@LocalDeploy("org.nuxeo.drive.core:OSGI-INF/test-nuxeodrive-types-contrib.xml")
public class TestAuditDocumentChangeFinder {
// Repository session injected by the test framework.
@Inject
protected CoreSession session;
@Inject
protected EventService eventService;
@Inject
protected NuxeoDriveManager nuxeoDriveManager;
// Change finder under test; instantiated in init().
protected DocumentChangeFinder documentChangeFinder;
// Timestamp (ms) of the last successful sync, lower bound for change queries.
protected long lastSuccessfulSync;
// Paths of the currently synchronized roots used by the change queries.
protected Set<String> syncRootPaths;
protected DocumentModel folder1;
protected DocumentModel folder2;
// Sets up the change finder and sync state, and creates three top level
// folders (only folder1 and folder2 will be used as sync roots in the tests).
@Before
public void init() throws Exception {
    documentChangeFinder = new AuditDocumentChangeFinder();
    lastSuccessfulSync = Calendar.getInstance().getTimeInMillis();
    syncRootPaths = new HashSet<String>();
    // cap the number of returned changes so the "too many changes" case is testable
    Framework.getProperties().put("org.nuxeo.drive.document.change.limit",
            "10");
    dispose(session);
    TransactionHelper.startTransaction();
    folder1 = session.createDocument(session.createDocumentModel("/",
            "folder1", "Folder"));
    folder2 = session.createDocument(session.createDocumentModel("/",
            "folder2", "Folder"));
    session.createDocument(session.createDocumentModel("/", "folder3",
            "Folder"));
    commitAndWaitForAsyncCompletion();
}
/**
 * Exercises the change finder across the document lifecycle: no roots, no
 * changes, creation, modification, root unregistration, deletion, restore,
 * move into a newly registered root, and finally the "too many changes"
 * limit. Assertion order relies on changes being returned newest-first -
 * TODO confirm against AuditDocumentChangeFinder's ordering contract.
 */
@Test
public void testFindDocumentChanges() throws Exception {
    // No sync roots
    List<DocumentChange> docChanges = getDocumentChanges();
    assertNotNull(docChanges);
    assertTrue(docChanges.isEmpty());
    // Sync roots but no changes
    syncRootPaths.add("/folder1");
    syncRootPaths.add("/folder2");
    docChanges = getDocumentChanges();
    assertTrue(docChanges.isEmpty());
    // Create 3 documents, only 2 in sync roots
    TransactionHelper.startTransaction();
    DocumentModel doc1 = session.createDocument(session.createDocumentModel(
            "/folder1", "doc1", "File"));
    DocumentModel doc2 = session.createDocument(session.createDocumentModel(
            "/folder2", "doc2", "File"));
    DocumentModel doc3 = session.createDocument(session.createDocumentModel(
            "/folder3", "doc3", "File"));
    commitAndWaitForAsyncCompletion();
    docChanges = getDocumentChanges();
    // doc3 is outside the sync roots so only 2 changes are expected
    assertEquals(2, docChanges.size());
    DocumentChange docChange = docChanges.get(0);
    assertEquals("test", docChange.getRepositoryId());
    assertEquals("documentCreated", docChange.getEventId());
    assertEquals("project", docChange.getDocLifeCycleState());
    assertEquals("/folder2/doc2", docChange.getDocPath());
    assertEquals(doc2.getId(), docChange.getDocUuid());
    docChange = docChanges.get(1);
    assertEquals("test", docChange.getRepositoryId());
    assertEquals("documentCreated", docChange.getEventId());
    assertEquals("project", docChange.getDocLifeCycleState());
    assertEquals("/folder1/doc1", docChange.getDocPath());
    assertEquals(doc1.getId(), docChange.getDocUuid());
    // No changes since last successful sync
    docChanges = getDocumentChanges();
    assertTrue(docChanges.isEmpty());
    // Update both synchronized documents and unsynchronize a root
    TransactionHelper.startTransaction();
    doc1.setPropertyValue("file:content", new StringBlob(
            "The content of file 1."));
    session.saveDocument(doc1);
    doc2.setPropertyValue("file:content", new StringBlob(
            "The content of file 2."));
    session.saveDocument(doc2);
    commitAndWaitForAsyncCompletion();
    syncRootPaths.remove("/folder2");
    docChanges = getDocumentChanges();
    // doc2's change is hidden because /folder2 is no longer a sync root
    assertEquals(1, docChanges.size());
    docChange = docChanges.get(0);
    assertEquals("test", docChange.getRepositoryId());
    assertEquals("documentModified", docChange.getEventId());
    assertEquals("project", docChange.getDocLifeCycleState());
    assertEquals("/folder1/doc1", docChange.getDocPath());
    assertEquals(doc1.getId(), docChange.getDocUuid());
    // Delete a document
    TransactionHelper.startTransaction();
    session.followTransition(doc1.getRef(), "delete");
    commitAndWaitForAsyncCompletion();
    docChanges = getDocumentChanges();
    assertEquals(1, docChanges.size());
    docChange = docChanges.get(0);
    assertEquals("test", docChange.getRepositoryId());
    assertEquals("lifecycle_transition_event", docChange.getEventId());
    assertEquals("deleted", docChange.getDocLifeCycleState());
    assertEquals("/folder1/doc1", docChange.getDocPath());
    assertEquals(doc1.getId(), docChange.getDocUuid());
    // Restore a deleted document and move a document in a newly
    // synchronized root
    TransactionHelper.startTransaction();
    session.followTransition(doc1.getRef(), "undelete");
    session.move(doc3.getRef(), folder2.getRef(), null);
    commitAndWaitForAsyncCompletion();
    syncRootPaths.add("/folder2");
    docChanges = getDocumentChanges();
    assertEquals(2, docChanges.size());
    docChange = docChanges.get(0);
    assertEquals("test", docChange.getRepositoryId());
    assertEquals("documentMoved", docChange.getEventId());
    assertEquals("project", docChange.getDocLifeCycleState());
    assertEquals("/folder2/doc3", docChange.getDocPath());
    assertEquals(doc3.getId(), docChange.getDocUuid());
    docChange = docChanges.get(1);
    assertEquals("test", docChange.getRepositoryId());
    assertEquals("lifecycle_transition_event", docChange.getEventId());
    assertEquals("project", docChange.getDocLifeCycleState());
    assertEquals("/folder1/doc1", docChange.getDocPath());
    assertEquals(doc1.getId(), docChange.getDocUuid());
    // Too many changes
    TransactionHelper.startTransaction();
    session.followTransition(doc1.getRef(), "delete");
    session.followTransition(doc2.getRef(), "delete");
    commitAndWaitForAsyncCompletion();
    // lower the limit below the number of pending changes to force the error
    Framework.getProperties().put("org.nuxeo.drive.document.change.limit",
            "1");
    try {
        getDocumentChanges();
        fail("An exception of type TooManyDocumentChangesException should have been thrown since the document change limit is exceeded.");
    } catch (TooManyDocumentChangesException e) {
        // Expected
    }
}
@Test
public void testGetDocumentChangeSummary() throws Exception {
// No sync roots => shouldn't find any changes
DocumentChangeSummary docChangeSummary = getDocumentChangeSummary("Administrator");
assertNotNull(docChangeSummary);
assertTrue(docChangeSummary.getSyncRootPaths().isEmpty());
assertTrue(docChangeSummary.getDocumentChanges().isEmpty());
assertTrue(docChangeSummary.getChangedDocModels().isEmpty());
assertEquals("no_changes", docChangeSummary.getStatusCode());
// Register sync roots => should find changes: the newly
// synchronized root folders as they are updated by the synchronization
// registration process
// TODO: uncomment if needed or remove
// TransactionHelper.startTransaction();
nuxeoDriveManager.registerSynchronizationRoot("Administrator", folder1,
session);
nuxeoDriveManager.registerSynchronizationRoot("Administrator", folder2,
session);
// commitAndWaitForAsyncCompletion();
docChangeSummary = getDocumentChangeSummary("Administrator");
Set<String> expectedSyncRootPaths = new HashSet<String>();
expectedSyncRootPaths.add("/folder1");
expectedSyncRootPaths.add("/folder2");
assertEquals(expectedSyncRootPaths, docChangeSummary.getSyncRootPaths());
assertEquals(2, docChangeSummary.getDocumentChanges().size());
assertEquals(2, docChangeSummary.getChangedDocModels().size());
assertEquals("found_changes", docChangeSummary.getStatusCode());
// Create 3 documents, only 2 in sync roots => should find 2 changes
TransactionHelper.startTransaction();
DocumentModel doc1 = session.createDocumentModel("/folder1", "doc1",
"File");
doc1.setPropertyValue("file:content", new StringBlob(
"The content of file 1."));
doc1 = session.createDocument(doc1);
DocumentModel doc2 = session.createDocumentModel("/folder2", "doc2",
"File");
doc2.setPropertyValue("file:content", new StringBlob(
"The content of file 2."));
doc2 = session.createDocument(doc2);
session.createDocument(session.createDocumentModel("/folder3", "doc3",
"File"));
commitAndWaitForAsyncCompletion();
docChangeSummary = getDocumentChangeSummary("Administrator");
assertEquals(expectedSyncRootPaths, docChangeSummary.getSyncRootPaths());
List<DocumentChange> docChanges = docChangeSummary.getDocumentChanges();
assertEquals(2, docChanges.size());
DocumentChange docChange = docChanges.get(0);
assertEquals("test", docChange.getRepositoryId());
assertEquals("documentCreated", docChange.getEventId());
// TODO: understand why the life cycle is not good
// assertEquals("project", docChange.getDocLifeCycleState());
assertEquals("/folder2/doc2", docChange.getDocPath());
assertEquals(doc2.getId(), docChange.getDocUuid());
docChange = docChanges.get(1);
assertEquals("test", docChange.getRepositoryId());
assertEquals("documentCreated", docChange.getEventId());
// TODO: understand why the life cycle is not good
// assertEquals("project", docChange.getDocLifeCycleState());
assertEquals("/folder1/doc1", docChange.getDocPath());
assertEquals(doc1.getId(), docChange.getDocUuid());
Map<String, DocumentModel> changedDocModels = docChangeSummary.getChangedDocModels();
assertEquals(2, changedDocModels.size());
DocumentModel changedDoc = changedDocModels.get(doc1.getId());
assertNotNull(changedDoc);
assertEquals(doc1.getId(), changedDoc.getId());
assertEquals("/folder1/doc1", changedDoc.getPathAsString());
assertEquals("doc1", changedDoc.getName());
assertEquals("doc1", changedDoc.getTitle());
assertEquals("File", changedDoc.getType());
assertEquals("project", changedDoc.getCurrentLifeCycleState());
changedDoc = changedDocModels.get(doc2.getId());
assertNotNull(changedDoc);
assertEquals(doc2.getId(), changedDoc.getId());
assertEquals("/folder2/doc2", changedDoc.getPathAsString());
assertEquals("doc2", changedDoc.getName());
assertEquals("doc2", changedDoc.getTitle());
assertEquals("File", changedDoc.getType());
assertEquals("project", changedDoc.getCurrentLifeCycleState());
assertEquals("found_changes", docChangeSummary.getStatusCode());
// Create a document that should not be synchronized because not
// adaptable as a FileSystemItem (not Folderish nor a BlobHolder with a
// blob) => should not be considered as a change
TransactionHelper.startTransaction();
session.createDocument(session.createDocumentModel("/folder1",
"notSynchronizableDoc", "NotSynchronizable"));
commitAndWaitForAsyncCompletion();
docChangeSummary = getDocumentChangeSummary("Administrator");
assertTrue(docChangeSummary.getDocumentChanges().isEmpty());
assertTrue(docChangeSummary.getChangedDocModels().isEmpty());
assertEquals("no_changes", docChangeSummary.getStatusCode());
// Create 2 documents in the same sync root: "/folder1" and 1 document
// in another sync root => should find 2 changes for "/folder1"
TransactionHelper.startTransaction();
DocumentModel doc3 = session.createDocumentModel("/folder1", "doc3",
"File");
doc3.setPropertyValue("file:content", new StringBlob(
"The content of file 3."));
doc3 = session.createDocument(doc3);
DocumentModel doc4 = session.createDocumentModel("/folder1", "doc4",
"File");
doc4.setPropertyValue("file:content", new StringBlob(
"The content of file 4."));
doc4 = session.createDocument(doc4);
DocumentModel doc5 = session.createDocumentModel("/folder2", "doc5",
"File");
doc5.setPropertyValue("file:content", new StringBlob(
"The content of file 5."));
doc5 = session.createDocument(doc5);
commitAndWaitForAsyncCompletion();
docChangeSummary = getFolderDocumentChangeSummary("/folder1");
expectedSyncRootPaths.remove("/folder2");
assertEquals(expectedSyncRootPaths, docChangeSummary.getSyncRootPaths());
assertEquals(2, docChangeSummary.getDocumentChanges().size());
assertEquals(2, docChangeSummary.getChangedDocModels().size());
assertEquals("found_changes", docChangeSummary.getStatusCode());
// No changes since last successful sync
docChangeSummary = getDocumentChangeSummary("Administrator");
expectedSyncRootPaths.add("/folder2");
assertEquals(expectedSyncRootPaths, docChangeSummary.getSyncRootPaths());
assertTrue(docChangeSummary.getDocumentChanges().isEmpty());
assertTrue(docChangeSummary.getChangedDocModels().isEmpty());
assertEquals("no_changes", docChangeSummary.getStatusCode());
// Too many changes
TransactionHelper.startTransaction();
session.followTransition(doc1.getRef(), "delete");
session.followTransition(doc2.getRef(), "delete");
commitAndWaitForAsyncCompletion();
Framework.getProperties().put("org.nuxeo.drive.document.change.limit",
"1");
docChangeSummary = getDocumentChangeSummary("Administrator");
assertEquals(expectedSyncRootPaths, docChangeSummary.getSyncRootPaths());
assertTrue(docChangeSummary.getDocumentChanges().isEmpty());
assertTrue(docChangeSummary.getChangedDocModels().isEmpty());
assertEquals("too_many_changes", docChangeSummary.getStatusCode());
}
/**
* Gets the document changes using the {@link AuditDocumentChangeFinder} and
* updates the {@link #lastSuccessfulSync} date.
*/
/**
 * Fetches the document changes detected by the {@link AuditDocumentChangeFinder}
 * since {@link #lastSuccessfulSync} for the registered sync root paths, then
 * records the current time as the new last successful sync date.
 *
 * @throws TooManyDocumentChangesException if more changes exist than the
 *             configured {@code org.nuxeo.drive.document.change.limit}
 */
protected List<DocumentChange> getDocumentChanges()
        throws TooManyDocumentChangesException {
    int changeLimit = Integer.parseInt(Framework.getProperty("org.nuxeo.drive.document.change.limit"));
    List<DocumentChange> changes = documentChangeFinder.getDocumentChanges(
            true, session, syncRootPaths, lastSuccessfulSync, changeLimit);
    assertNotNull(changes);
    lastSuccessfulSync = System.currentTimeMillis();
    return changes;
}
/**
* Gets the document changes summary for the given user's synchronization
* roots using the {@link NuxeoDriveManager} and updates the
* {@link #lastSuccessfulSync} date.
*/
/**
 * Fetches the change summary for the given user's synchronization roots
 * through the {@link NuxeoDriveManager} and records the summary's sync date
 * as the new {@link #lastSuccessfulSync}.
 */
protected DocumentChangeSummary getDocumentChangeSummary(String userName)
        throws ClientException {
    DocumentChangeSummary summary = nuxeoDriveManager.getDocumentChangeSummary(
            true, userName, session, lastSuccessfulSync);
    assertNotNull(summary);
    lastSuccessfulSync = summary.getSyncDate();
    return summary;
}
/**
* Gets the document changes summary for the given folder using the
* {@link NuxeoDriveManager} and updates the {@link #lastSuccessfulSync}
* date.
*/
/**
 * Fetches the change summary restricted to the given folder through the
 * {@link NuxeoDriveManager} and records the summary's sync date as the new
 * {@link #lastSuccessfulSync}.
 */
protected DocumentChangeSummary getFolderDocumentChangeSummary(
        String folderPath) throws ClientException {
    DocumentChangeSummary summary = nuxeoDriveManager.getFolderDocumentChangeSummary(
            folderPath, session, lastSuccessfulSync);
    assertNotNull(summary);
    lastSuccessfulSync = summary.getSyncDate();
    return summary;
}
/**
 * Commits (or rolls back) the current transaction, releases the underlying
 * core session, then blocks until all asynchronous event listeners have
 * finished, so the audit log is fully populated before the test queries it.
 */
protected void commitAndWaitForAsyncCompletion() throws Exception {
    TransactionHelper.commitOrRollbackTransaction();
    dispose(session);
    eventService.waitForAsyncCompletion();
}
/**
 * Releases the underlying {@link LocalSession} wrapped by the given session.
 * If the session is a dynamic proxy around a
 * {@link TransactionalCoreSessionWrapper}, the real session is first pulled
 * out of the wrapper's private {@code session} field via reflection.
 *
 * @throws UnsupportedOperationException if the unwrapped session is not a
 *             {@link LocalSession}
 */
protected void dispose(CoreSession session) throws Exception {
    CoreSession target = session;
    if (Proxy.isProxyClass(target.getClass())) {
        InvocationHandler handler = Proxy.getInvocationHandler(target);
        if (handler instanceof TransactionalCoreSessionWrapper) {
            // Reach into the wrapper to grab the real session it delegates to.
            Field sessionField = TransactionalCoreSessionWrapper.class.getDeclaredField("session");
            sessionField.setAccessible(true);
            target = (CoreSession) sessionField.get(handler);
        }
    }
    if (target instanceof LocalSession) {
        ((LocalSession) target).getSession().dispose();
    } else {
        throw new UnsupportedOperationException(
                "Cannot dispose session of class " + target.getClass());
    }
}
}
|
nuxeo-drive-server/nuxeo-drive-core/src/test/java/org/nuxeo/drive/service/TestAuditDocumentChangeFinder.java
|
/*
* (C) Copyright 2012 Nuxeo SA (http://nuxeo.com/) and contributors.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public License
* (LGPL) version 2.1 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl.html
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* Contributors:
* Antoine Taillefer <ataillefer@nuxeo.com>
*/
package org.nuxeo.drive.service;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;
import java.util.Calendar;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.nuxeo.drive.service.impl.AuditDocumentChangeFinder;
import org.nuxeo.drive.service.impl.DocumentChange;
import org.nuxeo.drive.service.impl.DocumentChangeSummary;
import org.nuxeo.ecm.core.api.ClientException;
import org.nuxeo.ecm.core.api.CoreSession;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.TransactionalCoreSessionWrapper;
import org.nuxeo.ecm.core.api.impl.blob.StringBlob;
import org.nuxeo.ecm.core.api.local.LocalSession;
import org.nuxeo.ecm.core.event.EventService;
import org.nuxeo.ecm.core.test.annotations.TransactionalConfig;
import org.nuxeo.ecm.platform.audit.AuditFeature;
import org.nuxeo.runtime.api.Framework;
import org.nuxeo.runtime.test.runner.Deploy;
import org.nuxeo.runtime.test.runner.Features;
import org.nuxeo.runtime.test.runner.FeaturesRunner;
import org.nuxeo.runtime.test.runner.LocalDeploy;
import org.nuxeo.runtime.transaction.TransactionHelper;
import com.google.inject.Inject;
/**
* Test the {@link AuditDocumentChangeFinder}.
*/
@RunWith(FeaturesRunner.class)
@Features(AuditFeature.class)
// We handle transaction start and commit manually to make it possible to have
// several consecutive transactions in a test method
@TransactionalConfig(autoStart = false)
@Deploy("org.nuxeo.drive.core")
@LocalDeploy("org.nuxeo.drive.core:OSGI-INF/test-nuxeodrive-types-contrib.xml")
public class TestAuditDocumentChangeFinder {

    @Inject
    protected CoreSession session;

    @Inject
    protected EventService eventService;

    @Inject
    protected NuxeoDriveManager nuxeoDriveManager;

    // Finder under test; instantiated directly (not injected) in init().
    protected DocumentChangeFinder documentChangeFinder;

    // Epoch millis of the last sync considered successful; change lookups only
    // consider audit entries logged after this date.
    protected long lastSuccessfulSync;

    // Paths of the folders treated as synchronization roots by the finder.
    protected Set<String> syncRootPaths;

    protected DocumentModel folder1;

    protected DocumentModel folder2;

    /**
     * Resets the finder, the sync state and the change limit, then creates
     * three top-level folders inside a dedicated transaction so they are
     * present in the audit log before each test runs.
     */
    @Before
    public void init() throws Exception {
        documentChangeFinder = new AuditDocumentChangeFinder();
        lastSuccessfulSync = Calendar.getInstance().getTimeInMillis();
        syncRootPaths = new HashSet<String>();
        Framework.getProperties().put("org.nuxeo.drive.document.change.limit",
                "10");
        dispose(session);
        TransactionHelper.startTransaction();
        folder1 = session.createDocument(session.createDocumentModel("/",
                "folder1", "Folder"));
        folder2 = session.createDocument(session.createDocumentModel("/",
                "folder2", "Folder"));
        session.createDocument(session.createDocumentModel("/", "folder3",
                "Folder"));
        commitAndWaitForAsyncCompletion();
    }

    /**
     * Exercises {@link AuditDocumentChangeFinder#getDocumentChanges} directly:
     * create/update/delete/undelete/move scenarios, scoping to sync roots, and
     * the too-many-changes limit.
     */
    @Test
    public void testFindDocumentChanges() throws Exception {
        // No sync roots
        List<DocumentChange> docChanges = getDocumentChanges();
        assertNotNull(docChanges);
        assertTrue(docChanges.isEmpty());
        // Sync roots but no changes
        syncRootPaths.add("/folder1");
        syncRootPaths.add("/folder2");
        docChanges = getDocumentChanges();
        assertTrue(docChanges.isEmpty());
        // Create 3 documents, only 2 in sync roots
        TransactionHelper.startTransaction();
        DocumentModel doc1 = session.createDocument(session.createDocumentModel(
                "/folder1", "doc1", "File"));
        DocumentModel doc2 = session.createDocument(session.createDocumentModel(
                "/folder2", "doc2", "File"));
        DocumentModel doc3 = session.createDocument(session.createDocumentModel(
                "/folder3", "doc3", "File"));
        commitAndWaitForAsyncCompletion();
        docChanges = getDocumentChanges();
        assertEquals(2, docChanges.size());
        // NOTE(review): doc2 (created last) is expected at index 0, so the
        // finder apparently returns the most recent change first.
        DocumentChange docChange = docChanges.get(0);
        assertEquals("test", docChange.getRepositoryId());
        assertEquals("documentCreated", docChange.getEventId());
        assertEquals("project", docChange.getDocLifeCycleState());
        assertEquals("/folder2/doc2", docChange.getDocPath());
        assertEquals(doc2.getId(), docChange.getDocUuid());
        docChange = docChanges.get(1);
        assertEquals("test", docChange.getRepositoryId());
        assertEquals("documentCreated", docChange.getEventId());
        assertEquals("project", docChange.getDocLifeCycleState());
        assertEquals("/folder1/doc1", docChange.getDocPath());
        assertEquals(doc1.getId(), docChange.getDocUuid());
        // No changes since last successful sync
        docChanges = getDocumentChanges();
        assertTrue(docChanges.isEmpty());
        // Update both synchronized documents and unsynchronize a root
        TransactionHelper.startTransaction();
        doc1.setPropertyValue("file:content", new StringBlob(
                "The content of file 1."));
        session.saveDocument(doc1);
        doc2.setPropertyValue("file:content", new StringBlob(
                "The content of file 2."));
        session.saveDocument(doc2);
        commitAndWaitForAsyncCompletion();
        // /folder2 is no longer a sync root, so only doc1's update is reported.
        syncRootPaths.remove("/folder2");
        docChanges = getDocumentChanges();
        assertEquals(1, docChanges.size());
        docChange = docChanges.get(0);
        assertEquals("test", docChange.getRepositoryId());
        assertEquals("documentModified", docChange.getEventId());
        assertEquals("project", docChange.getDocLifeCycleState());
        assertEquals("/folder1/doc1", docChange.getDocPath());
        assertEquals(doc1.getId(), docChange.getDocUuid());
        // Delete a document
        TransactionHelper.startTransaction();
        session.followTransition(doc1.getRef(), "delete");
        commitAndWaitForAsyncCompletion();
        docChanges = getDocumentChanges();
        assertEquals(1, docChanges.size());
        docChange = docChanges.get(0);
        assertEquals("test", docChange.getRepositoryId());
        assertEquals("lifecycle_transition_event", docChange.getEventId());
        assertEquals("deleted", docChange.getDocLifeCycleState());
        assertEquals("/folder1/doc1", docChange.getDocPath());
        assertEquals(doc1.getId(), docChange.getDocUuid());
        // Restore a deleted document and move a document in a newly
        // synchronized root
        TransactionHelper.startTransaction();
        session.followTransition(doc1.getRef(), "undelete");
        session.move(doc3.getRef(), folder2.getRef(), null);
        commitAndWaitForAsyncCompletion();
        syncRootPaths.add("/folder2");
        docChanges = getDocumentChanges();
        assertEquals(2, docChanges.size());
        docChange = docChanges.get(0);
        assertEquals("test", docChange.getRepositoryId());
        assertEquals("documentMoved", docChange.getEventId());
        assertEquals("project", docChange.getDocLifeCycleState());
        assertEquals("/folder2/doc3", docChange.getDocPath());
        assertEquals(doc3.getId(), docChange.getDocUuid());
        docChange = docChanges.get(1);
        assertEquals("test", docChange.getRepositoryId());
        assertEquals("lifecycle_transition_event", docChange.getEventId());
        assertEquals("project", docChange.getDocLifeCycleState());
        assertEquals("/folder1/doc1", docChange.getDocPath());
        assertEquals(doc1.getId(), docChange.getDocUuid());
        // Too many changes
        TransactionHelper.startTransaction();
        session.followTransition(doc1.getRef(), "delete");
        session.followTransition(doc2.getRef(), "delete");
        commitAndWaitForAsyncCompletion();
        // Lower the limit below the number of pending changes (2) so the
        // finder is forced to overflow.
        Framework.getProperties().put("org.nuxeo.drive.document.change.limit",
                "1");
        try {
            getDocumentChanges();
            fail("An exception of type TooManyDocumentChangesException should have been thrown since the document change limit is exceeded.");
        } catch (TooManyDocumentChangesException e) {
            // Expected
        }
    }

    /**
     * Exercises the higher-level {@link NuxeoDriveManager} change summary:
     * root registration, status codes ({@code no_changes}, {@code found_changes},
     * {@code too_many_changes}), per-folder summaries and non-synchronizable
     * document filtering.
     */
    @Test
    public void testGetDocumentChangeSummary() throws Exception {
        // No sync roots => shouldn't find any changes
        DocumentChangeSummary docChangeSummary = getDocumentChangeSummary("Administrator");
        assertNotNull(docChangeSummary);
        assertTrue(docChangeSummary.getSyncRootPaths().isEmpty());
        assertTrue(docChangeSummary.getDocumentChanges().isEmpty());
        assertTrue(docChangeSummary.getChangedDocModels().isEmpty());
        assertEquals("no_changes", docChangeSummary.getStatusCode());
        // Register sync roots => should find changes: the newly
        // synchronized root folders as they are updated by the synchronization
        // registration process
        // TODO: uncomment if not needed
        // TransactionHelper.startTransaction();
        nuxeoDriveManager.registerSynchronizationRoot("Administrator", folder1,
                session);
        nuxeoDriveManager.registerSynchronizationRoot("Administrator", folder2,
                session);
        // commitAndWaitForAsyncCompletion();
        docChangeSummary = getDocumentChangeSummary("Administrator");
        Set<String> expectedSyncRootPaths = new HashSet<String>();
        expectedSyncRootPaths.add("/folder1");
        expectedSyncRootPaths.add("/folder2");
        assertEquals(expectedSyncRootPaths, docChangeSummary.getSyncRootPaths());
        assertEquals(2, docChangeSummary.getDocumentChanges().size());
        assertEquals(2, docChangeSummary.getChangedDocModels().size());
        assertEquals("found_changes", docChangeSummary.getStatusCode());
        // Create 3 documents, only 2 in sync roots => should find 2 changes
        TransactionHelper.startTransaction();
        DocumentModel doc1 = session.createDocumentModel("/folder1", "doc1",
                "File");
        doc1.setPropertyValue("file:content", new StringBlob(
                "The content of file 1."));
        doc1 = session.createDocument(doc1);
        DocumentModel doc2 = session.createDocumentModel("/folder2", "doc2",
                "File");
        doc2.setPropertyValue("file:content", new StringBlob(
                "The content of file 2."));
        doc2 = session.createDocument(doc2);
        session.createDocument(session.createDocumentModel("/folder3", "doc3",
                "File"));
        commitAndWaitForAsyncCompletion();
        docChangeSummary = getDocumentChangeSummary("Administrator");
        assertEquals(expectedSyncRootPaths, docChangeSummary.getSyncRootPaths());
        List<DocumentChange> docChanges = docChangeSummary.getDocumentChanges();
        assertEquals(2, docChanges.size());
        DocumentChange docChange = docChanges.get(0);
        assertEquals("test", docChange.getRepositoryId());
        assertEquals("documentCreated", docChange.getEventId());
        // TODO: understand why the life cycle is not good
        // assertEquals("project", docChange.getDocLifeCycleState());
        assertEquals("/folder2/doc2", docChange.getDocPath());
        assertEquals(doc2.getId(), docChange.getDocUuid());
        docChange = docChanges.get(1);
        assertEquals("test", docChange.getRepositoryId());
        assertEquals("documentCreated", docChange.getEventId());
        // TODO: understand why the life cycle is not good
        // assertEquals("project", docChange.getDocLifeCycleState());
        assertEquals("/folder1/doc1", docChange.getDocPath());
        assertEquals(doc1.getId(), docChange.getDocUuid());
        // The summary also carries the refreshed DocumentModels, keyed by id.
        Map<String, DocumentModel> changedDocModels = docChangeSummary.getChangedDocModels();
        assertEquals(2, changedDocModels.size());
        DocumentModel changedDoc = changedDocModels.get(doc1.getId());
        assertNotNull(changedDoc);
        assertEquals(doc1.getId(), changedDoc.getId());
        assertEquals("/folder1/doc1", changedDoc.getPathAsString());
        assertEquals("doc1", changedDoc.getName());
        assertEquals("doc1", changedDoc.getTitle());
        assertEquals("File", changedDoc.getType());
        assertEquals("project", changedDoc.getCurrentLifeCycleState());
        changedDoc = changedDocModels.get(doc2.getId());
        assertNotNull(changedDoc);
        assertEquals(doc2.getId(), changedDoc.getId());
        assertEquals("/folder2/doc2", changedDoc.getPathAsString());
        assertEquals("doc2", changedDoc.getName());
        assertEquals("doc2", changedDoc.getTitle());
        assertEquals("File", changedDoc.getType());
        assertEquals("project", changedDoc.getCurrentLifeCycleState());
        assertEquals("found_changes", docChangeSummary.getStatusCode());
        // Create a document that should not be synchronized because not
        // adaptable as a FileSystemItem (not Folderish nor a BlobHolder with a
        // blob) => should not be considered as a change
        TransactionHelper.startTransaction();
        session.createDocument(session.createDocumentModel("/folder1",
                "notSynchronizableDoc", "NotSynchronizable"));
        commitAndWaitForAsyncCompletion();
        docChangeSummary = getDocumentChangeSummary("Administrator");
        assertTrue(docChangeSummary.getDocumentChanges().isEmpty());
        assertTrue(docChangeSummary.getChangedDocModels().isEmpty());
        assertEquals("no_changes", docChangeSummary.getStatusCode());
        // Create 2 documents in the same sync root: "/folder1" and 1 document
        // in another sync root => should find 2 changes for "/folder1"
        TransactionHelper.startTransaction();
        DocumentModel doc3 = session.createDocumentModel("/folder1", "doc3",
                "File");
        doc3.setPropertyValue("file:content", new StringBlob(
                "The content of file 3."));
        doc3 = session.createDocument(doc3);
        DocumentModel doc4 = session.createDocumentModel("/folder1", "doc4",
                "File");
        doc4.setPropertyValue("file:content", new StringBlob(
                "The content of file 4."));
        doc4 = session.createDocument(doc4);
        DocumentModel doc5 = session.createDocumentModel("/folder2", "doc5",
                "File");
        doc5.setPropertyValue("file:content", new StringBlob(
                "The content of file 5."));
        doc5 = session.createDocument(doc5);
        commitAndWaitForAsyncCompletion();
        // Folder-scoped summary only reports /folder1, ignoring doc5.
        docChangeSummary = getFolderDocumentChangeSummary("/folder1");
        expectedSyncRootPaths.remove("/folder2");
        assertEquals(expectedSyncRootPaths, docChangeSummary.getSyncRootPaths());
        assertEquals(2, docChangeSummary.getDocumentChanges().size());
        assertEquals(2, docChangeSummary.getChangedDocModels().size());
        assertEquals("found_changes", docChangeSummary.getStatusCode());
        // No changes since last successful sync
        docChangeSummary = getDocumentChangeSummary("Administrator");
        expectedSyncRootPaths.add("/folder2");
        assertEquals(expectedSyncRootPaths, docChangeSummary.getSyncRootPaths());
        assertTrue(docChangeSummary.getDocumentChanges().isEmpty());
        assertTrue(docChangeSummary.getChangedDocModels().isEmpty());
        assertEquals("no_changes", docChangeSummary.getStatusCode());
        // Too many changes
        TransactionHelper.startTransaction();
        session.followTransition(doc1.getRef(), "delete");
        session.followTransition(doc2.getRef(), "delete");
        commitAndWaitForAsyncCompletion();
        Framework.getProperties().put("org.nuxeo.drive.document.change.limit",
                "1");
        // Unlike the raw finder, the summary reports the overflow as a status
        // code instead of throwing.
        docChangeSummary = getDocumentChangeSummary("Administrator");
        assertEquals(expectedSyncRootPaths, docChangeSummary.getSyncRootPaths());
        assertTrue(docChangeSummary.getDocumentChanges().isEmpty());
        assertTrue(docChangeSummary.getChangedDocModels().isEmpty());
        assertEquals("too_many_changes", docChangeSummary.getStatusCode());
    }

    /**
     * Gets the document changes using the {@link AuditDocumentChangeFinder} and
     * updates the {@link #lastSuccessfulSync} date.
     */
    protected List<DocumentChange> getDocumentChanges()
            throws TooManyDocumentChangesException {
        List<DocumentChange> docChanges = documentChangeFinder.getDocumentChanges(
                true,
                session,
                syncRootPaths,
                lastSuccessfulSync,
                Integer.parseInt(Framework.getProperty("org.nuxeo.drive.document.change.limit")));
        assertNotNull(docChanges);
        lastSuccessfulSync = System.currentTimeMillis();
        return docChanges;
    }

    /**
     * Gets the document changes summary for the given user's synchronization
     * roots using the {@link NuxeoDriveManager} and updates the
     * {@link #lastSuccessfulSync} date.
     */
    protected DocumentChangeSummary getDocumentChangeSummary(String userName)
            throws ClientException {
        DocumentChangeSummary docChangeSummary = nuxeoDriveManager.getDocumentChangeSummary(
                true, userName, session, lastSuccessfulSync);
        assertNotNull(docChangeSummary);
        lastSuccessfulSync = docChangeSummary.getSyncDate();
        return docChangeSummary;
    }

    /**
     * Gets the document changes summary for the given folder using the
     * {@link NuxeoDriveManager} and updates the {@link #lastSuccessfulSync}
     * date.
     */
    protected DocumentChangeSummary getFolderDocumentChangeSummary(
            String folderPath) throws ClientException {
        DocumentChangeSummary docChangeSummary = nuxeoDriveManager.getFolderDocumentChangeSummary(
                folderPath, session, lastSuccessfulSync);
        assertNotNull(docChangeSummary);
        lastSuccessfulSync = docChangeSummary.getSyncDate();
        return docChangeSummary;
    }

    /**
     * Commits (or rolls back) the current transaction, releases the underlying
     * core session and waits for asynchronous listeners so the audit log is up
     * to date before the next query.
     */
    protected void commitAndWaitForAsyncCompletion() throws Exception {
        TransactionHelper.commitOrRollbackTransaction();
        dispose(session);
        eventService.waitForAsyncCompletion();
    }

    /**
     * Releases the {@link LocalSession} wrapped by the given session, first
     * unwrapping a {@link TransactionalCoreSessionWrapper} proxy via
     * reflection if needed.
     */
    protected void dispose(CoreSession session) throws Exception {
        if (Proxy.isProxyClass(session.getClass())) {
            InvocationHandler handler = Proxy.getInvocationHandler(session);
            if (handler instanceof TransactionalCoreSessionWrapper) {
                // Grab the real session out of the wrapper's private field.
                Field field = TransactionalCoreSessionWrapper.class.getDeclaredField("session");
                field.setAccessible(true);
                session = (CoreSession) field.get(handler);
            }
        }
        if (!(session instanceof LocalSession)) {
            throw new UnsupportedOperationException(
                    "Cannot dispose session of class " + session.getClass());
        }
        ((LocalSession) session).getSession().dispose();
    }
}
|
Fix comment
|
nuxeo-drive-server/nuxeo-drive-core/src/test/java/org/nuxeo/drive/service/TestAuditDocumentChangeFinder.java
|
Fix comment
|
|
Java
|
apache-2.0
|
3368226d49640f24f9cf03bc5e78761024e5ccc3
| 0
|
sequenceiq/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,sequenceiq/cloudbreak,sequenceiq/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,sequenceiq/cloudbreak,sequenceiq/cloudbreak
|
package com.sequenceiq.cloudbreak.service.stack;
import java.util.Map;
import java.util.Set;
import javax.annotation.Resource;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.stereotype.Service;
import org.springframework.util.DigestUtils;
import com.google.common.base.Optional;
import com.sequenceiq.cloudbreak.conf.ReactorConfig;
import com.sequenceiq.cloudbreak.controller.BadRequestException;
import com.sequenceiq.cloudbreak.controller.NotFoundException;
import com.sequenceiq.cloudbreak.domain.CbUser;
import com.sequenceiq.cloudbreak.domain.CbUserRole;
import com.sequenceiq.cloudbreak.domain.CloudPlatform;
import com.sequenceiq.cloudbreak.domain.InstanceMetaData;
import com.sequenceiq.cloudbreak.domain.Stack;
import com.sequenceiq.cloudbreak.domain.StackDescription;
import com.sequenceiq.cloudbreak.domain.Status;
import com.sequenceiq.cloudbreak.domain.StatusRequest;
import com.sequenceiq.cloudbreak.domain.Template;
import com.sequenceiq.cloudbreak.logger.MDCBuilder;
import com.sequenceiq.cloudbreak.repository.ClusterRepository;
import com.sequenceiq.cloudbreak.repository.RetryingStackUpdater;
import com.sequenceiq.cloudbreak.repository.StackRepository;
import com.sequenceiq.cloudbreak.repository.TemplateRepository;
import com.sequenceiq.cloudbreak.service.DuplicateKeyValueException;
import com.sequenceiq.cloudbreak.service.cluster.flow.AmbariClusterConnector;
import com.sequenceiq.cloudbreak.service.stack.connector.ProvisionSetup;
import com.sequenceiq.cloudbreak.service.stack.event.ProvisionRequest;
import com.sequenceiq.cloudbreak.service.stack.event.StackDeleteRequest;
import com.sequenceiq.cloudbreak.service.stack.event.StackStatusUpdateRequest;
import com.sequenceiq.cloudbreak.service.stack.event.UpdateInstancesRequest;
import com.sequenceiq.cloudbreak.service.stack.flow.DescribeContext;
import com.sequenceiq.cloudbreak.service.stack.flow.MetadataIncompleteException;
import reactor.core.Reactor;
import reactor.event.Event;
@Service
public class DefaultStackService implements StackService {

    private static final Logger LOGGER = LoggerFactory.getLogger(DefaultStackService.class);

    @Autowired
    private StackRepository stackRepository;

    @Autowired
    private TemplateRepository templateRepository;

    @Autowired
    private RetryingStackUpdater stackUpdater;

    @Autowired
    private Reactor reactor;

    @Autowired
    private ClusterRepository clusterRepository;

    @Autowired
    private DescribeContext describeContext;

    @Autowired
    private AmbariClusterConnector ambariClusterConnector;

    @Resource
    private Map<CloudPlatform, ProvisionSetup> provisionSetups;

    @Override
    public Set<Stack> retrievePrivateStacks(CbUser user) {
        return stackRepository.findForUser(user.getUserId());
    }

    @Override
    public Set<Stack> retrieveAccountStacks(CbUser user) {
        // Admins see every stack of the account, regular users only public ones.
        if (user.getRoles().contains(CbUserRole.ADMIN)) {
            return stackRepository.findAllInAccount(user.getAccount());
        } else {
            return stackRepository.findPublicsInAccount(user.getAccount());
        }
    }

    /**
     * Looks up a stack by id.
     *
     * @throws NotFoundException if no stack exists with the given id
     */
    @Override
    public Stack get(Long id) {
        Stack stack = stackRepository.findOne(id);
        // Fix: check for existence before building the MDC context; the
        // previous order passed a possibly-null stack to MDCBuilder.
        if (stack == null) {
            throw new NotFoundException(String.format("Stack '%s' not found", id));
        }
        MDCBuilder.buildMdcContext(stack);
        return stack;
    }

    /**
     * Looks up the stack that owns the given Ambari server address.
     *
     * @throws NotFoundException if no stack matches the address
     */
    @Override
    public Stack get(String ambariAddress) {
        Stack stack = stackRepository.findByAmbari(ambariAddress);
        if (stack == null) {
            throw new NotFoundException(String.format("Stack not found by Ambari address: '%s' not found", ambariAddress));
        }
        return stack;
    }

    /**
     * Persists a new stack for the given user and fires the asynchronous
     * provisioning request event.
     *
     * @throws BadRequestException if the cloud-platform pre-provision check fails
     * @throws DuplicateKeyValueException if a stack with the same name already exists
     */
    @Override
    public Stack create(CbUser user, Stack stack) {
        MDCBuilder.buildMdcContext(stack);
        Stack savedStack = null;
        Template template = templateRepository.findOne(stack.getTemplate().getId());
        stack.setOwner(user.getUserId());
        stack.setAccount(user.getAccount());
        stack.setHash(generateHash(stack));
        // Let the target cloud platform veto the provisioning up front.
        Optional<String> result = provisionSetups.get(stack.getTemplate().cloudPlatform()).preProvisionCheck(stack);
        if (result.isPresent()) {
            throw new BadRequestException(result.orNull());
        } else {
            try {
                savedStack = stackRepository.save(stack);
                LOGGER.info("Publishing {} event [StackId: '{}']", ReactorConfig.PROVISION_REQUEST_EVENT, stack.getId());
                reactor.notify(ReactorConfig.PROVISION_REQUEST_EVENT, Event.wrap(new ProvisionRequest(template.cloudPlatform(), stack.getId())));
            } catch (DataIntegrityViolationException ex) {
                // Unique-constraint violation => the stack name is taken.
                throw new DuplicateKeyValueException(stack.getName(), ex);
            }
            return savedStack;
        }
    }

    /**
     * Fires the asynchronous delete request event for the given stack.
     *
     * @throws NotFoundException if no stack exists with the given id
     */
    @Override
    public void delete(Long id) {
        Stack stack = stackRepository.findOne(id);
        // Fix: null check must come before using the stack (MDC context, logging).
        if (stack == null) {
            throw new NotFoundException(String.format("Stack '%s' not found", id));
        }
        MDCBuilder.buildMdcContext(stack);
        LOGGER.info("Stack delete requested.");
        LOGGER.info("Publishing {} event.", ReactorConfig.DELETE_REQUEST_EVENT);
        reactor.notify(ReactorConfig.DELETE_REQUEST_EVENT, Event.wrap(new StackDeleteRequest(stack.getTemplate().cloudPlatform(), stack.getId())));
    }

    /**
     * Starts or stops a stack by firing the status-update event, after
     * validating that the stack and its cluster are in a compatible state.
     *
     * @throws NotFoundException if no stack exists with the given id
     * @throws BadRequestException if the requested transition is not allowed
     */
    @Override
    public void updateStatus(Long stackId, StatusRequest status) {
        Stack stack = stackRepository.findOne(stackId);
        // Consistent with get()/delete(): report missing stacks explicitly
        // instead of failing with a NullPointerException below.
        if (stack == null) {
            throw new NotFoundException(String.format("Stack '%s' not found", stackId));
        }
        MDCBuilder.buildMdcContext(stack);
        Status stackStatus = stack.getStatus();
        if (status.equals(StatusRequest.STARTED)) {
            if (!Status.STOPPED.equals(stackStatus)) {
                throw new BadRequestException(String.format("Cannot update the status of stack '%s' to STARTED, because it isn't in STOPPED state.", stackId));
            }
            stackUpdater.updateStackStatus(stackId, Status.START_IN_PROGRESS);
            LOGGER.info("Publishing {} event", ReactorConfig.STACK_STATUS_UPDATE_EVENT);
            reactor.notify(ReactorConfig.STACK_STATUS_UPDATE_EVENT,
                    Event.wrap(new StackStatusUpdateRequest(stack.getTemplate().cloudPlatform(), stack.getId(), status)));
        } else {
            Status clusterStatus = clusterRepository.findOneWithLists(stack.getCluster().getId()).getStatus();
            if (Status.STOP_IN_PROGRESS.equals(clusterStatus)) {
                // Cluster stop still running: just flag the stack stop as requested.
                stackUpdater.updateStackStatus(stackId, Status.STOP_REQUESTED);
            } else {
                if (!Status.AVAILABLE.equals(stackStatus)) {
                    throw new BadRequestException(
                            String.format("Cannot update the status of stack '%s' to STOPPED, because it isn't in AVAILABLE state.", stackId));
                }
                if (!Status.STOPPED.equals(clusterStatus)) {
                    throw new BadRequestException(
                            String.format("Cannot update the status of stack '%s' to STOPPED, because the cluster is not in STOPPED state.", stackId));
                }
                LOGGER.info("Publishing {} event.", ReactorConfig.STACK_STATUS_UPDATE_EVENT);
                reactor.notify(ReactorConfig.STACK_STATUS_UPDATE_EVENT,
                        Event.wrap(new StackStatusUpdateRequest(stack.getTemplate().cloudPlatform(), stack.getId(), status)));
            }
        }
    }

    /**
     * Scales a stack up or down by {@code scalingAdjustment} nodes (negative
     * values remove nodes) and fires the update-instances event.
     *
     * @throws NotFoundException if no stack exists with the given id
     * @throws BadRequestException if the stack is not running, the adjustment
     *             is zero, or a downscale would remove more nodes than allowed
     */
    @Override
    public void updateNodeCount(Long stackId, Integer scalingAdjustment) {
        Stack stack = stackRepository.findOne(stackId);
        // Consistent with get()/delete(): report missing stacks explicitly.
        if (stack == null) {
            throw new NotFoundException(String.format("Stack '%s' not found", stackId));
        }
        MDCBuilder.buildMdcContext(stack);
        if (!Status.AVAILABLE.equals(stack.getStatus())) {
            throw new BadRequestException(String.format("Stack '%s' is currently in '%s' state. Node count can only be updated if it's running.", stackId,
                    stack.getStatus()));
        }
        if (0 == scalingAdjustment) {
            throw new BadRequestException(String.format("Requested scaling adjustment on stack '%s' is 0. Nothing to do.", stackId));
        }
        if (0 > scalingAdjustment) {
            // Downscale: cannot remove more nodes than exist...
            if (-1 * scalingAdjustment > stack.getNodeCount()) {
                throw new BadRequestException(String.format("There are %s instances in stack '%s'. Cannot remove %s instances.", stack.getNodeCount(), stackId,
                        -1 * scalingAdjustment));
            }
            // ...and no more than the instances flagged as removable.
            int removeableHosts = 0;
            for (InstanceMetaData metadataEntry : stack.getInstanceMetaData()) {
                if (metadataEntry.isRemovable()) {
                    removeableHosts++;
                }
            }
            if (removeableHosts < -1 * scalingAdjustment) {
                throw new BadRequestException(
                        String.format("There are %s removable hosts on stack '%s' but %s were requested. Decomission nodes from the cluster first!",
                                removeableHosts, stackId, scalingAdjustment * -1));
            }
        }
        stackUpdater.updateStackStatus(stack.getId(), Status.UPDATE_IN_PROGRESS);
        LOGGER.info("Publishing {} event [scalingAdjustment: '{}']", ReactorConfig.UPDATE_INSTANCES_REQUEST_EVENT, scalingAdjustment);
        reactor.notify(ReactorConfig.UPDATE_INSTANCES_REQUEST_EVENT,
                Event.wrap(new UpdateInstancesRequest(stack.getTemplate().cloudPlatform(), stack.getId(), scalingAdjustment)));
    }

    /**
     * Builds the cloud-platform-specific description of the given stack after
     * refreshing its cluster state through Ambari.
     */
    @Override
    public StackDescription getStackDescription(Stack stack) {
        MDCBuilder.buildMdcContext(stack);
        ambariClusterConnector.checkClusterState(stack);
        CloudPlatform cp = stack.getTemplate().cloudPlatform();
        LOGGER.debug("Getting stack description for cloud platform: {} ...", cp);
        StackDescription description = describeContext.describeStackWithResources(stack);
        LOGGER.debug("Found stack description {}", description.getClass());
        return description;
    }

    /**
     * Returns the instance metadata of the stack identified by its hash.
     *
     * @throws MetadataIncompleteException if the metadata is not yet complete
     * @throws NotFoundException if no stack matches or it has no metadata
     */
    @Override
    public Set<InstanceMetaData> getMetaData(String hash) {
        Stack stack = stackRepository.findStackByHash(hash);
        if (stack != null) {
            if (!stack.isMetadataReady()) {
                throw new MetadataIncompleteException("Instance metadata is incomplete.");
            }
            if (!stack.getInstanceMetaData().isEmpty()) {
                return stack.getInstanceMetaData();
            }
        }
        throw new NotFoundException("Metadata not found on stack.");
    }

    // Identifier only, not a security measure: MD5 of the reflective hash code
    // of the stack's current field values.
    private String generateHash(Stack stack) {
        int hashCode = HashCodeBuilder.reflectionHashCode(stack);
        return DigestUtils.md5DigestAsHex(String.valueOf(hashCode).getBytes());
    }
}
|
src/main/java/com/sequenceiq/cloudbreak/service/stack/DefaultStackService.java
|
package com.sequenceiq.cloudbreak.service.stack;
import java.util.Map;
import java.util.Set;
import javax.annotation.Resource;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.stereotype.Service;
import org.springframework.util.DigestUtils;
import com.google.common.base.Optional;
import com.sequenceiq.cloudbreak.conf.ReactorConfig;
import com.sequenceiq.cloudbreak.controller.BadRequestException;
import com.sequenceiq.cloudbreak.controller.NotFoundException;
import com.sequenceiq.cloudbreak.domain.CbUser;
import com.sequenceiq.cloudbreak.domain.CbUserRole;
import com.sequenceiq.cloudbreak.domain.CloudPlatform;
import com.sequenceiq.cloudbreak.domain.InstanceMetaData;
import com.sequenceiq.cloudbreak.domain.Stack;
import com.sequenceiq.cloudbreak.domain.StackDescription;
import com.sequenceiq.cloudbreak.domain.Status;
import com.sequenceiq.cloudbreak.domain.StatusRequest;
import com.sequenceiq.cloudbreak.domain.Template;
import com.sequenceiq.cloudbreak.logger.MDCBuilder;
import com.sequenceiq.cloudbreak.repository.ClusterRepository;
import com.sequenceiq.cloudbreak.repository.RetryingStackUpdater;
import com.sequenceiq.cloudbreak.repository.StackRepository;
import com.sequenceiq.cloudbreak.repository.TemplateRepository;
import com.sequenceiq.cloudbreak.service.DuplicateKeyValueException;
import com.sequenceiq.cloudbreak.service.cluster.flow.AmbariClusterConnector;
import com.sequenceiq.cloudbreak.service.stack.connector.ProvisionSetup;
import com.sequenceiq.cloudbreak.service.stack.event.ProvisionRequest;
import com.sequenceiq.cloudbreak.service.stack.event.StackDeleteRequest;
import com.sequenceiq.cloudbreak.service.stack.event.StackStatusUpdateRequest;
import com.sequenceiq.cloudbreak.service.stack.event.UpdateInstancesRequest;
import com.sequenceiq.cloudbreak.service.stack.flow.DescribeContext;
import com.sequenceiq.cloudbreak.service.stack.flow.MetadataIncompleteException;
import reactor.core.Reactor;
import reactor.event.Event;
@Service
public class DefaultStackService implements StackService {
private static final Logger LOGGER = LoggerFactory.getLogger(DefaultStackService.class);
@Autowired
private StackRepository stackRepository;
@Autowired
private TemplateRepository templateRepository;
@Autowired
private RetryingStackUpdater stackUpdater;
@Autowired
private Reactor reactor;
@Autowired
private ClusterRepository clusterRepository;
@Autowired
private DescribeContext describeContext;
@Autowired
private AmbariClusterConnector ambariClusterConnector;
@Resource
private Map<CloudPlatform, ProvisionSetup> provisionSetups;
@Override
public Set<Stack> retrievePrivateStacks(CbUser user) {
return stackRepository.findForUser(user.getUserId());
}
@Override
public Set<Stack> retrieveAccountStacks(CbUser user) {
if (user.getRoles().contains(CbUserRole.ADMIN)) {
return stackRepository.findAllInAccount(user.getAccount());
} else {
return stackRepository.findPublicsInAccount(user.getAccount());
}
}
@Override
public Stack get(Long id) {
Stack stack = stackRepository.findOne(id);
MDCBuilder.buildMdcContext(stack);
if (stack == null) {
throw new NotFoundException(String.format("Stack '%s' not found", id));
}
return stack;
}
@Override
public Stack get(String ambariAddress) {
Stack stack = stackRepository.findByAmbari(ambariAddress);
if (stack == null) {
throw new NotFoundException(String.format("Stack not found by Ambari address: '%s' not found", ambariAddress));
}
return stack;
}
@Override
public Stack create(CbUser user, Stack stack) {
MDCBuilder.buildMdcContext(stack);
Stack savedStack = null;
Template template = templateRepository.findOne(stack.getTemplate().getId());
stack.setOwner(user.getUserId());
stack.setAccount(user.getAccount());
stack.setHash(generateHash(stack));
Optional<String> result = provisionSetups.get(stack.getTemplate().cloudPlatform()).preProvisionCheck(stack);
if (result.isPresent()) {
throw new BadRequestException(result.orNull());
} else {
try {
savedStack = stackRepository.save(stack);
LOGGER.info("Publishing {} event [StackId: '{}']", ReactorConfig.PROVISION_REQUEST_EVENT, stack.getId());
reactor.notify(ReactorConfig.PROVISION_REQUEST_EVENT, Event.wrap(new ProvisionRequest(template.cloudPlatform(), stack.getId())));
} catch (DataIntegrityViolationException ex) {
throw new DuplicateKeyValueException(stack.getName(), ex);
}
return savedStack;
}
}
@Override
public void delete(Long id) {
Stack stack = stackRepository.findOne(id);
MDCBuilder.buildMdcContext(stack);
LOGGER.info("Stack delete requested.");
if (stack == null) {
throw new NotFoundException(String.format("Stack '%s' not found", id));
}
LOGGER.info("Publishing {} event.", ReactorConfig.DELETE_REQUEST_EVENT);
reactor.notify(ReactorConfig.DELETE_REQUEST_EVENT, Event.wrap(new StackDeleteRequest(stack.getTemplate().cloudPlatform(), stack.getId())));
}
@Override
public void updateStatus(Long stackId, StatusRequest status) {
Stack stack = stackRepository.findOne(stackId);
MDCBuilder.buildMdcContext(stack);
Status stackStatus = stack.getStatus();
if (status.equals(StatusRequest.STARTED)) {
if (!Status.STOPPED.equals(stackStatus)) {
throw new BadRequestException(String.format("Cannot update the status of stack '%s' to STARTED, because it isn't in STOPPED state.", stackId));
}
stack.setStatus(Status.START_IN_PROGRESS);
stackRepository.save(stack);
LOGGER.info("Publishing {} event", ReactorConfig.STACK_STATUS_UPDATE_EVENT);
reactor.notify(ReactorConfig.STACK_STATUS_UPDATE_EVENT,
Event.wrap(new StackStatusUpdateRequest(stack.getTemplate().cloudPlatform(), stack.getId(), status)));
} else {
Status clusterStatus = clusterRepository.findOneWithLists(stack.getCluster().getId()).getStatus();
if (Status.STOP_IN_PROGRESS.equals(clusterStatus)) {
stack.setStatus(Status.STOP_REQUESTED);
stackRepository.save(stack);
} else {
if (!Status.AVAILABLE.equals(stackStatus)) {
throw new BadRequestException(
String.format("Cannot update the status of stack '%s' to STOPPED, because it isn't in AVAILABLE state.", stackId));
}
if (!Status.STOPPED.equals(clusterStatus)) {
throw new BadRequestException(
String.format("Cannot update the status of stack '%s' to STOPPED, because the cluster is not in STOPPED state.", stackId));
}
LOGGER.info("Publishing {} event.", ReactorConfig.STACK_STATUS_UPDATE_EVENT);
reactor.notify(ReactorConfig.STACK_STATUS_UPDATE_EVENT,
Event.wrap(new StackStatusUpdateRequest(stack.getTemplate().cloudPlatform(), stack.getId(), status)));
}
}
}
@Override
public void updateNodeCount(Long stackId, Integer scalingAdjustment) {
Stack stack = stackRepository.findOne(stackId);
MDCBuilder.buildMdcContext(stack);
if (!Status.AVAILABLE.equals(stack.getStatus())) {
throw new BadRequestException(String.format("Stack '%s' is currently in '%s' state. Node count can only be updated if it's running.", stackId,
stack.getStatus()));
}
if (0 == scalingAdjustment) {
throw new BadRequestException(String.format("Requested scaling adjustment on stack '%s' is 0. Nothing to do.", stackId));
}
if (0 > scalingAdjustment) {
if (-1 * scalingAdjustment > stack.getNodeCount()) {
throw new BadRequestException(String.format("There are %s instances in stack '%s'. Cannot remove %s instances.", stack.getNodeCount(), stackId,
-1 * scalingAdjustment));
}
int removeableHosts = 0;
for (InstanceMetaData metadataEntry : stack.getInstanceMetaData()) {
if (metadataEntry.isRemovable()) {
removeableHosts++;
}
}
if (removeableHosts < -1 * scalingAdjustment) {
throw new BadRequestException(
String.format("There are %s removable hosts on stack '%s' but %s were requested. Decomission nodes from the cluster first!",
removeableHosts, stackId, scalingAdjustment * -1));
}
}
stackUpdater.updateStackStatus(stack.getId(), Status.UPDATE_IN_PROGRESS);
LOGGER.info("Publishing {} event [scalingAdjustment: '{}']", ReactorConfig.UPDATE_INSTANCES_REQUEST_EVENT, scalingAdjustment);
reactor.notify(ReactorConfig.UPDATE_INSTANCES_REQUEST_EVENT,
Event.wrap(new UpdateInstancesRequest(stack.getTemplate().cloudPlatform(), stack.getId(), scalingAdjustment)));
}
@Override
public StackDescription getStackDescription(Stack stack) {
MDCBuilder.buildMdcContext(stack);
ambariClusterConnector.checkClusterState(stack);
CloudPlatform cp = stack.getTemplate().cloudPlatform();
LOGGER.debug("Getting stack description for cloud platform: {} ...", cp);
StackDescription description = describeContext.describeStackWithResources(stack);
LOGGER.debug("Found stack description {}", description.getClass());
return description;
}
@Override
public Set<InstanceMetaData> getMetaData(String hash) {
Stack stack = stackRepository.findStackByHash(hash);
if (stack != null) {
if (!stack.isMetadataReady()) {
throw new MetadataIncompleteException("Instance metadata is incomplete.");
}
if (!stack.getInstanceMetaData().isEmpty()) {
return stack.getInstanceMetaData();
}
}
throw new NotFoundException("Metadata not found on stack.");
}
private String generateHash(Stack stack) {
int hashCode = HashCodeBuilder.reflectionHashCode(stack);
return DigestUtils.md5DigestAsHex(String.valueOf(hashCode).getBytes());
}
}
|
CLOUD-282 update stack status with stack updater in DefaultStackService
|
src/main/java/com/sequenceiq/cloudbreak/service/stack/DefaultStackService.java
|
CLOUD-282 update stack status with stack updater in DefaultStackService
|
|
Java
|
apache-2.0
|
400cc7842544514fa1f7f50b1005404b2eb5e76f
| 0
|
kexinrong/macrobase,kexinrong/macrobase,kexinrong/macrobase,stanford-futuredata/macrobase,stanford-futuredata/macrobase,stanford-futuredata/macrobase,kexinrong/macrobase,kexinrong/macrobase,kexinrong/macrobase,kexinrong/macrobase,stanford-futuredata/macrobase,stanford-futuredata/macrobase
|
package macrobase.ingest;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import io.dropwizard.db.DataSourceFactory;
import io.dropwizard.db.ManagedDataSource;
import macrobase.MacroBase;
import macrobase.datamodel.Datum;
import macrobase.ingest.result.ColumnValue;
import macrobase.ingest.result.RowSet;
import macrobase.ingest.result.Schema;
import macrobase.runtime.resources.RowSetResource;
import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
import java.util.StringJoiner;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
public abstract class SQLLoader extends DataLoader{
abstract public String getDriverClass();
abstract public String getJDBCUrlPrefix();
@SuppressWarnings("unused")
private static final Logger log = LoggerFactory.getLogger(SQLLoader.class);
private ManagedDataSource source;
private Connection connection;
private String dbUser;
private String dbPassword;
private String removeLimit(String sql) {
return sql.replaceAll("LIMIT\\s\\d+", "");
}
private String removeSqlJunk(String sql) {
return sql.replaceAll(";", "");
}
public void setDatabaseCredentials(String user, String password) {
this.dbUser = user;
this.dbPassword = password;
}
public void connect(String pgUrl) throws SQLException {
DataSourceFactory factory = new DataSourceFactory();
factory.setDriverClass(getDriverClass());
factory.setUrl(getJDBCUrlPrefix()+pgUrl);
if (this.dbUser != null) {
factory.setUser(this.dbUser);
}
if (this.dbPassword != null) {
factory.setPassword(this.dbPassword);
}
source = factory.build(MacroBase.metrics, "postgres");
connection = source.getConnection();
}
public Schema getSchema(String baseQuery)
throws SQLException {
Statement stmt = connection.createStatement();
String sql = String.format("%s LIMIT 1", removeSqlJunk(removeLimit(baseQuery)));
ResultSet rs = stmt.executeQuery(sql);
List<Schema.SchemaColumn> columns = Lists.newArrayList();
for(int i = 1; i <= rs.getMetaData().getColumnCount(); ++i) {
columns.add(new Schema.SchemaColumn(rs.getMetaData().getColumnName(i),
rs.getMetaData().getColumnTypeName(i)));
}
return new Schema(columns);
}
public RowSet getRows(String baseQuery,
List<RowSetResource.RowSetRequest.RowRequestPair> preds,
int limit,
int offset) throws SQLException {
Statement stmt = connection.createStatement();
String sql = removeSqlJunk(removeLimit(baseQuery));
if(preds.size() > 0) {
StringJoiner sj = new StringJoiner(" AND ");
preds.stream().forEach(e -> sj.add(String.format("%s = '%s'", e.column, e.value)));
if(!sql.toLowerCase().contains("where")) {
sql += " WHERE ";
} else {
sql += " AND ";
}
sql += sj.toString();
}
sql += String.format(" LIMIT %d OFFSET %d", limit, offset);
ResultSet rs = stmt.executeQuery(sql);
List<RowSet.Row> rows = Lists.newArrayList();
while(rs.next()) {
List<ColumnValue> columnValues = Lists.newArrayList();
for(int i = 1; i <= rs.getMetaData().getColumnCount(); ++i) {
columnValues.add(
new ColumnValue(rs.getMetaData().getColumnName(i),
rs.getString(i)));
}
rows.add(new RowSet.Row(columnValues));
}
return new RowSet(rows);
}
public List<Datum> getData(DatumEncoder encoder,
List<String> attributes,
List<String> lowMetrics,
List<String> highMetrics,
String baseQuery) throws SQLException, IOException {
String targetColumns = StreamSupport.stream(
Iterables.concat(attributes, lowMetrics, highMetrics).spliterator(), false)
.collect(Collectors.joining(", "));
String sql = String.format("SELECT %s FROM (%s) baseQuery",
targetColumns,
removeSqlJunk(baseQuery));
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery(sql);
for(int i = 1; i <= rs.getMetaData().getColumnCount(); ++i) {
encoder.recordAttributeName(i, rs.getMetaData().getColumnName(i));
}
List<Datum> ret = Lists.newArrayList();
RealVector metricWiseMinVec = new ArrayRealVector(lowMetrics.size() + highMetrics.size());
RealVector metricWiseMaxVec = new ArrayRealVector(lowMetrics.size() + highMetrics.size());
while(rs.next()) {
List<Integer> attrList = new ArrayList<>(attributes.size());
int i = 1;
for(; i <= attributes.size(); ++i) {
attrList.add(encoder.getIntegerEncoding(i, rs.getString(i)));
}
RealVector metricVec = new ArrayRealVector(lowMetrics.size() + highMetrics.size());
int vecPos = 0;
for(; i <= attributes.size() + lowMetrics.size(); ++i) {
double val = Math.pow(Math.max(rs.getDouble(i), 0.1), -1);
metricVec.setEntry(vecPos, val);
if(metricWiseMinVec.getEntry(vecPos) > val) {
metricWiseMinVec.setEntry(vecPos, val);
}
if(metricWiseMaxVec.getEntry(vecPos) < val) {
metricWiseMaxVec.setEntry(vecPos, val);
}
vecPos += 1;
}
for(; i <= attributes.size() + lowMetrics.size() + highMetrics.size(); ++i) {
double val = rs.getDouble(i);
metricVec.setEntry(vecPos, val);
if(metricWiseMinVec.getEntry(vecPos) > val) {
metricWiseMinVec.setEntry(vecPos, val);
}
if(metricWiseMaxVec.getEntry(vecPos) < val) {
metricWiseMaxVec.setEntry(vecPos, val);
}
vecPos += 1;
}
ret.add(new Datum(attrList, metricVec));
}
// normalize data
for(Datum d : ret) {
// ebeDivide returns a copy; avoid a copy at the expense of ugly code
RealVector metrics = d.getMetrics();
for(int dim = 0; dim < metrics.getDimension(); ++dim) {
double dimMin = metricWiseMinVec.getEntry(dim);
double dimMax = metricWiseMaxVec.getEntry(dim);
if(dimMax - dimMin == 0) {
log.warn("No difference between min and max in dimension {}!", dim);
metrics.setEntry(dim, 0);
continue;
}
double cur = metrics.getEntry(dim);
metrics.setEntry(dim, (cur - dimMin)/(dimMax - dimMin));
}
}
return ret;
}
}
|
src/main/java/macrobase/ingest/SQLLoader.java
|
package macrobase.ingest;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import io.dropwizard.db.DataSourceFactory;
import io.dropwizard.db.ManagedDataSource;
import macrobase.MacroBase;
import macrobase.datamodel.Datum;
import macrobase.ingest.result.ColumnValue;
import macrobase.ingest.result.RowSet;
import macrobase.ingest.result.Schema;
import macrobase.runtime.resources.RowSetResource;
import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
import java.util.StringJoiner;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
public abstract class SQLLoader extends DataLoader{
abstract public String getDriverClass();
abstract public String getJDBCUrlPrefix();
@SuppressWarnings("unused")
private static final Logger log = LoggerFactory.getLogger(SQLLoader.class);
private ManagedDataSource source;
private Connection connection;
private String dbUser;
private String dbPassword;
private String removeLimit(String sql) {
return sql.replaceAll("LIMIT\\s\\d+", "");
}
private String removeSqlJunk(String sql) {
return sql.replaceAll(";", "").replaceAll("'", "''");
}
public void setDatabaseCredentials(String user, String password) {
this.dbUser = user;
this.dbPassword = password;
}
public void connect(String pgUrl) throws SQLException {
DataSourceFactory factory = new DataSourceFactory();
factory.setDriverClass(getDriverClass());
factory.setUrl(getJDBCUrlPrefix()+pgUrl);
if (this.dbUser != null) {
factory.setUser(this.dbUser);
}
if (this.dbPassword != null) {
factory.setPassword(this.dbPassword);
}
source = factory.build(MacroBase.metrics, "postgres");
connection = source.getConnection();
}
public Schema getSchema(String baseQuery)
throws SQLException {
Statement stmt = connection.createStatement();
String sql = String.format("%s LIMIT 1", removeSqlJunk(removeLimit(baseQuery)));
ResultSet rs = stmt.executeQuery(sql);
List<Schema.SchemaColumn> columns = Lists.newArrayList();
for(int i = 1; i <= rs.getMetaData().getColumnCount(); ++i) {
columns.add(new Schema.SchemaColumn(rs.getMetaData().getColumnName(i),
rs.getMetaData().getColumnTypeName(i)));
}
return new Schema(columns);
}
public RowSet getRows(String baseQuery,
List<RowSetResource.RowSetRequest.RowRequestPair> preds,
int limit,
int offset) throws SQLException {
Statement stmt = connection.createStatement();
String sql = removeSqlJunk(removeLimit(baseQuery));
if(preds.size() > 0) {
StringJoiner sj = new StringJoiner(" AND ");
preds.stream().forEach(e -> sj.add(String.format("%s = '%s'", e.column, e.value)));
if(!sql.toLowerCase().contains("where")) {
sql += " WHERE ";
} else {
sql += " AND ";
}
sql += sj.toString();
}
sql += String.format(" LIMIT %d OFFSET %d", limit, offset);
ResultSet rs = stmt.executeQuery(sql);
List<RowSet.Row> rows = Lists.newArrayList();
while(rs.next()) {
List<ColumnValue> columnValues = Lists.newArrayList();
for(int i = 1; i <= rs.getMetaData().getColumnCount(); ++i) {
columnValues.add(
new ColumnValue(rs.getMetaData().getColumnName(i),
rs.getString(i)));
}
rows.add(new RowSet.Row(columnValues));
}
return new RowSet(rows);
}
public List<Datum> getData(DatumEncoder encoder,
List<String> attributes,
List<String> lowMetrics,
List<String> highMetrics,
String baseQuery) throws SQLException, IOException {
String targetColumns = StreamSupport.stream(
Iterables.concat(attributes, lowMetrics, highMetrics).spliterator(), false)
.collect(Collectors.joining(", "));
String sql = String.format("SELECT %s FROM (%s) baseQuery",
targetColumns,
removeSqlJunk(baseQuery));
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery(sql);
for(int i = 1; i <= rs.getMetaData().getColumnCount(); ++i) {
encoder.recordAttributeName(i, rs.getMetaData().getColumnName(i));
}
List<Datum> ret = Lists.newArrayList();
RealVector metricWiseMinVec = new ArrayRealVector(lowMetrics.size() + highMetrics.size());
RealVector metricWiseMaxVec = new ArrayRealVector(lowMetrics.size() + highMetrics.size());
while(rs.next()) {
List<Integer> attrList = new ArrayList<>(attributes.size());
int i = 1;
for(; i <= attributes.size(); ++i) {
attrList.add(encoder.getIntegerEncoding(i, rs.getString(i)));
}
RealVector metricVec = new ArrayRealVector(lowMetrics.size() + highMetrics.size());
int vecPos = 0;
for(; i <= attributes.size() + lowMetrics.size(); ++i) {
double val = Math.pow(Math.max(rs.getDouble(i), 0.1), -1);
metricVec.setEntry(vecPos, val);
if(metricWiseMinVec.getEntry(vecPos) > val) {
metricWiseMinVec.setEntry(vecPos, val);
}
if(metricWiseMaxVec.getEntry(vecPos) < val) {
metricWiseMaxVec.setEntry(vecPos, val);
}
vecPos += 1;
}
for(; i <= attributes.size() + lowMetrics.size() + highMetrics.size(); ++i) {
double val = rs.getDouble(i);
metricVec.setEntry(vecPos, val);
if(metricWiseMinVec.getEntry(vecPos) > val) {
metricWiseMinVec.setEntry(vecPos, val);
}
if(metricWiseMaxVec.getEntry(vecPos) < val) {
metricWiseMaxVec.setEntry(vecPos, val);
}
vecPos += 1;
}
ret.add(new Datum(attrList, metricVec));
}
// normalize data
for(Datum d : ret) {
// ebeDivide returns a copy; avoid a copy at the expense of ugly code
RealVector metrics = d.getMetrics();
for(int dim = 0; dim < metrics.getDimension(); ++dim) {
double dimMin = metricWiseMinVec.getEntry(dim);
double dimMax = metricWiseMaxVec.getEntry(dim);
if(dimMax - dimMin == 0) {
log.warn("No difference between min and max in dimension {}!", dim);
metrics.setEntry(dim, 0);
continue;
}
double cur = metrics.getEntry(dim);
metrics.setEntry(dim, (cur - dimMin)/(dimMax - dimMin));
}
}
return ret;
}
}
|
Do not escape single quotes in SQLLoader.
|
src/main/java/macrobase/ingest/SQLLoader.java
|
Do not escape single quotes in SQLLoader.
|
|
Java
|
apache-2.0
|
d2253da4f9a29b507bb6468d98a80b8da1f08789
| 0
|
sjaco002/incubator-asterixdb,waans11/incubator-asterixdb,ty1er/incubator-asterixdb,ty1er/incubator-asterixdb,waans11/incubator-asterixdb,ty1er/incubator-asterixdb,waans11/incubator-asterixdb,heriram/incubator-asterixdb,kisskys/incubator-asterixdb,heriram/incubator-asterixdb,waans11/incubator-asterixdb,waans11/incubator-asterixdb,heriram/incubator-asterixdb,apache/incubator-asterixdb,kisskys/incubator-asterixdb,kisskys/incubator-asterixdb,parshimers/incubator-asterixdb,sjaco002/incubator-asterixdb,ecarm002/incubator-asterixdb,ty1er/incubator-asterixdb,kisskys/incubator-asterixdb,ty1er/incubator-asterixdb,apache/incubator-asterixdb,sjaco002/incubator-asterixdb,heriram/incubator-asterixdb,amoudi87/asterixdb,apache/incubator-asterixdb,parshimers/incubator-asterixdb,kisskys/incubator-asterixdb,amoudi87/asterixdb,amoudi87/asterixdb,kisskys/incubator-asterixdb,ecarm002/incubator-asterixdb,heriram/incubator-asterixdb,apache/incubator-asterixdb,ecarm002/incubator-asterixdb,waans11/incubator-asterixdb,parshimers/incubator-asterixdb,ecarm002/incubator-asterixdb,amoudi87/asterixdb,apache/incubator-asterixdb,kisskys/incubator-asterixdb,parshimers/incubator-asterixdb,apache/incubator-asterixdb,apache/incubator-asterixdb,sjaco002/incubator-asterixdb,ecarm002/incubator-asterixdb,parshimers/incubator-asterixdb,ty1er/incubator-asterixdb,waans11/incubator-asterixdb,amoudi87/asterixdb,amoudi87/asterixdb,sjaco002/incubator-asterixdb,ecarm002/incubator-asterixdb,heriram/incubator-asterixdb,parshimers/incubator-asterixdb,ecarm002/incubator-asterixdb,heriram/incubator-asterixdb,sjaco002/incubator-asterixdb
|
/*
* Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.asterix.aql.translator;
import java.io.File;
import java.io.PrintWriter;
import java.nio.ByteBuffer;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import edu.uci.ics.asterix.api.common.APIFramework;
import edu.uci.ics.asterix.api.common.APIFramework.DisplayFormat;
import edu.uci.ics.asterix.api.common.Job;
import edu.uci.ics.asterix.api.common.SessionConfig;
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
import edu.uci.ics.asterix.aql.expression.DatasetDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.ExternalDetailsDecl;
import edu.uci.ics.asterix.aql.expression.FeedDetailsDecl;
import edu.uci.ics.asterix.aql.expression.FunctionDecl;
import edu.uci.ics.asterix.aql.expression.FunctionDropStatement;
import edu.uci.ics.asterix.aql.expression.Identifier;
import edu.uci.ics.asterix.aql.expression.IndexDropStatement;
import edu.uci.ics.asterix.aql.expression.InsertStatement;
import edu.uci.ics.asterix.aql.expression.InternalDetailsDecl;
import edu.uci.ics.asterix.aql.expression.LoadFromFileStatement;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
import edu.uci.ics.asterix.aql.expression.Query;
import edu.uci.ics.asterix.aql.expression.SetStatement;
import edu.uci.ics.asterix.aql.expression.TypeDecl;
import edu.uci.ics.asterix.aql.expression.TypeDropStatement;
import edu.uci.ics.asterix.aql.expression.WriteFromQueryResultStatement;
import edu.uci.ics.asterix.aql.expression.WriteStatement;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
import edu.uci.ics.asterix.common.config.GlobalConfig;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.file.DatasetOperations;
import edu.uci.ics.asterix.file.FeedOperations;
import edu.uci.ics.asterix.file.IndexOperations;
import edu.uci.ics.asterix.formats.base.IDataFormat;
import edu.uci.ics.asterix.metadata.IDatasetDetails;
import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.MetadataManager;
import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.Datatype;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
import edu.uci.ics.asterix.metadata.entities.FeedDatasetDetails;
import edu.uci.ics.asterix.metadata.entities.Function;
import edu.uci.ics.asterix.metadata.entities.Index;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
import edu.uci.ics.asterix.metadata.entities.NodeGroup;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.om.types.TypeSignature;
import edu.uci.ics.asterix.result.ResultReader;
import edu.uci.ics.asterix.result.ResultUtils;
import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
import edu.uci.ics.asterix.transaction.management.service.transaction.DatasetIdFactory;
import edu.uci.ics.asterix.translator.AbstractAqlTranslator;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledBeginFeedStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledControlFeedStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledDatasetDropStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledDeleteStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexDropStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledInsertStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledWriteFromQueryResultStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;
import edu.uci.ics.asterix.translator.TypeTranslator;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
import edu.uci.ics.hyracks.algebricks.data.IAWriterFactory;
import edu.uci.ics.hyracks.algebricks.data.IResultSerializerFactoryProvider;
import edu.uci.ics.hyracks.algebricks.runtime.serializer.ResultSerializerFactoryProvider;
import edu.uci.ics.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
/*
* Provides functionality for executing a batch of AQL statements (queries included)
* sequentially.
*/
public class AqlTranslator extends AbstractAqlTranslator {
private final List<Statement> aqlStatements;
private final PrintWriter out;
private final SessionConfig sessionConfig;
private final DisplayFormat pdf;
private Dataverse activeDefaultDataverse;
private List<FunctionDecl> declaredFunctions;
public AqlTranslator(List<Statement> aqlStatements, PrintWriter out, SessionConfig pc, DisplayFormat pdf)
throws MetadataException, AsterixException {
this.aqlStatements = aqlStatements;
this.out = out;
this.sessionConfig = pc;
this.pdf = pdf;
declaredFunctions = getDeclaredFunctions(aqlStatements);
}
private List<FunctionDecl> getDeclaredFunctions(List<Statement> statements) {
List<FunctionDecl> functionDecls = new ArrayList<FunctionDecl>();
for (Statement st : statements) {
if (st.getKind().equals(Statement.Kind.FUNCTION_DECL)) {
functionDecls.add((FunctionDecl) st);
}
}
return functionDecls;
}
    /**
     * Compiles and submits for execution a list of AQL statements.
     *
     * @param hcc
     *            A Hyracks client connection that is used to submit a jobspec to Hyracks.
     * @param hdc
     *            A Hyracks dataset client object that is used to read the results.
     * @param asyncResults
     *            True if the results should be read asynchronously or false if we should wait for results to be read.
     * @return A List<QueryResult> containing a QueryResult instance corresponding to each submitted query.
     * @throws Exception
     */
    public List<QueryResult> compileAndExecute(IHyracksClientConnection hcc, IHyracksDataset hdc, boolean asyncResults)
            throws Exception {
        // Each QUERY statement in the batch gets a distinct ResultSetId, assigned in batch order.
        int resultSetIdCounter = 0;
        List<QueryResult> executionResult = new ArrayList<QueryResult>();
        // Output-sink state: WRITE statements may replace these for subsequent statements.
        FileSplit outputFile = null;
        IAWriterFactory writerFactory = PrinterBasedWriterFactory.INSTANCE;
        IResultSerializerFactoryProvider resultSerializerFactoryProvider = ResultSerializerFactoryProvider.INSTANCE;
        // Properties accumulated from SET statements; shared across all later statements in the batch.
        Map<String, String> config = new HashMap<String, String>();
        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
        for (Statement stmt : aqlStatements) {
            validateOperation(activeDefaultDataverse, stmt);
            // A fresh metadata provider per statement, seeded with the writer/output/config
            // state accumulated from earlier statements in the same batch.
            AqlMetadataProvider metadataProvider = new AqlMetadataProvider(activeDefaultDataverse);
            metadataProvider.setWriterFactory(writerFactory);
            metadataProvider.setResultSerializerFactoryProvider(resultSerializerFactoryProvider);
            metadataProvider.setOutputFile(outputFile);
            metadataProvider.setConfig(config);
            jobsToExecute.clear();
            // Dispatch on statement kind. Each handler manages its own metadata transaction
            // and latching internally.
            switch (stmt.getKind()) {
                case SET: {
                    handleSetStatement(metadataProvider, stmt, config);
                    break;
                }
                case DATAVERSE_DECL: {
                    // "use dataverse" changes the default dataverse for the rest of the batch.
                    activeDefaultDataverse = handleUseDataverseStatement(metadataProvider, stmt);
                    break;
                }
                case CREATE_DATAVERSE: {
                    handleCreateDataverseStatement(metadataProvider, stmt);
                    break;
                }
                case DATASET_DECL: {
                    handleCreateDatasetStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case CREATE_INDEX: {
                    handleCreateIndexStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case TYPE_DECL: {
                    handleCreateTypeStatement(metadataProvider, stmt);
                    break;
                }
                case NODEGROUP_DECL: {
                    handleCreateNodeGroupStatement(metadataProvider, stmt);
                    break;
                }
                case DATAVERSE_DROP: {
                    handleDataverseDropStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case DATASET_DROP: {
                    handleDatasetDropStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case INDEX_DROP: {
                    handleIndexDropStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case TYPE_DROP: {
                    handleTypeDropStatement(metadataProvider, stmt);
                    break;
                }
                case NODEGROUP_DROP: {
                    handleNodegroupDropStatement(metadataProvider, stmt);
                    break;
                }
                case CREATE_FUNCTION: {
                    handleCreateFunctionStatement(metadataProvider, stmt);
                    break;
                }
                case FUNCTION_DROP: {
                    handleFunctionDropStatement(metadataProvider, stmt);
                    break;
                }
                case LOAD_FROM_FILE: {
                    handleLoadFromFileStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case WRITE_FROM_QUERY_RESULT: {
                    handleWriteFromQueryResultStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case INSERT: {
                    handleInsertStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case DELETE: {
                    handleDeleteStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case BEGIN_FEED: {
                    handleBeginFeedStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case CONTROL_FEED: {
                    handleControlFeedStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case QUERY: {
                    metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
                    executionResult.add(handleQuery(metadataProvider, (Query) stmt, hcc, hdc, asyncResults));
                    break;
                }
                case WRITE: {
                    // WRITE replaces the output sink used by the statements that follow it.
                    Pair<IAWriterFactory, FileSplit> result = handleWriteStatement(metadataProvider, stmt);
                    if (result.first != null) {
                        writerFactory = result.first;
                    }
                    outputFile = result.second;
                    break;
                }
            }
        }
        return executionResult;
    }
private void handleSetStatement(AqlMetadataProvider metadataProvider, Statement stmt, Map<String, String> config)
throws RemoteException, ACIDException {
SetStatement ss = (SetStatement) stmt;
String pname = ss.getPropName();
String pvalue = ss.getPropValue();
config.put(pname, pvalue);
}
private Pair<IAWriterFactory, FileSplit> handleWriteStatement(AqlMetadataProvider metadataProvider, Statement stmt)
throws InstantiationException, IllegalAccessException, ClassNotFoundException {
WriteStatement ws = (WriteStatement) stmt;
File f = new File(ws.getFileName());
FileSplit outputFile = new FileSplit(ws.getNcName().getValue(), new FileReference(f));
IAWriterFactory writerFactory = null;
if (ws.getWriterClassName() != null) {
writerFactory = (IAWriterFactory) Class.forName(ws.getWriterClassName()).newInstance();
}
return new Pair<IAWriterFactory, FileSplit>(writerFactory, outputFile);
}
private Dataverse handleUseDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
throws MetadataException, RemoteException, ACIDException {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireReadLatch();
try {
DataverseDecl dvd = (DataverseDecl) stmt;
String dvName = dvd.getDataverseName().getValue();
Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
if (dv == null) {
throw new MetadataException("Unknown dataverse " + dvName);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return dv;
} catch (Exception e) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
throw new MetadataException(e);
} finally {
releaseReadLatch();
}
}
private void handleCreateDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
throws MetadataException, AlgebricksException, RemoteException, ACIDException {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireWriteLatch();
try {
CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
String dvName = stmtCreateDataverse.getDataverseName().getValue();
Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
if (dv != null && !stmtCreateDataverse.getIfNotExists()) {
throw new AlgebricksException("A dataverse with this name " + dvName + " already exists.");
}
MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(), new Dataverse(dvName,
stmtCreateDataverse.getFormat(), IMetadataEntity.PENDING_NO_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
throw e;
} finally {
releaseWriteLatch();
}
}
    /**
     * Creates a dataset (INTERNAL, EXTERNAL, or FEED) using the two-phase
     * "pending-op" protocol: the dataset is first recorded with PENDING_ADD_OP,
     * the physical creation job is run outside any metadata transaction, and the
     * record is then replaced with PENDING_NO_OP. On failure, compensation code
     * attempts to drop whatever was physically created and to remove the
     * metadata record.
     */
    private void handleCreateDatasetStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws AsterixException, Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        // Tracks whether a metadata transaction is currently open, so the catch
        // block knows whether an abort is required.
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireWriteLatch();
        String dataverseName = null;
        String datasetName = null;
        Dataset dataset = null;
        try {
            DatasetDecl dd = (DatasetDecl) stmt;
            // Fall back to the session's active default dataverse when none is given.
            dataverseName = dd.getDataverse() != null ? dd.getDataverse().getValue()
                    : activeDefaultDataverse != null ? activeDefaultDataverse.getDataverseName() : null;
            if (dataverseName == null) {
                throw new AlgebricksException(" dataverse not specified ");
            }
            datasetName = dd.getName().getValue();
            DatasetType dsType = dd.getDatasetType();
            String itemTypeName = dd.getItemTypeName().getValue();
            IDatasetDetails datasetDetails = null;
            Dataset ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                    datasetName);
            if (ds != null) {
                if (dd.getIfNotExists()) {
                    // "if not exists" makes a duplicate a no-op.
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("A dataset with this name " + datasetName + " already exists.");
                }
            }
            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
                    itemTypeName);
            if (dt == null) {
                throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
            }
            // Build the type-specific dataset details.
            switch (dd.getDatasetType()) {
                case INTERNAL: {
                    IAType itemType = dt.getDatatype();
                    if (itemType.getTypeTag() != ATypeTag.RECORD) {
                        throw new AlgebricksException("Can only partition ARecord's.");
                    }
                    List<String> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
                            .getPartitioningExprs();
                    ARecordType aRecordType = (ARecordType) itemType;
                    aRecordType.validatePartitioningExpressions(partitioningExprs);
                    String ngName = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName().getValue();
                    datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
                            InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
                            ngName);
                    break;
                }
                case EXTERNAL: {
                    String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
                    Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();
                    datasetDetails = new ExternalDatasetDetails(adapter, properties);
                    break;
                }
                case FEED: {
                    IAType itemType = dt.getDatatype();
                    if (itemType.getTypeTag() != ATypeTag.RECORD) {
                        throw new AlgebricksException("Can only partition ARecord's.");
                    }
                    List<String> partitioningExprs = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
                            .getPartitioningExprs();
                    ARecordType aRecordType = (ARecordType) itemType;
                    aRecordType.validatePartitioningExpressions(partitioningExprs);
                    String ngName = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName().getValue();
                    String adapter = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getAdapterFactoryClassname();
                    Map<String, String> configuration = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
                            .getConfiguration();
                    FunctionSignature signature = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getFunctionSignature();
                    datasetDetails = new FeedDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
                            InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
                            ngName, adapter, configuration, signature, FeedDatasetDetails.FeedState.INACTIVE.toString());
                    break;
                }
            }
            //#. add a new dataset with PendingAddOp
            dataset = new Dataset(dataverseName, datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType,
                    DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
            MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
            // EXTERNAL datasets have no physical storage to create, so no job is needed.
            if (dd.getDatasetType() == DatasetType.INTERNAL || dd.getDatasetType() == DatasetType.FEED) {
                Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
                        dataverseName);
                JobSpecification jobSpec = DatasetOperations.createDatasetJobSpec(dataverse, datasetName,
                        metadataProvider);
                //#. make metadataTxn commit before calling runJob.
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                //#. runJob
                runJob(hcc, jobSpec, true);
                //#. begin new metadataTxn
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                bActiveTxn = true;
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
            }
            //#. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
            MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
            MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), new Dataset(dataverseName,
                    datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType, dataset.getDatasetId(),
                    IMetadataEntity.PENDING_NO_OP));
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e) {
            if (bActiveTxn) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            }
            // dataset != null means the PendingAddOp record (and possibly physical
            // storage) was created, so compensation is required.
            if (dataset != null) {
                //#. execute compensation operations
                // remove the index in NC
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                bActiveTxn = true;
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
                try {
                    JobSpecification jobSpec = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    bActiveTxn = false;
                    runJob(hcc, jobSpec, true);
                } catch (Exception e3) {
                    if (bActiveTxn) {
                        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                    }
                    //do no throw exception since still the metadata needs to be compensated.
                }
                // remove the record from the metadata.
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                try {
                    MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                            datasetName);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                } catch (Exception e2) {
                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                    throw new AlgebricksException(e2);
                }
            }
            throw e;
        } finally {
            releaseWriteLatch();
        }
    }
    /**
     * Creates a secondary index using the two-phase "pending-op" protocol:
     * the index is recorded with PENDING_ADD_OP, the physical creation and
     * bulk-load jobs are run outside any metadata transaction, and the record is
     * then replaced with PENDING_NO_OP. On failure after job-spec creation,
     * compensation code drops the physical index and removes the metadata record.
     */
    private void handleCreateIndexStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        // Tracks whether a metadata transaction is open so the catch block knows
        // whether an abort is required.
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireWriteLatch();
        String dataverseName = null;
        String datasetName = null;
        String indexName = null;
        JobSpecification spec = null;
        try {
            CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
            // Fall back to the session's active default dataverse when none is given.
            dataverseName = stmtCreateIndex.getDataverseName() == null ? activeDefaultDataverse == null ? null
                    : activeDefaultDataverse.getDataverseName() : stmtCreateIndex.getDataverseName().getValue();
            if (dataverseName == null) {
                throw new AlgebricksException(" dataverse not specified ");
            }
            datasetName = stmtCreateIndex.getDatasetName().getValue();
            Dataset ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                    datasetName);
            if (ds == null) {
                throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse "
                        + dataverseName);
            }
            indexName = stmtCreateIndex.getIndexName().getValue();
            Index idx = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                    datasetName, indexName);
            String itemTypeName = ds.getItemTypeName();
            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
                    itemTypeName);
            IAType itemType = dt.getDatatype();
            ARecordType aRecordType = (ARecordType) itemType;
            // Validate the key fields against the record type before any metadata change.
            aRecordType.validateKeyFields(stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getIndexType());
            if (idx != null) {
                if (!stmtCreateIndex.getIfNotExists()) {
                    throw new AlgebricksException("An index with this name " + indexName + " already exists.");
                } else {
                    // "if not exists": mark the statement a no-op and bail out.
                    stmtCreateIndex.setNeedToCreate(false);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                }
            }
            //#. add a new index with PendingAddOp
            Index index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
                    stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getGramLength(), false,
                    IMetadataEntity.PENDING_ADD_OP);
            MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
            //#. create the index artifact in NC.
            CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
                    index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
            spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadataProvider);
            if (spec == null) {
                throw new AsterixException("Failed to create job spec for creating index '"
                        + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
            }
            // Commit before running the job; jobs must not run inside a metadata txn.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            runJob(hcc, spec, true);
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            //#. load data into the index in NC.
            cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName, index.getDatasetName(),
                    index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
            spec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, metadataProvider);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            runJob(hcc, spec, true);
            //#. begin new metadataTxn
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            //#. add another new index with PendingNoOp after deleting the index with PendingAddOp
            MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName,
                    indexName);
            index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
                    stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getGramLength(), false,
                    IMetadataEntity.PENDING_NO_OP);
            MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e) {
            if (bActiveTxn) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            }
            // spec != null means the PendingAddOp record was created (and the physical
            // index may exist), so compensation is required.
            if (spec != null) {
                //#. execute compensation operations
                // remove the index in NC
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                bActiveTxn = true;
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
                try {
                    JobSpecification jobSpec = IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    bActiveTxn = false;
                    runJob(hcc, jobSpec, true);
                } catch (Exception e3) {
                    if (bActiveTxn) {
                        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                    }
                    //do no throw exception since still the metadata needs to be compensated.
                }
                // remove the record from the metadata.
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                try {
                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                            datasetName, indexName);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                } catch (Exception e2) {
                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                    throw new AlgebricksException(e2);
                }
            }
            throw e;
        } finally {
            releaseWriteLatch();
        }
    }
private void handleCreateTypeStatement(AqlMetadataProvider metadataProvider, Statement stmt)
throws AlgebricksException, RemoteException, ACIDException, MetadataException {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireWriteLatch();
try {
TypeDecl stmtCreateType = (TypeDecl) stmt;
String dataverseName = stmtCreateType.getDataverseName() == null ? activeDefaultDataverse == null ? null
: activeDefaultDataverse.getDataverseName() : stmtCreateType.getDataverseName().getValue();
if (dataverseName == null) {
throw new AlgebricksException(" dataverse not specified ");
}
String typeName = stmtCreateType.getIdent().getValue();
Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
if (dv == null) {
throw new AlgebricksException("Unknonw dataverse " + dataverseName);
}
Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
if (dt != null) {
if (!stmtCreateType.getIfNotExists()) {
throw new AlgebricksException("A datatype with this name " + typeName + " already exists.");
}
} else {
if (builtinTypeMap.get(typeName) != null) {
throw new AlgebricksException("Cannot redefine builtin type " + typeName + ".");
} else {
Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, (TypeDecl) stmt,
dataverseName);
TypeSignature typeSignature = new TypeSignature(dataverseName, typeName);
IAType type = typeMap.get(typeSignature);
MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, typeName, type, false));
}
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
throw e;
} finally {
releaseWriteLatch();
}
}
private void handleDataverseDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireWriteLatch();
String dvName = null;
List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
try {
DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
dvName = stmtDelete.getDataverseName().getValue();
Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dvName);
if (dv == null) {
if (!stmtDelete.getIfExists()) {
throw new AlgebricksException("There is no dataverse with this name " + dvName + ".");
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
}
//#. prepare jobs which will drop corresponding datasets with indexes.
List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dvName);
for (int j = 0; j < datasets.size(); j++) {
String datasetName = datasets.get(j).getDatasetName();
DatasetType dsType = datasets.get(j).getDatasetType();
if (dsType == DatasetType.INTERNAL || dsType == DatasetType.FEED) {
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dvName, datasetName);
for (int k = 0; k < indexes.size(); k++) {
if (indexes.get(k).isSecondaryIndex()) {
CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dvName, datasetName,
indexes.get(k).getIndexName());
jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
}
}
CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dvName, datasetName);
jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
}
}
//#. mark PendingDropOp on the dataverse record by
// first, deleting the dataverse record from the DATAVERSE_DATASET
// second, inserting the dataverse record with the PendingDropOp value into the DATAVERSE_DATASET
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dvName);
MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dvName, dv.getDataFormat(),
IMetadataEntity.PENDING_DROP_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
for (JobSpecification jobSpec : jobsToExecute) {
runJob(hcc, jobSpec, true);
}
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
//#. finally, delete the dataverse.
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dvName);
if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName() == dvName) {
activeDefaultDataverse = null;
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
//#. execute compensation operations
// remove the all indexes in NC
for (JobSpecification jobSpec : jobsToExecute) {
runJob(hcc, jobSpec, true);
}
// remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
MetadataManager.INSTANCE.dropDataverse(metadataProvider.getMetadataTxnContext(), dvName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
throw new AlgebricksException(e2);
}
throw e;
} finally {
releaseWriteLatch();
}
}
    /**
     * Drops a dataset using the "pending-op" protocol: for internal/feed
     * datasets, drop jobs for the dataset and its secondary indexes are
     * prepared, the dataset record is re-inserted with PENDING_DROP_OP, the
     * jobs are run outside any metadata transaction, and the record is finally
     * deleted. External datasets need only the metadata delete. On failure,
     * compensation code re-runs the drop jobs and removes the metadata record.
     */
    private void handleDatasetDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        // Tracks whether a metadata transaction is open so the catch block knows
        // whether an abort is required.
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireWriteLatch();
        String dataverseName = null;
        String datasetName = null;
        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
        try {
            DropStatement stmtDelete = (DropStatement) stmt;
            // Fall back to the session's active default dataverse when none is given.
            dataverseName = stmtDelete.getDataverseName() == null ? activeDefaultDataverse == null ? null
                    : activeDefaultDataverse.getDataverseName() : stmtDelete.getDataverseName().getValue();
            if (dataverseName == null) {
                throw new AlgebricksException(" dataverse not specified ");
            }
            datasetName = stmtDelete.getDatasetName().getValue();
            Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
            if (ds == null) {
                if (!stmtDelete.getIfExists()) {
                    throw new AlgebricksException("There is no dataset with this name " + datasetName
                            + " in dataverse " + dataverseName + ".");
                }
                // "if exists": missing dataset is a no-op.
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            }
            if (ds.getDatasetType() == DatasetType.INTERNAL || ds.getDatasetType() == DatasetType.FEED) {
                //#. prepare jobs to drop the datatset and the indexes in NC
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (int j = 0; j < indexes.size(); j++) {
                    if (indexes.get(j).isSecondaryIndex()) {
                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                                indexes.get(j).getIndexName());
                        jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
                    }
                }
                CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
                jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
                //#. mark the existing dataset as PendingDropOp
                MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
                MetadataManager.INSTANCE.addDataset(
                        mdTxnCtx,
                        new Dataset(dataverseName, datasetName, ds.getItemTypeName(), ds.getDatasetDetails(), ds
                                .getHints(), ds.getDatasetType(), ds.getDatasetId(), IMetadataEntity.PENDING_DROP_OP));
                // Commit before running the jobs; jobs must not run inside a metadata txn.
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                //#. run the jobs
                for (JobSpecification jobSpec : jobsToExecute) {
                    runJob(hcc, jobSpec, true);
                }
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                bActiveTxn = true;
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
            }
            //#. finally, delete the dataset.
            MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e) {
            if (bActiveTxn) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            }
            //#. execute compensation operations
            // remove the all indexes in NC
            for (JobSpecification jobSpec : jobsToExecute) {
                runJob(hcc, jobSpec, true);
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                throw new AlgebricksException(e2);
            }
            throw e;
        } finally {
            releaseWriteLatch();
        }
    }
    /**
     * Drops a secondary index using the "pending-op" protocol: the index record
     * is re-inserted with PENDING_DROP_OP, the physical drop job is run outside
     * any metadata transaction, and the record is finally deleted. Rejected for
     * external datasets (which have no maintained indexes). On failure,
     * compensation code re-runs the drop jobs and removes the metadata record.
     */
    private void handleIndexDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        // Tracks whether a metadata transaction is open so the catch block knows
        // whether an abort is required.
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireWriteLatch();
        String dataverseName = null;
        String datasetName = null;
        String indexName = null;
        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
        try {
            IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
            datasetName = stmtIndexDrop.getDatasetName().getValue();
            // Fall back to the session's active default dataverse when none is given.
            dataverseName = stmtIndexDrop.getDataverseName() == null ? activeDefaultDataverse == null ? null
                    : activeDefaultDataverse.getDataverseName() : stmtIndexDrop.getDataverseName().getValue();
            if (dataverseName == null) {
                throw new AlgebricksException(" dataverse not specified ");
            }
            Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
            if (ds == null) {
                throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse "
                        + dataverseName);
            }
            if (ds.getDatasetType() == DatasetType.INTERNAL || ds.getDatasetType() == DatasetType.FEED) {
                indexName = stmtIndexDrop.getIndexName().getValue();
                Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
                if (index == null) {
                    if (!stmtIndexDrop.getIfExists()) {
                        throw new AlgebricksException("There is no index with this name " + indexName + ".");
                    }
                    // "if exists": missing index falls through to the final commit as a no-op.
                } else {
                    //#. prepare a job to drop the index in NC.
                    CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                            indexName);
                    jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
                    //#. mark PendingDropOp on the existing index
                    // NOTE(review): this Index constructor overload does not carry over the
                    // gram length of the original index — confirm the overload defaults it
                    // appropriately for the transient PENDING_DROP_OP record.
                    MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
                    MetadataManager.INSTANCE.addIndex(
                            mdTxnCtx,
                            new Index(dataverseName, datasetName, indexName, index.getIndexType(), index
                                    .getKeyFieldNames(), index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
                    //#. commit the existing transaction before calling runJob.
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    bActiveTxn = false;
                    for (JobSpecification jobSpec : jobsToExecute) {
                        runJob(hcc, jobSpec, true);
                    }
                    //#. begin a new transaction
                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                    bActiveTxn = true;
                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
                    //#. finally, delete the existing index
                    MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
                }
            } else {
                throw new AlgebricksException(datasetName
                        + " is an external dataset. Indexes are not maintained for external datasets.");
            }
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e) {
            if (bActiveTxn) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            }
            //#. execute compensation operations
            // remove the all indexes in NC
            for (JobSpecification jobSpec : jobsToExecute) {
                runJob(hcc, jobSpec, true);
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName, indexName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                throw new AlgebricksException(e2);
            }
            throw e;
        } finally {
            releaseWriteLatch();
        }
    }
private void handleTypeDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
throws AlgebricksException, MetadataException, RemoteException, ACIDException {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireWriteLatch();
try {
TypeDropStatement stmtTypeDrop = (TypeDropStatement) stmt;
String dataverseName = stmtTypeDrop.getDataverseName() == null ? (activeDefaultDataverse == null ? null
: activeDefaultDataverse.getDataverseName()) : stmtTypeDrop.getDataverseName().getValue();
if (dataverseName == null) {
throw new AlgebricksException(" dataverse not specified ");
}
String typeName = stmtTypeDrop.getTypeName().getValue();
Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
if (dt == null) {
if (!stmtTypeDrop.getIfExists())
throw new AlgebricksException("There is no datatype with this name " + typeName + ".");
} else {
MetadataManager.INSTANCE.dropDatatype(mdTxnCtx, dataverseName, typeName);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
throw e;
} finally {
releaseWriteLatch();
}
}
private void handleNodegroupDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
throws MetadataException, AlgebricksException, RemoteException, ACIDException {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireWriteLatch();
try {
NodeGroupDropStatement stmtDelete = (NodeGroupDropStatement) stmt;
String nodegroupName = stmtDelete.getNodeGroupName().getValue();
NodeGroup ng = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodegroupName);
if (ng == null) {
if (!stmtDelete.getIfExists())
throw new AlgebricksException("There is no nodegroup with this name " + nodegroupName + ".");
} else {
MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodegroupName);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
throw e;
} finally {
releaseWriteLatch();
}
}
    /**
     * Registers a user-defined AQL scalar function in the metadata, under the
     * metadata write latch. The function body is stored verbatim (not compiled here).
     *
     * @param metadataProvider provider whose metadata transaction context is (re)bound here
     * @param stmt             a {@link CreateFunctionStatement}
     * @throws AlgebricksException if no dataverse is in scope, or the target dataverse does not exist
     */
    private void handleCreateFunctionStatement(AqlMetadataProvider metadataProvider, Statement stmt)
            throws AlgebricksException, MetadataException, RemoteException, ACIDException {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireWriteLatch();
        try {
            CreateFunctionStatement cfs = (CreateFunctionStatement) stmt;
            // Fall back to the active default dataverse when the signature carries no namespace.
            String dataverse = cfs.getSignature().getNamespace() == null ? activeDefaultDataverse == null ? null
                    : activeDefaultDataverse.getDataverseName() : cfs.getSignature().getNamespace();
            if (dataverse == null) {
                throw new AlgebricksException(" dataverse not specified ");
            }
            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
            if (dv == null) {
                throw new AlgebricksException("There is no dataverse with this name " + dataverse + ".");
            }
            // NOTE(review): "getaAterixFunction" is the (misspelled) accessor on the AQL AST
            // node; renaming it is out of scope for this method.
            Function function = new Function(dataverse, cfs.getaAterixFunction().getName(), cfs.getaAterixFunction()
                    .getArity(), cfs.getParamList(), Function.RETURNTYPE_VOID, cfs.getFunctionBody(),
                    Function.LANGUAGE_AQL, FunctionKind.SCALAR.toString());
            MetadataManager.INSTANCE.addFunction(mdTxnCtx, function);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            throw e;
        } finally {
            releaseWriteLatch();
        }
    }
private void handleFunctionDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
throws MetadataException, RemoteException, ACIDException, AlgebricksException {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireWriteLatch();
try {
FunctionDropStatement stmtDropFunction = (FunctionDropStatement) stmt;
FunctionSignature signature = stmtDropFunction.getFunctionSignature();
Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
if (function == null) {
if (!stmtDropFunction.getIfExists())
throw new AlgebricksException("Unknonw function " + signature);
} else {
MetadataManager.INSTANCE.dropFunction(mdTxnCtx, signature);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
throw e;
} finally {
releaseWriteLatch();
}
}
    /**
     * Compiles and runs a LOAD FROM FILE: builds one load job for the dataset plus a
     * loading job for each of its secondary indexes, commits the metadata transaction,
     * and only then executes all jobs (jobs must not run inside an open metadata txn).
     *
     * @param metadataProvider provider whose metadata transaction context is (re)bound here
     * @param stmt             a {@link LoadFromFileStatement}
     * @param hcc              Hyracks client connection used to run the generated jobs
     */
    private void handleLoadFromFileStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireReadLatch();
        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
        try {
            LoadFromFileStatement loadStmt = (LoadFromFileStatement) stmt;
            // Fall back to the active default dataverse when the statement names none.
            String dataverseName = loadStmt.getDataverseName() == null ? activeDefaultDataverse == null ? null
                    : activeDefaultDataverse.getDataverseName() : loadStmt.getDataverseName().getValue();
            CompiledLoadFromFileStatement cls = new CompiledLoadFromFileStatement(dataverseName, loadStmt
                    .getDatasetName().getValue(), loadStmt.getAdapter(), loadStmt.getProperties(),
                    loadStmt.dataIsAlreadySorted());
            IDataFormat format = getDataFormat(metadataProvider.getMetadataTxnContext(), dataverseName);
            Job job = DatasetOperations.createLoadDatasetJobSpec(metadataProvider, cls, format);
            jobsToExecute.add(job.getJobSpec());
            // Also load the dataset's secondary indexes.
            List<Index> datasetIndexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, loadStmt
                    .getDatasetName().getValue());
            for (Index index : datasetIndexes) {
                if (!index.isSecondaryIndex()) {
                    continue;
                }
                // Create CompiledCreateIndexStatement from metadata entity 'index'.
                CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(),
                        dataverseName, index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(),
                        index.getIndexType());
                jobsToExecute.add(IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, metadataProvider));
            }
            // Commit the metadata work before running the jobs.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            for (JobSpecification jobspec : jobsToExecute) {
                runJob(hcc, jobspec, true);
            }
        } catch (Exception e) {
            // Only abort if the metadata transaction is still open.
            if (bActiveTxn) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            }
            throw e;
        } finally {
            releaseReadLatch();
        }
    }
    /**
     * Compiles a WRITE ... FROM QUERY RESULT statement into a job and runs it.
     * The metadata transaction is committed before the job is executed.
     *
     * @param metadataProvider provider whose metadata transaction context is (re)bound here
     * @param stmt             a {@link WriteFromQueryResultStatement}
     * @param hcc              Hyracks client connection used to run the job
     */
    private void handleWriteFromQueryResultStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireReadLatch();
        try {
            // Compilation must produce a write transaction for the target dataset.
            metadataProvider.setWriteTransaction(true);
            WriteFromQueryResultStatement st1 = (WriteFromQueryResultStatement) stmt;
            String dataverseName = st1.getDataverseName() == null ? activeDefaultDataverse == null ? null
                    : activeDefaultDataverse.getDataverseName() : st1.getDataverseName().getValue();
            CompiledWriteFromQueryResultStatement clfrqs = new CompiledWriteFromQueryResultStatement(dataverseName, st1
                    .getDatasetName().getValue(), st1.getQuery(), st1.getVarCounter());
            JobSpecification compiled = rewriteCompileQuery(metadataProvider, clfrqs.getQuery(), clfrqs);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            if (compiled != null) {
                runJob(hcc, compiled, true);
            }
        } catch (Exception e) {
            if (bActiveTxn) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            }
            throw e;
        } finally {
            releaseReadLatch();
        }
    }
    /**
     * Compiles an INSERT statement into a job and runs it. The metadata
     * transaction is committed before the job is executed.
     *
     * @param metadataProvider provider whose metadata transaction context is (re)bound here
     * @param stmt             an {@link InsertStatement}
     * @param hcc              Hyracks client connection used to run the job
     */
    private void handleInsertStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireReadLatch();
        try {
            // Compilation must produce a write transaction for the target dataset.
            metadataProvider.setWriteTransaction(true);
            InsertStatement stmtInsert = (InsertStatement) stmt;
            String dataverseName = stmtInsert.getDataverseName() == null ? activeDefaultDataverse == null ? null
                    : activeDefaultDataverse.getDataverseName() : stmtInsert.getDataverseName().getValue();
            CompiledInsertStatement clfrqs = new CompiledInsertStatement(dataverseName, stmtInsert.getDatasetName()
                    .getValue(), stmtInsert.getQuery(), stmtInsert.getVarCounter());
            JobSpecification compiled = rewriteCompileQuery(metadataProvider, clfrqs.getQuery(), clfrqs);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            if (compiled != null) {
                runJob(hcc, compiled, true);
            }
        } catch (Exception e) {
            if (bActiveTxn) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            }
            throw e;
        } finally {
            releaseReadLatch();
        }
    }
    /**
     * Compiles a DELETE statement into a job and runs it. The metadata
     * transaction is committed before the job is executed.
     *
     * @param metadataProvider provider whose metadata transaction context is (re)bound here
     * @param stmt             a {@link DeleteStatement}
     * @param hcc              Hyracks client connection used to run the job
     */
    private void handleDeleteStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireReadLatch();
        try {
            // Compilation must produce a write transaction for the target dataset.
            metadataProvider.setWriteTransaction(true);
            DeleteStatement stmtDelete = (DeleteStatement) stmt;
            String dataverseName = stmtDelete.getDataverseName() == null ? activeDefaultDataverse == null ? null
                    : activeDefaultDataverse.getDataverseName() : stmtDelete.getDataverseName().getValue();
            CompiledDeleteStatement clfrqs = new CompiledDeleteStatement(stmtDelete.getVariableExpr(), dataverseName,
                    stmtDelete.getDatasetName().getValue(), stmtDelete.getCondition(), stmtDelete.getDieClause(),
                    stmtDelete.getVarCounter(), metadataProvider);
            JobSpecification compiled = rewriteCompileQuery(metadataProvider, clfrqs.getQuery(), clfrqs);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            if (compiled != null) {
                runJob(hcc, compiled, true);
            }
        } catch (Exception e) {
            if (bActiveTxn) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            }
            throw e;
        } finally {
            releaseReadLatch();
        }
    }
    /**
     * Rewrites and compiles a query into a Hyracks job specification. Both phases
     * run under the caller's ongoing metadata transaction; no transaction is begun,
     * committed, or aborted here.
     *
     * @param metadataProvider provider carrying the caller's metadata transaction context
     * @param query            the query to rewrite and compile
     * @param stmt             compiled DML statement for insert/delete/etc., or null for a plain query
     * @return the compiled job specification
     */
    private JobSpecification rewriteCompileQuery(AqlMetadataProvider metadataProvider, Query query,
            ICompiledDmlStatement stmt) throws AsterixException, RemoteException, AlgebricksException, JSONException,
            ACIDException {
        // Query Rewriting (happens under the same ongoing metadata transaction)
        Pair<Query, Integer> reWrittenQuery = APIFramework.reWriteQuery(declaredFunctions, metadataProvider, query,
                sessionConfig, out, pdf);
        // Query Compilation (happens under the same ongoing metadata
        // transaction)
        JobSpecification spec = APIFramework.compileQuery(declaredFunctions, metadataProvider, query,
                reWrittenQuery.second, stmt == null ? null : stmt.getDatasetName(), sessionConfig, out, pdf, stmt);
        return spec;
    }
    /**
     * Starts a feed on a FEED dataset: validates the target dataset, compiles the
     * feed query, commits the metadata transaction, and then runs the feed job.
     *
     * @param metadataProvider provider whose metadata transaction context is (re)bound here
     * @param stmt             a {@link BeginFeedStatement}
     * @param hcc              Hyracks client connection used to run the feed job
     * @throws AsterixException         if the dataset does not exist
     * @throws IllegalArgumentException if the dataset exists but is not a feed dataset
     */
    private void handleBeginFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireReadLatch();
        try {
            BeginFeedStatement bfs = (BeginFeedStatement) stmt;
            String dataverseName = bfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
                    : activeDefaultDataverse.getDataverseName() : bfs.getDataverseName().getValue();
            CompiledBeginFeedStatement cbfs = new CompiledBeginFeedStatement(dataverseName, bfs.getDatasetName()
                    .getValue(), bfs.getQuery(), bfs.getVarCounter());
            Dataset dataset;
            dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName, bfs
                    .getDatasetName().getValue());
            if (dataset == null) {
                throw new AsterixException("Unknown dataset :" + bfs.getDatasetName().getValue());
            }
            IDatasetDetails datasetDetails = dataset.getDatasetDetails();
            if (datasetDetails.getDatasetType() != DatasetType.FEED) {
                throw new IllegalArgumentException("Dataset " + bfs.getDatasetName().getValue()
                        + " is not a feed dataset");
            }
            // Resolve the statement against the dataset's metadata, then refresh the
            // compiled statement with the (possibly rewritten) query.
            bfs.initialize(metadataProvider.getMetadataTxnContext(), dataset);
            cbfs.setQuery(bfs.getQuery());
            JobSpecification compiled = rewriteCompileQuery(metadataProvider, bfs.getQuery(), cbfs);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            if (compiled != null) {
                runJob(hcc, compiled, true);
            }
        } catch (Exception e) {
            if (bActiveTxn) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            }
            throw e;
        } finally {
            releaseReadLatch();
        }
    }
private void handleControlFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireReadLatch();
try {
ControlFeedStatement cfs = (ControlFeedStatement) stmt;
String dataverseName = cfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
: activeDefaultDataverse.getDataverseName() : cfs.getDatasetName().getValue();
CompiledControlFeedStatement clcfs = new CompiledControlFeedStatement(cfs.getOperationType(),
dataverseName, cfs.getDatasetName().getValue(), cfs.getAlterAdapterConfParams());
JobSpecification jobSpec = FeedOperations.buildControlFeedJobSpec(clcfs, metadataProvider);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
runJob(hcc, jobSpec, true);
} catch (Exception e) {
if (bActiveTxn) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
throw e;
} finally {
releaseReadLatch();
}
}
    /**
     * Compiles a query, runs it, and emits its results. With asyncResults, only a
     * handle (jobId + resultSetId) is printed for the client to poll later;
     * otherwise result frames are read and printed in the configured display format.
     *
     * @param metadataProvider provider whose metadata transaction context is (re)bound here
     * @param query            the query to execute
     * @param hcc              Hyracks client connection used to run the job
     * @param hdc              Hyracks dataset client used to read result frames
     * @param asyncResults     true to return a handle instead of streaming results
     * @return a QueryResult describing the query and its result-set id
     */
    private QueryResult handleQuery(AqlMetadataProvider metadataProvider, Query query, IHyracksClientConnection hcc,
            IHyracksDataset hdc, boolean asyncResults) throws Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireReadLatch();
        try {
            JobSpecification compiled = rewriteCompileQuery(metadataProvider, query, null);
            QueryResult queryResult = new QueryResult(query, metadataProvider.getResultSetId());
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            if (compiled != null) {
                GlobalConfig.ASTERIX_LOGGER.info(compiled.toJSON().toString(1));
                // Start without blocking: results are streamed while the job runs.
                JobId jobId = runJob(hcc, compiled, false);
                JSONObject response = new JSONObject();
                if (asyncResults) {
                    JSONArray handle = new JSONArray();
                    handle.put(jobId.getId());
                    handle.put(metadataProvider.getResultSetId().getId());
                    response.put("handle", handle);
                    out.print(response);
                    out.flush();
                } else {
                    if (pdf == DisplayFormat.HTML) {
                        out.println("<pre>");
                    }
                    // Read result frames until the reader is exhausted.
                    ByteBuffer buffer = ByteBuffer.allocate(ResultReader.FRAME_SIZE);
                    ResultReader resultReader = new ResultReader(hcc, hdc);
                    resultReader.open(jobId, metadataProvider.getResultSetId());
                    buffer.clear();
                    while (resultReader.read(buffer) > 0) {
                        response.put("results",
                                ResultUtils.getJSONFromBuffer(buffer, resultReader.getFrameTupleAccessor()));
                        buffer.clear();
                        switch (pdf) {
                            case HTML:
                                ResultUtils.prettyPrintHTML(out, response);
                                break;
                            case TEXT:
                            case JSON:
                                out.print(response);
                                break;
                        }
                        out.flush();
                    }
                    if (pdf == DisplayFormat.HTML) {
                        out.println("</pre>");
                    }
                }
                // Wait for the job even in the async case before returning.
                hcc.waitForCompletion(jobId);
            }
            return queryResult;
        } catch (Exception e) {
            if (bActiveTxn) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            }
            throw e;
        } finally {
            releaseReadLatch();
        }
    }
    /**
     * Creates a nodegroup in the metadata, under the metadata write latch.
     * A no-op when the nodegroup already exists and IF NOT EXISTS was specified.
     *
     * @param metadataProvider provider whose metadata transaction context is (re)bound here
     * @param stmt             a {@link NodegroupDecl}
     * @throws AlgebricksException if the nodegroup exists and IF NOT EXISTS was not given
     */
    private void handleCreateNodeGroupStatement(AqlMetadataProvider metadataProvider, Statement stmt)
            throws MetadataException, AlgebricksException, RemoteException, ACIDException {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireWriteLatch();
        try {
            NodegroupDecl stmtCreateNodegroup = (NodegroupDecl) stmt;
            String ngName = stmtCreateNodegroup.getNodegroupName().getValue();
            NodeGroup ng = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, ngName);
            if (ng != null) {
                if (!stmtCreateNodegroup.getIfNotExists())
                    throw new AlgebricksException("A nodegroup with this name " + ngName + " already exists.");
            } else {
                // Unwrap the Identifier list into plain node-controller names.
                List<Identifier> ncIdentifiers = stmtCreateNodegroup.getNodeControllerNames();
                List<String> ncNames = new ArrayList<String>(ncIdentifiers.size());
                for (Identifier id : ncIdentifiers) {
                    ncNames.add(id.getValue());
                }
                MetadataManager.INSTANCE.addNodegroup(mdTxnCtx, new NodeGroup(ngName, ncNames));
            }
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            throw e;
        } finally {
            releaseWriteLatch();
        }
    }
private JobId runJob(IHyracksClientConnection hcc, JobSpecification spec, boolean waitForCompletion)
throws Exception {
JobId[] jobIds = executeJobArray(hcc, new Job[] { new Job(spec) }, out, pdf, waitForCompletion);
return jobIds[0];
}
public JobId[] executeJobArray(IHyracksClientConnection hcc, Job[] jobs, PrintWriter out, DisplayFormat pdf,
boolean waitForCompletion) throws Exception {
JobId[] startedJobIds = new JobId[jobs.length];
for (int i = 0; i < jobs.length; i++) {
JobSpecification spec = jobs[i].getJobSpec();
spec.setMaxReattempts(0);
JobId jobId = hcc.startJob(spec);
startedJobIds[i] = jobId;
if (waitForCompletion) {
hcc.waitForCompletion(jobId);
}
}
return startedJobIds;
}
private static IDataFormat getDataFormat(MetadataTransactionContext mdTxnCtx, String dataverseName)
throws AsterixException {
Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
IDataFormat format;
try {
format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
} catch (Exception e) {
throw new AsterixException(e);
}
return format;
}
    // Blocks until the global metadata write latch is held (serializes DDL writers).
    private void acquireWriteLatch() {
        MetadataManager.INSTANCE.acquireWriteLatch();
    }
    // Releases the global metadata write latch acquired by acquireWriteLatch().
    private void releaseWriteLatch() {
        MetadataManager.INSTANCE.releaseWriteLatch();
    }
    // Blocks until the global metadata read latch is held (shared with other readers).
    private void acquireReadLatch() {
        MetadataManager.INSTANCE.acquireReadLatch();
    }
    // Releases the global metadata read latch acquired by acquireReadLatch().
    private void releaseReadLatch() {
        MetadataManager.INSTANCE.releaseReadLatch();
    }
}
|
asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
|
/*
* Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.asterix.aql.translator;
import java.io.File;
import java.io.PrintWriter;
import java.nio.ByteBuffer;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import edu.uci.ics.asterix.api.common.APIFramework;
import edu.uci.ics.asterix.api.common.APIFramework.DisplayFormat;
import edu.uci.ics.asterix.api.common.Job;
import edu.uci.ics.asterix.api.common.SessionConfig;
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
import edu.uci.ics.asterix.aql.expression.DatasetDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.ExternalDetailsDecl;
import edu.uci.ics.asterix.aql.expression.FeedDetailsDecl;
import edu.uci.ics.asterix.aql.expression.FunctionDecl;
import edu.uci.ics.asterix.aql.expression.FunctionDropStatement;
import edu.uci.ics.asterix.aql.expression.Identifier;
import edu.uci.ics.asterix.aql.expression.IndexDropStatement;
import edu.uci.ics.asterix.aql.expression.InsertStatement;
import edu.uci.ics.asterix.aql.expression.InternalDetailsDecl;
import edu.uci.ics.asterix.aql.expression.LoadFromFileStatement;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
import edu.uci.ics.asterix.aql.expression.Query;
import edu.uci.ics.asterix.aql.expression.SetStatement;
import edu.uci.ics.asterix.aql.expression.TypeDecl;
import edu.uci.ics.asterix.aql.expression.TypeDropStatement;
import edu.uci.ics.asterix.aql.expression.WriteFromQueryResultStatement;
import edu.uci.ics.asterix.aql.expression.WriteStatement;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
import edu.uci.ics.asterix.common.config.GlobalConfig;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.file.DatasetOperations;
import edu.uci.ics.asterix.file.FeedOperations;
import edu.uci.ics.asterix.file.IndexOperations;
import edu.uci.ics.asterix.formats.base.IDataFormat;
import edu.uci.ics.asterix.metadata.IDatasetDetails;
import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.MetadataManager;
import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.Datatype;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
import edu.uci.ics.asterix.metadata.entities.FeedDatasetDetails;
import edu.uci.ics.asterix.metadata.entities.Function;
import edu.uci.ics.asterix.metadata.entities.Index;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
import edu.uci.ics.asterix.metadata.entities.NodeGroup;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.om.types.TypeSignature;
import edu.uci.ics.asterix.result.ResultReader;
import edu.uci.ics.asterix.result.ResultUtils;
import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
import edu.uci.ics.asterix.transaction.management.service.transaction.DatasetIdFactory;
import edu.uci.ics.asterix.translator.AbstractAqlTranslator;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledBeginFeedStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledControlFeedStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledDatasetDropStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledDeleteStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexDropStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledInsertStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledWriteFromQueryResultStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;
import edu.uci.ics.asterix.translator.TypeTranslator;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
import edu.uci.ics.hyracks.algebricks.data.IAWriterFactory;
import edu.uci.ics.hyracks.algebricks.data.IResultSerializerFactoryProvider;
import edu.uci.ics.hyracks.algebricks.runtime.serializer.ResultSerializerFactoryProvider;
import edu.uci.ics.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
/*
* Provides functionality for executing a batch of AQL statements (queries included)
* sequentially.
*/
public class AqlTranslator extends AbstractAqlTranslator {
private final List<Statement> aqlStatements;
private final PrintWriter out;
private final SessionConfig sessionConfig;
private final DisplayFormat pdf;
private Dataverse activeDefaultDataverse;
private List<FunctionDecl> declaredFunctions;
    /**
     * @param aqlStatements the batch of parsed AQL statements to execute in order
     * @param out           writer used to emit query results and job handles
     * @param pc            session-level compiler configuration
     * @param pdf           display format (TEXT/HTML/JSON) for results
     */
    public AqlTranslator(List<Statement> aqlStatements, PrintWriter out, SessionConfig pc, DisplayFormat pdf)
            throws MetadataException, AsterixException {
        this.aqlStatements = aqlStatements;
        this.out = out;
        this.sessionConfig = pc;
        this.pdf = pdf;
        // Pre-collect FUNCTION_DECL statements so queries can reference them.
        declaredFunctions = getDeclaredFunctions(aqlStatements);
    }
private List<FunctionDecl> getDeclaredFunctions(List<Statement> statements) {
List<FunctionDecl> functionDecls = new ArrayList<FunctionDecl>();
for (Statement st : statements) {
if (st.getKind().equals(Statement.Kind.FUNCTION_DECL)) {
functionDecls.add((FunctionDecl) st);
}
}
return functionDecls;
}
/**
* Compiles and submits for execution a list of AQL statements.
*
* @param hcc
* A Hyracks client connection that is used to submit a jobspec to Hyracks.
* @param hdc
* A Hyracks dataset client object that is used to read the results.
* @param asyncResults
* True if the results should be read asynchronously or false if we should wait for results to be read.
* @return A List<QueryResult> containing a QueryResult instance corresponding to each submitted query.
* @throws Exception
*/
    public List<QueryResult> compileAndExecute(IHyracksClientConnection hcc, IHyracksDataset hdc, boolean asyncResults)
            throws Exception {
        // Session-wide state threaded across statements: output file/writer from
        // WRITE, properties from SET, and a per-query result-set id counter.
        int resultSetIdCounter = 0;
        List<QueryResult> executionResult = new ArrayList<QueryResult>();
        FileSplit outputFile = null;
        IAWriterFactory writerFactory = PrinterBasedWriterFactory.INSTANCE;
        IResultSerializerFactoryProvider resultSerializerFactoryProvider = ResultSerializerFactoryProvider.INSTANCE;
        Map<String, String> config = new HashMap<String, String>();
        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
        for (Statement stmt : aqlStatements) {
            validateOperation(activeDefaultDataverse, stmt);
            // Each statement gets a fresh metadata provider bound to the current
            // default dataverse and the accumulated session settings.
            AqlMetadataProvider metadataProvider = new AqlMetadataProvider(activeDefaultDataverse);
            metadataProvider.setWriterFactory(writerFactory);
            metadataProvider.setResultSerializerFactoryProvider(resultSerializerFactoryProvider);
            metadataProvider.setOutputFile(outputFile);
            metadataProvider.setConfig(config);
            jobsToExecute.clear();
            // Dispatch to the per-statement handler by statement kind.
            switch (stmt.getKind()) {
                case SET: {
                    handleSetStatement(metadataProvider, stmt, config);
                    break;
                }
                case DATAVERSE_DECL: {
                    activeDefaultDataverse = handleUseDataverseStatement(metadataProvider, stmt);
                    break;
                }
                case CREATE_DATAVERSE: {
                    handleCreateDataverseStatement(metadataProvider, stmt);
                    break;
                }
                case DATASET_DECL: {
                    handleCreateDatasetStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case CREATE_INDEX: {
                    handleCreateIndexStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case TYPE_DECL: {
                    handleCreateTypeStatement(metadataProvider, stmt);
                    break;
                }
                case NODEGROUP_DECL: {
                    handleCreateNodeGroupStatement(metadataProvider, stmt);
                    break;
                }
                case DATAVERSE_DROP: {
                    handleDataverseDropStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case DATASET_DROP: {
                    handleDatasetDropStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case INDEX_DROP: {
                    handleIndexDropStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case TYPE_DROP: {
                    handleTypeDropStatement(metadataProvider, stmt);
                    break;
                }
                case NODEGROUP_DROP: {
                    handleNodegroupDropStatement(metadataProvider, stmt);
                    break;
                }
                case CREATE_FUNCTION: {
                    handleCreateFunctionStatement(metadataProvider, stmt);
                    break;
                }
                case FUNCTION_DROP: {
                    handleFunctionDropStatement(metadataProvider, stmt);
                    break;
                }
                case LOAD_FROM_FILE: {
                    handleLoadFromFileStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case WRITE_FROM_QUERY_RESULT: {
                    handleWriteFromQueryResultStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case INSERT: {
                    handleInsertStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case DELETE: {
                    handleDeleteStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case BEGIN_FEED: {
                    handleBeginFeedStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case CONTROL_FEED: {
                    handleControlFeedStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case QUERY: {
                    // Each query gets its own result-set id, consumed by the result reader.
                    metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
                    executionResult.add(handleQuery(metadataProvider, (Query) stmt, hcc, hdc, asyncResults));
                    break;
                }
                case WRITE: {
                    // WRITE updates the writer factory and/or output file for later statements.
                    Pair<IAWriterFactory, FileSplit> result = handleWriteStatement(metadataProvider, stmt);
                    if (result.first != null) {
                        writerFactory = result.first;
                    }
                    outputFile = result.second;
                    break;
                }
            }
        }
        return executionResult;
    }
private void handleSetStatement(AqlMetadataProvider metadataProvider, Statement stmt, Map<String, String> config)
throws RemoteException, ACIDException {
SetStatement ss = (SetStatement) stmt;
String pname = ss.getPropName();
String pvalue = ss.getPropValue();
config.put(pname, pvalue);
}
private Pair<IAWriterFactory, FileSplit> handleWriteStatement(AqlMetadataProvider metadataProvider, Statement stmt)
throws InstantiationException, IllegalAccessException, ClassNotFoundException {
WriteStatement ws = (WriteStatement) stmt;
File f = new File(ws.getFileName());
FileSplit outputFile = new FileSplit(ws.getNcName().getValue(), new FileReference(f));
IAWriterFactory writerFactory = null;
if (ws.getWriterClassName() != null) {
writerFactory = (IAWriterFactory) Class.forName(ws.getWriterClassName()).newInstance();
}
return new Pair<IAWriterFactory, FileSplit>(writerFactory, outputFile);
}
    /**
     * Resolves a USE DATAVERSE statement to its Dataverse metadata entity, which
     * the caller installs as the active default dataverse.
     *
     * @param metadataProvider provider whose metadata transaction context is (re)bound here
     * @param stmt             a {@link DataverseDecl}
     * @return the resolved dataverse
     * @throws MetadataException if the dataverse does not exist, or wrapping any other failure
     */
    private Dataverse handleUseDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
            throws MetadataException, RemoteException, ACIDException {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireReadLatch();
        try {
            DataverseDecl dvd = (DataverseDecl) stmt;
            String dvName = dvd.getDataverseName().getValue();
            Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
            if (dv == null) {
                throw new MetadataException("Unknown dataverse " + dvName);
            }
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            return dv;
        } catch (Exception e) {
            // Unlike the other handlers, failures here are rewrapped as MetadataException.
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            throw new MetadataException(e);
        } finally {
            releaseReadLatch();
        }
    }
    /**
     * Creates a dataverse in the metadata, under the metadata write latch.
     * A no-op when the dataverse already exists and IF NOT EXISTS was specified.
     *
     * @param metadataProvider provider whose metadata transaction context is (re)bound here
     * @param stmt             a {@link CreateDataverseStatement}
     * @throws AlgebricksException if the dataverse exists and IF NOT EXISTS was not given
     */
    private void handleCreateDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
            throws MetadataException, AlgebricksException, RemoteException, ACIDException {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        acquireWriteLatch();
        try {
            CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
            String dvName = stmtCreateDataverse.getDataverseName().getValue();
            Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
            if (dv != null && !stmtCreateDataverse.getIfNotExists()) {
                throw new AlgebricksException("A dataverse with this name " + dvName + " already exists.");
            }
            // PENDING_NO_OP: no compensating physical work is needed for a dataverse.
            MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(), new Dataverse(dvName,
                    stmtCreateDataverse.getFormat(), IMetadataEntity.PENDING_NO_OP));
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            throw e;
        } finally {
            releaseWriteLatch();
        }
    }
private void handleCreateDatasetStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws AsterixException, Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireWriteLatch();
String dataverseName = null;
String datasetName = null;
Dataset dataset = null;
try {
DatasetDecl dd = (DatasetDecl) stmt;
dataverseName = dd.getDataverse() != null ? dd.getDataverse().getValue()
: activeDefaultDataverse != null ? activeDefaultDataverse.getDataverseName() : null;
if (dataverseName == null) {
throw new AlgebricksException(" dataverse not specified ");
}
datasetName = dd.getName().getValue();
DatasetType dsType = dd.getDatasetType();
String itemTypeName = dd.getItemTypeName().getValue();
IDatasetDetails datasetDetails = null;
Dataset ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
datasetName);
if (ds != null) {
if (dd.getIfNotExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
} else {
throw new AlgebricksException("A dataset with this name " + datasetName + " already exists.");
}
}
Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
itemTypeName);
if (dt == null) {
throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
}
switch (dd.getDatasetType()) {
case INTERNAL: {
IAType itemType = dt.getDatatype();
if (itemType.getTypeTag() != ATypeTag.RECORD) {
throw new AlgebricksException("Can only partition ARecord's.");
}
List<String> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
.getPartitioningExprs();
ARecordType aRecordType = (ARecordType) itemType;
aRecordType.validatePartitioningExpressions(partitioningExprs);
String ngName = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName().getValue();
datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
ngName);
break;
}
case EXTERNAL: {
String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();
datasetDetails = new ExternalDatasetDetails(adapter, properties);
break;
}
case FEED: {
IAType itemType = dt.getDatatype();
if (itemType.getTypeTag() != ATypeTag.RECORD) {
throw new AlgebricksException("Can only partition ARecord's.");
}
List<String> partitioningExprs = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
.getPartitioningExprs();
ARecordType aRecordType = (ARecordType) itemType;
aRecordType.validatePartitioningExpressions(partitioningExprs);
String ngName = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName().getValue();
String adapter = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getAdapterFactoryClassname();
Map<String, String> configuration = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
.getConfiguration();
FunctionSignature signature = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getFunctionSignature();
datasetDetails = new FeedDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
ngName, adapter, configuration, signature, FeedDatasetDetails.FeedState.INACTIVE.toString());
break;
}
}
//#. add a new dataset with PendingAddOp
dataset = new Dataset(dataverseName, datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType,
DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
if (dd.getDatasetType() == DatasetType.INTERNAL || dd.getDatasetType() == DatasetType.FEED) {
Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
dataverseName);
JobSpecification jobSpec = DatasetOperations.createDatasetJobSpec(dataverse, datasetName,
metadataProvider);
//#. make metadataTxn commit before calling runJob.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
//#. runJob
runJob(hcc, jobSpec, true);
//#. begin new metadataTxn
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
}
//#. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), new Dataset(dataverseName,
datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType, dataset.getDatasetId(),
IMetadataEntity.PENDING_NO_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
if (dataset != null) {
//#. execute compensation operations
// remove the index in NC
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
try {
JobSpecification jobSpec = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
runJob(hcc, jobSpec, true);
} catch (Exception e3) {
if (bActiveTxn) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
//do no throw exception since still the metadata needs to be compensated.
}
// remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
datasetName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
throw new AlgebricksException(e2);
}
}
throw e;
} finally {
releaseWriteLatch();
}
}
/**
 * Handles a CREATE INDEX statement using a three-phase protocol:
 * (1) register the index in the metadata with PENDING_ADD_OP, (2) create and
 * bulk-load the physical index on the NCs in separate jobs (each run outside
 * an active metadata transaction), (3) replace the pending record with a
 * PENDING_NO_OP record. On any failure after the job spec was built, it
 * compensates by dropping the physical index and the metadata record.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the CreateIndexStatement to execute
 * @param hcc              client connection used to run the index jobs
 * @throws Exception the original failure, after compensation; any active
 *                   metadata transaction is aborted first
 */
private void handleCreateIndexStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    // Tracks whether mdTxnCtx is still open so the catch block knows
    // whether an abort is required.
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    acquireWriteLatch();
    String dataverseName = null;
    String datasetName = null;
    String indexName = null;
    JobSpecification spec = null;
    try {
        CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
        // Fall back to the active default dataverse when none is given.
        dataverseName = stmtCreateIndex.getDataverseName() == null ? activeDefaultDataverse == null ? null
                : activeDefaultDataverse.getDataverseName() : stmtCreateIndex.getDataverseName().getValue();
        if (dataverseName == null) {
            throw new AlgebricksException(" dataverse not specified ");
        }
        datasetName = stmtCreateIndex.getDatasetName().getValue();
        Dataset ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                datasetName);
        if (ds == null) {
            throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse "
                    + dataverseName);
        }
        indexName = stmtCreateIndex.getIndexName().getValue();
        Index idx = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                datasetName, indexName);
        String itemTypeName = ds.getItemTypeName();
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
                itemTypeName);
        IAType itemType = dt.getDatatype();
        ARecordType aRecordType = (ARecordType) itemType;
        // Validate that the key fields exist and are compatible with the index type.
        aRecordType.validateKeyFields(stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getIndexType());
        if (idx != null) {
            if (!stmtCreateIndex.getIfNotExists()) {
                throw new AlgebricksException("An index with this name " + indexName + " already exists.");
            } else {
                // IF NOT EXISTS: nothing to do, commit and return quietly.
                stmtCreateIndex.setNeedToCreate(false);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            }
        }
        //#. add a new index with PendingAddOp
        Index index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
                stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getGramLength(), false,
                IMetadataEntity.PENDING_ADD_OP);
        MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
        //#. create the index artifact in NC.
        CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
                index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
        spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadataProvider);
        if (spec == null) {
            throw new AsterixException("Failed to create job spec for creating index '"
                    + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
        }
        // The metadata transaction must be committed before running the job.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        runJob(hcc, spec, true);
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        //#. load data into the index in NC.
        cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName, index.getDatasetName(),
                index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
        spec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, metadataProvider);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        runJob(hcc, spec, true);
        //#. begin new metadataTxn
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        //#. add another new index with PendingNoOp after deleting the index with PendingAddOp
        MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName,
                indexName);
        index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
                stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getGramLength(), false,
                IMetadataEntity.PENDING_NO_OP);
        MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        }
        // spec != null means the pending metadata record (and possibly the
        // physical index) may exist and must be rolled back.
        if (spec != null) {
            //#. execute compensation operations
            // remove the index in NC
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
            try {
                JobSpecification jobSpec = IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                runJob(hcc, jobSpec, true);
            } catch (Exception e3) {
                if (bActiveTxn) {
                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                }
                //do no throw exception since still the metadata needs to be compensated.
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName, indexName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                throw new AlgebricksException(e2);
            }
        }
        throw e;
    } finally {
        releaseWriteLatch();
    }
}
/**
 * Handles a CREATE TYPE statement: resolves the target dataverse, rejects
 * redefinition of builtin types, and registers the new datatype in the
 * metadata under a single metadata transaction.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the TypeDecl to execute
 * @throws AlgebricksException if the dataverse is missing/unknown, the type
 *                             already exists (without IF NOT EXISTS), or a
 *                             builtin type name is reused
 */
private void handleCreateTypeStatement(AqlMetadataProvider metadataProvider, Statement stmt)
        throws AlgebricksException, RemoteException, ACIDException, MetadataException {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    acquireWriteLatch();
    try {
        TypeDecl stmtCreateType = (TypeDecl) stmt;
        // Fall back to the active default dataverse when none is given.
        String dataverseName = stmtCreateType.getDataverseName() == null ? activeDefaultDataverse == null ? null
                : activeDefaultDataverse.getDataverseName() : stmtCreateType.getDataverseName().getValue();
        if (dataverseName == null) {
            throw new AlgebricksException(" dataverse not specified ");
        }
        String typeName = stmtCreateType.getIdent().getValue();
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
        if (dv == null) {
            // Fixed typo in the error message ("Unknonw" -> "Unknown").
            throw new AlgebricksException("Unknown dataverse " + dataverseName);
        }
        Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
        if (dt != null) {
            if (!stmtCreateType.getIfNotExists()) {
                throw new AlgebricksException("A datatype with this name " + typeName + " already exists.");
            }
            // IF NOT EXISTS and the type is already there: fall through to commit.
        } else {
            if (builtinTypeMap.get(typeName) != null) {
                throw new AlgebricksException("Cannot redefine builtin type " + typeName + ".");
            } else {
                // Translate the declaration into IAType instances and register
                // the one matching this type's signature.
                Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, stmtCreateType,
                        dataverseName);
                TypeSignature typeSignature = new TypeSignature(dataverseName, typeName);
                IAType type = typeMap.get(typeSignature);
                MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, typeName, type, false));
            }
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        throw e;
    } finally {
        releaseWriteLatch();
    }
}
/**
 * Handles a DROP DATAVERSE statement: marks the dataverse record as
 * PENDING_DROP_OP, runs the jobs that physically drop every contained
 * dataset and secondary index on the NCs, then deletes the dataverse record.
 * On failure, compensates by re-running the drop jobs and removing the
 * pending metadata record.
 *
 * Bug fix: the active-default-dataverse check previously compared Strings
 * with {@code ==} (reference equality); it now uses {@code equals}.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the DataverseDropStatement to execute
 * @param hcc              client connection used to run the drop jobs
 * @throws Exception the original failure, after compensation
 */
private void handleDataverseDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    acquireWriteLatch();
    String dvName = null;
    List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
    try {
        DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
        dvName = stmtDelete.getDataverseName().getValue();
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dvName);
        if (dv == null) {
            if (!stmtDelete.getIfExists()) {
                throw new AlgebricksException("There is no dataverse with this name " + dvName + ".");
            }
            // IF EXISTS and nothing to drop: commit and return quietly.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            return;
        }
        //#. prepare jobs which will drop corresponding datasets with indexes.
        List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dvName);
        for (int j = 0; j < datasets.size(); j++) {
            String datasetName = datasets.get(j).getDatasetName();
            DatasetType dsType = datasets.get(j).getDatasetType();
            // Only internal/feed datasets own physical storage on the NCs.
            if (dsType == DatasetType.INTERNAL || dsType == DatasetType.FEED) {
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dvName, datasetName);
                for (int k = 0; k < indexes.size(); k++) {
                    if (indexes.get(k).isSecondaryIndex()) {
                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dvName, datasetName,
                                indexes.get(k).getIndexName());
                        jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
                    }
                }
                CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dvName, datasetName);
                jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
            }
        }
        //#. mark PendingDropOp on the dataverse record by
        // first, deleting the dataverse record from the DATAVERSE_DATASET
        // second, inserting the dataverse record with the PendingDropOp value into the DATAVERSE_DATASET
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dvName);
        MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dvName, dv.getDataFormat(),
                IMetadataEntity.PENDING_DROP_OP));
        // Commit before running jobs; jobs must run outside a metadata txn.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        for (JobSpecification jobSpec : jobsToExecute) {
            runJob(hcc, jobSpec, true);
        }
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        //#. finally, delete the dataverse.
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dvName);
        // Compare by value, not reference: the active default dataverse name
        // is not guaranteed to be the same String instance as dvName.
        if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName().equals(dvName)) {
            activeDefaultDataverse = null;
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        }
        //#. execute compensation operations
        // remove the all indexes in NC
        for (JobSpecification jobSpec : jobsToExecute) {
            runJob(hcc, jobSpec, true);
        }
        // remove the record from the metadata.
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        try {
            MetadataManager.INSTANCE.dropDataverse(metadataProvider.getMetadataTxnContext(), dvName);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e2) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            throw new AlgebricksException(e2);
        }
        throw e;
    } finally {
        releaseWriteLatch();
    }
}
/**
 * Handles a DROP DATASET statement. For internal/feed datasets it marks the
 * dataset record PENDING_DROP_OP, runs the jobs that physically drop the
 * dataset and its secondary indexes on the NCs, then removes the record.
 * External datasets have no physical artifacts, so only the metadata record
 * is deleted. On failure, compensates by re-running the drop jobs and
 * removing the pending record.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the DropStatement to execute
 * @param hcc              client connection used to run the drop jobs
 * @throws Exception the original failure, after compensation
 */
private void handleDatasetDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    acquireWriteLatch();
    String dataverseName = null;
    String datasetName = null;
    List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
    try {
        DropStatement stmtDelete = (DropStatement) stmt;
        // Fall back to the active default dataverse when none is given.
        dataverseName = stmtDelete.getDataverseName() == null ? activeDefaultDataverse == null ? null
                : activeDefaultDataverse.getDataverseName() : stmtDelete.getDataverseName().getValue();
        if (dataverseName == null) {
            throw new AlgebricksException(" dataverse not specified ");
        }
        datasetName = stmtDelete.getDatasetName().getValue();
        Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
        if (ds == null) {
            if (!stmtDelete.getIfExists()) {
                throw new AlgebricksException("There is no dataset with this name " + datasetName
                        + " in dataverse " + dataverseName + ".");
            }
            // IF EXISTS and nothing to drop: commit and return quietly.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            return;
        }
        if (ds.getDatasetType() == DatasetType.INTERNAL || ds.getDatasetType() == DatasetType.FEED) {
            //#. prepare jobs to drop the datatset and the indexes in NC
            List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
            for (int j = 0; j < indexes.size(); j++) {
                if (indexes.get(j).isSecondaryIndex()) {
                    CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                            indexes.get(j).getIndexName());
                    jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
                }
            }
            CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
            jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
            //#. mark the existing dataset as PendingDropOp
            MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
            MetadataManager.INSTANCE.addDataset(
                    mdTxnCtx,
                    new Dataset(dataverseName, datasetName, ds.getItemTypeName(), ds.getDatasetDetails(), ds
                            .getHints(), ds.getDatasetType(), ds.getDatasetId(), IMetadataEntity.PENDING_DROP_OP));
            // Commit before running jobs; jobs must run outside a metadata txn.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            //#. run the jobs
            for (JobSpecification jobSpec : jobsToExecute) {
                runJob(hcc, jobSpec, true);
            }
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
        }
        //#. finally, delete the dataset.
        MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        }
        //#. execute compensation operations
        // remove the all indexes in NC
        for (JobSpecification jobSpec : jobsToExecute) {
            runJob(hcc, jobSpec, true);
        }
        // remove the record from the metadata.
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        try {
            MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                    datasetName);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e2) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            throw new AlgebricksException(e2);
        }
        throw e;
    } finally {
        releaseWriteLatch();
    }
}
/**
 * Handles a DROP INDEX statement: marks the index record PENDING_DROP_OP,
 * runs the job that physically drops the secondary index on the NCs, then
 * removes the record. Rejects external datasets, which carry no indexes.
 * On failure, compensates by re-running the drop job and removing the
 * pending record.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the IndexDropStatement to execute
 * @param hcc              client connection used to run the drop job
 * @throws Exception the original failure, after compensation
 */
private void handleIndexDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    acquireWriteLatch();
    String dataverseName = null;
    String datasetName = null;
    String indexName = null;
    List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
    try {
        IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
        datasetName = stmtIndexDrop.getDatasetName().getValue();
        // Fall back to the active default dataverse when none is given.
        dataverseName = stmtIndexDrop.getDataverseName() == null ? activeDefaultDataverse == null ? null
                : activeDefaultDataverse.getDataverseName() : stmtIndexDrop.getDataverseName().getValue();
        if (dataverseName == null) {
            throw new AlgebricksException(" dataverse not specified ");
        }
        Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
        if (ds == null) {
            throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse "
                    + dataverseName);
        }
        if (ds.getDatasetType() == DatasetType.INTERNAL || ds.getDatasetType() == DatasetType.FEED) {
            indexName = stmtIndexDrop.getIndexName().getValue();
            Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (index == null) {
                if (!stmtIndexDrop.getIfExists()) {
                    throw new AlgebricksException("There is no index with this name " + indexName + ".");
                }
                // IF EXISTS and nothing to drop: fall through to commit.
            } else {
                //#. prepare a job to drop the index in NC.
                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                        indexName);
                jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
                //#. mark PendingDropOp on the existing index
                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
                MetadataManager.INSTANCE.addIndex(
                        mdTxnCtx,
                        new Index(dataverseName, datasetName, indexName, index.getIndexType(), index
                                .getKeyFieldNames(), index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
                //#. commit the existing transaction before calling runJob.
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                for (JobSpecification jobSpec : jobsToExecute) {
                    runJob(hcc, jobSpec, true);
                }
                //#. begin a new transaction
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                bActiveTxn = true;
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                //#. finally, delete the existing index
                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            }
        } else {
            throw new AlgebricksException(datasetName
                    + " is an external dataset. Indexes are not maintained for external datasets.");
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        }
        //#. execute compensation operations
        // remove the all indexes in NC
        for (JobSpecification jobSpec : jobsToExecute) {
            runJob(hcc, jobSpec, true);
        }
        // remove the record from the metadata.
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        try {
            MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                    datasetName, indexName);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e2) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            throw new AlgebricksException(e2);
        }
        throw e;
    } finally {
        releaseWriteLatch();
    }
}
/**
 * Handles a DROP TYPE statement: resolves the target dataverse, looks up the
 * datatype, and drops it inside a single metadata transaction. A missing
 * type is only an error when IF EXISTS was not specified.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the TypeDropStatement to execute
 * @throws AlgebricksException if no dataverse can be resolved, or the type
 *                             is missing without IF EXISTS
 */
private void handleTypeDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
        throws AlgebricksException, MetadataException, RemoteException, ACIDException {
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(txnCtx);
    acquireWriteLatch();
    try {
        TypeDropStatement typeDropStmt = (TypeDropStatement) stmt;
        // Resolve the dataverse: explicit name wins, otherwise use the
        // active default dataverse (if any).
        String dataverseName;
        if (typeDropStmt.getDataverseName() != null) {
            dataverseName = typeDropStmt.getDataverseName().getValue();
        } else {
            dataverseName = (activeDefaultDataverse == null) ? null : activeDefaultDataverse.getDataverseName();
        }
        if (dataverseName == null) {
            throw new AlgebricksException(" dataverse not specified ");
        }
        String typeName = typeDropStmt.getTypeName().getValue();
        Datatype existingType = MetadataManager.INSTANCE.getDatatype(txnCtx, dataverseName, typeName);
        if (existingType != null) {
            MetadataManager.INSTANCE.dropDatatype(txnCtx, dataverseName, typeName);
        } else if (!typeDropStmt.getIfExists()) {
            throw new AlgebricksException("There is no datatype with this name " + typeName + ".");
        }
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(txnCtx);
        throw e;
    } finally {
        releaseWriteLatch();
    }
}
/**
 * Handles a DROP NODEGROUP statement inside a single metadata transaction.
 * A missing nodegroup is only an error when IF EXISTS was not specified.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the NodeGroupDropStatement to execute
 * @throws AlgebricksException if the nodegroup is missing without IF EXISTS
 */
private void handleNodegroupDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
        throws MetadataException, AlgebricksException, RemoteException, ACIDException {
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(txnCtx);
    acquireWriteLatch();
    try {
        NodeGroupDropStatement dropStmt = (NodeGroupDropStatement) stmt;
        String nodegroupName = dropStmt.getNodeGroupName().getValue();
        NodeGroup nodeGroup = MetadataManager.INSTANCE.getNodegroup(txnCtx, nodegroupName);
        if (nodeGroup != null) {
            MetadataManager.INSTANCE.dropNodegroup(txnCtx, nodegroupName);
        } else if (!dropStmt.getIfExists()) {
            throw new AlgebricksException("There is no nodegroup with this name " + nodegroupName + ".");
        }
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(txnCtx);
        throw e;
    } finally {
        releaseWriteLatch();
    }
}
/**
 * Handles a CREATE FUNCTION statement: resolves the target dataverse,
 * verifies it exists, and registers a new scalar AQL function in the
 * metadata inside a single metadata transaction.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the CreateFunctionStatement to execute
 * @throws AlgebricksException if no dataverse can be resolved or it does not exist
 */
private void handleCreateFunctionStatement(AqlMetadataProvider metadataProvider, Statement stmt)
        throws AlgebricksException, MetadataException, RemoteException, ACIDException {
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(txnCtx);
    acquireWriteLatch();
    try {
        CreateFunctionStatement createStmt = (CreateFunctionStatement) stmt;
        // Resolve the dataverse: the signature's namespace wins, otherwise
        // fall back to the active default dataverse (if any).
        String dataverse;
        if (createStmt.getSignature().getNamespace() != null) {
            dataverse = createStmt.getSignature().getNamespace();
        } else {
            dataverse = (activeDefaultDataverse == null) ? null : activeDefaultDataverse.getDataverseName();
        }
        if (dataverse == null) {
            throw new AlgebricksException(" dataverse not specified ");
        }
        Dataverse existing = MetadataManager.INSTANCE.getDataverse(txnCtx, dataverse);
        if (existing == null) {
            throw new AlgebricksException("There is no dataverse with this name " + dataverse + ".");
        }
        // Register the function as a scalar AQL function with a VOID return type.
        Function function = new Function(dataverse, createStmt.getaAterixFunction().getName(), createStmt
                .getaAterixFunction().getArity(), createStmt.getParamList(), Function.RETURNTYPE_VOID,
                createStmt.getFunctionBody(), Function.LANGUAGE_AQL, FunctionKind.SCALAR.toString());
        MetadataManager.INSTANCE.addFunction(txnCtx, function);
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(txnCtx);
        throw e;
    } finally {
        releaseWriteLatch();
    }
}
/**
 * Handles a DROP FUNCTION statement inside a single metadata transaction.
 * A missing function is only an error when IF EXISTS was not specified.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the FunctionDropStatement to execute
 * @throws AlgebricksException if the function is missing without IF EXISTS
 */
private void handleFunctionDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
        throws MetadataException, RemoteException, ACIDException, AlgebricksException {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    acquireWriteLatch();
    try {
        FunctionDropStatement stmtDropFunction = (FunctionDropStatement) stmt;
        FunctionSignature signature = stmtDropFunction.getFunctionSignature();
        Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
        if (function == null) {
            if (!stmtDropFunction.getIfExists()) {
                // Fixed typo in the error message ("Unknonw" -> "Unknown").
                throw new AlgebricksException("Unknown function " + signature);
            }
        } else {
            MetadataManager.INSTANCE.dropFunction(mdTxnCtx, signature);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        throw e;
    } finally {
        releaseWriteLatch();
    }
}
/**
 * Handles a LOAD FROM FILE statement: builds one job that bulk-loads the
 * dataset plus one loading job per secondary index, commits the metadata
 * transaction, then runs all jobs. Runs under a read latch — the dataset
 * already exists and no metadata record is created here.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the LoadFromFileStatement to execute
 * @param hcc              client connection used to run the load jobs
 * @throws Exception on compile or job failure; the metadata transaction is
 *                   aborted first if still active
 */
private void handleLoadFromFileStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    acquireReadLatch();
    List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
    try {
        LoadFromFileStatement loadStmt = (LoadFromFileStatement) stmt;
        // Fall back to the active default dataverse when none is given.
        String dataverseName = loadStmt.getDataverseName() == null ? activeDefaultDataverse == null ? null
                : activeDefaultDataverse.getDataverseName() : loadStmt.getDataverseName().getValue();
        CompiledLoadFromFileStatement cls = new CompiledLoadFromFileStatement(dataverseName, loadStmt
                .getDatasetName().getValue(), loadStmt.getAdapter(), loadStmt.getProperties(),
                loadStmt.dataIsAlreadySorted());
        IDataFormat format = getDataFormat(metadataProvider.getMetadataTxnContext(), dataverseName);
        Job job = DatasetOperations.createLoadDatasetJobSpec(metadataProvider, cls, format);
        jobsToExecute.add(job.getJobSpec());
        // Also load the dataset's secondary indexes.
        List<Index> datasetIndexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, loadStmt
                .getDatasetName().getValue());
        for (Index index : datasetIndexes) {
            if (!index.isSecondaryIndex()) {
                continue;
            }
            // Create CompiledCreateIndexStatement from metadata entity 'index'.
            CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(),
                    dataverseName, index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(),
                    index.getIndexType());
            jobsToExecute.add(IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, metadataProvider));
        }
        // Commit before running jobs; jobs must run outside a metadata txn.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        for (JobSpecification jobspec : jobsToExecute) {
            runJob(hcc, jobspec, true);
        }
    } catch (Exception e) {
        if (bActiveTxn) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        }
        throw e;
    } finally {
        releaseReadLatch();
    }
}
/**
 * Handles a WRITE FROM QUERY RESULT statement: compiles the embedded query
 * as a write transaction, commits the metadata transaction, and then runs
 * the resulting job (if compilation produced one).
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the WriteFromQueryResultStatement to execute
 * @param hcc              client connection used to run the compiled job
 * @throws Exception on compile or job failure; the metadata transaction is
 *                   aborted first if still active
 */
private void handleWriteFromQueryResultStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean txnActive = true;
    metadataProvider.setMetadataTxnContext(txnCtx);
    acquireReadLatch();
    try {
        metadataProvider.setWriteTransaction(true);
        WriteFromQueryResultStatement writeStmt = (WriteFromQueryResultStatement) stmt;
        // Resolve the dataverse: explicit name wins, otherwise the active default.
        String dataverseName;
        if (writeStmt.getDataverseName() != null) {
            dataverseName = writeStmt.getDataverseName().getValue();
        } else {
            dataverseName = (activeDefaultDataverse == null) ? null : activeDefaultDataverse.getDataverseName();
        }
        CompiledWriteFromQueryResultStatement compiledStmt = new CompiledWriteFromQueryResultStatement(
                dataverseName, writeStmt.getDatasetName().getValue(), writeStmt.getQuery(),
                writeStmt.getVarCounter());
        JobSpecification jobSpec = rewriteCompileQuery(metadataProvider, compiledStmt.getQuery(), compiledStmt);
        // Commit before running the job; jobs must run outside a metadata txn.
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
        txnActive = false;
        if (jobSpec != null) {
            runJob(hcc, jobSpec, true);
        }
    } catch (Exception e) {
        if (txnActive) {
            MetadataManager.INSTANCE.abortTransaction(txnCtx);
        }
        throw e;
    } finally {
        releaseReadLatch();
    }
}
/**
 * Handles an INSERT statement: compiles the embedded query as a write
 * transaction, commits the metadata transaction, and then runs the resulting
 * job (if compilation produced one).
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the InsertStatement to execute
 * @param hcc              client connection used to run the compiled job
 * @throws Exception on compile or job failure; the metadata transaction is
 *                   aborted first if still active
 */
private void handleInsertStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean txnActive = true;
    metadataProvider.setMetadataTxnContext(txnCtx);
    acquireReadLatch();
    try {
        metadataProvider.setWriteTransaction(true);
        InsertStatement insertStmt = (InsertStatement) stmt;
        // Resolve the dataverse: explicit name wins, otherwise the active default.
        String dataverseName;
        if (insertStmt.getDataverseName() != null) {
            dataverseName = insertStmt.getDataverseName().getValue();
        } else {
            dataverseName = (activeDefaultDataverse == null) ? null : activeDefaultDataverse.getDataverseName();
        }
        CompiledInsertStatement compiledStmt = new CompiledInsertStatement(dataverseName, insertStmt
                .getDatasetName().getValue(), insertStmt.getQuery(), insertStmt.getVarCounter());
        JobSpecification jobSpec = rewriteCompileQuery(metadataProvider, compiledStmt.getQuery(), compiledStmt);
        // Commit before running the job; jobs must run outside a metadata txn.
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
        txnActive = false;
        if (jobSpec != null) {
            runJob(hcc, jobSpec, true);
        }
    } catch (Exception e) {
        if (txnActive) {
            MetadataManager.INSTANCE.abortTransaction(txnCtx);
        }
        throw e;
    } finally {
        releaseReadLatch();
    }
}
/**
 * Handles a DELETE statement: compiles the delete (with its condition and
 * optional die clause) as a write transaction, commits the metadata
 * transaction, and runs the resulting job if one was produced.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the DeleteStatement to execute
 * @param hcc              client connection used to run the compiled job
 * @throws Exception on compile or job failure; the metadata transaction is
 *                   aborted first if still active
 */
private void handleDeleteStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    acquireReadLatch();
    try {
        metadataProvider.setWriteTransaction(true);
        DeleteStatement stmtDelete = (DeleteStatement) stmt;
        // Fall back to the active default dataverse when none is given.
        String dataverseName = stmtDelete.getDataverseName() == null ? activeDefaultDataverse == null ? null
                : activeDefaultDataverse.getDataverseName() : stmtDelete.getDataverseName().getValue();
        CompiledDeleteStatement clfrqs = new CompiledDeleteStatement(stmtDelete.getVariableExpr(), dataverseName,
                stmtDelete.getDatasetName().getValue(), stmtDelete.getCondition(), stmtDelete.getDieClause(),
                stmtDelete.getVarCounter(), metadataProvider);
        JobSpecification compiled = rewriteCompileQuery(metadataProvider, clfrqs.getQuery(), clfrqs);
        // Commit before running the job; jobs must run outside a metadata txn.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        if (compiled != null) {
            runJob(hcc, compiled, true);
        }
    } catch (Exception e) {
        if (bActiveTxn) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        }
        throw e;
    } finally {
        releaseReadLatch();
    }
}
/**
 * Rewrites and then compiles a query into a Hyracks job specification.
 * Both steps run under the caller's ongoing metadata transaction.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param query            the query to rewrite and compile
 * @param stmt             the enclosing compiled DML statement, or null for
 *                         a standalone query
 * @return the compiled job specification
 */
private JobSpecification rewriteCompileQuery(AqlMetadataProvider metadataProvider, Query query,
        ICompiledDmlStatement stmt) throws AsterixException, RemoteException, AlgebricksException, JSONException,
        ACIDException {
    // Query Rewriting (happens under the same ongoing metadata transaction)
    Pair<Query, Integer> reWrittenQuery = APIFramework.reWriteQuery(declaredFunctions, metadataProvider, query,
            sessionConfig, out, pdf);
    // Query Compilation (happens under the same ongoing metadata
    // transaction)
    JobSpecification spec = APIFramework.compileQuery(declaredFunctions, metadataProvider, query,
            reWrittenQuery.second, stmt == null ? null : stmt.getDatasetName(), sessionConfig, out, pdf, stmt);
    return spec;
}
/**
 * Handles a BEGIN FEED statement: verifies the target dataset exists and is
 * a feed dataset, initializes the feed statement against the dataset's
 * metadata, compiles the feed query, commits the metadata transaction, and
 * runs the resulting job if one was produced.
 *
 * @param metadataProvider carries the metadata transaction context
 * @param stmt             the BeginFeedStatement to execute
 * @param hcc              client connection used to run the compiled job
 * @throws AsterixException         if the dataset does not exist
 * @throws IllegalArgumentException if the dataset is not a feed dataset
 */
private void handleBeginFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    acquireReadLatch();
    try {
        BeginFeedStatement bfs = (BeginFeedStatement) stmt;
        // Fall back to the active default dataverse when none is given.
        String dataverseName = bfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
                : activeDefaultDataverse.getDataverseName() : bfs.getDataverseName().getValue();
        CompiledBeginFeedStatement cbfs = new CompiledBeginFeedStatement(dataverseName, bfs.getDatasetName()
                .getValue(), bfs.getQuery(), bfs.getVarCounter());
        Dataset dataset;
        dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName, bfs
                .getDatasetName().getValue());
        if (dataset == null) {
            throw new AsterixException("Unknown dataset :" + bfs.getDatasetName().getValue());
        }
        IDatasetDetails datasetDetails = dataset.getDatasetDetails();
        if (datasetDetails.getDatasetType() != DatasetType.FEED) {
            throw new IllegalArgumentException("Dataset " + bfs.getDatasetName().getValue()
                    + " is not a feed dataset");
        }
        // Bind the statement to the dataset's metadata; this may alter the
        // query, so refresh it on the compiled statement afterwards.
        bfs.initialize(metadataProvider.getMetadataTxnContext(), dataset);
        cbfs.setQuery(bfs.getQuery());
        JobSpecification compiled = rewriteCompileQuery(metadataProvider, bfs.getQuery(), cbfs);
        // Commit before running the job; jobs must run outside a metadata txn.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        if (compiled != null) {
            runJob(hcc, compiled, true);
        }
    } catch (Exception e) {
        if (bActiveTxn) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        }
        throw e;
    } finally {
        releaseReadLatch();
    }
}
/**
 * Handles a CONTROL FEED statement by building and running the corresponding
 * feed-control job.
 *
 * @param metadataProvider provider bound to a fresh metadata transaction created here
 * @param stmt             the ControlFeedStatement to execute
 * @param hcc              connection used to submit the generated job
 * @throws Exception if job construction or execution fails; the metadata transaction
 *                   is aborted if it is still open
 */
private void handleControlFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true; // abort on failure only while the metadata txn is still open
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    acquireReadLatch();
    try {
        ControlFeedStatement cfs = (ControlFeedStatement) stmt;
        // BUGFIX: the explicit-name branch previously used cfs.getDatasetName().getValue()
        // as the dataverse name; use the statement's dataverse name instead, mirroring
        // handleBeginFeedStatement.
        String dataverseName = cfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
                : activeDefaultDataverse.getDataverseName() : cfs.getDataverseName().getValue();
        CompiledControlFeedStatement clcfs = new CompiledControlFeedStatement(cfs.getOperationType(),
                dataverseName, cfs.getDatasetName().getValue(), cfs.getAlterAdapterConfParams());
        JobSpecification jobSpec = FeedOperations.buildControlFeedJobSpec(clcfs, metadataProvider);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        // Blocking run: wait for the control job to complete.
        runJob(hcc, jobSpec, true);
    } catch (Exception e) {
        if (bActiveTxn) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        }
        throw e;
    } finally {
        releaseReadLatch();
    }
}
/**
 * Compiles and executes a query, then either prints a result handle (asynchronous mode)
 * or streams the result frames to the configured output writer in the requested
 * display format.
 *
 * @param metadataProvider provider bound to a fresh metadata transaction created here
 * @param query            the query to compile and run
 * @param hcc              connection used to submit the job
 * @param hdc              dataset client used to read result frames
 * @param asyncResults     when true, print a handle (job id + result set id) instead of results
 * @return a QueryResult describing the query and its result set id
 * @throws Exception if compilation or execution fails; the metadata transaction is
 *                   aborted if it is still open
 */
private QueryResult handleQuery(AqlMetadataProvider metadataProvider, Query query, IHyracksClientConnection hcc,
        IHyracksDataset hdc, boolean asyncResults) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true; // abort on failure only while the metadata txn is still open
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    acquireReadLatch();
    try {
        JobSpecification compiled = rewriteCompileQuery(metadataProvider, query, null);
        QueryResult queryResult = new QueryResult(query, metadataProvider.getResultSetId());
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        if (compiled != null) {
            GlobalConfig.ASTERIX_LOGGER.info(compiled.toJSON().toString(1));
            // Submit without blocking; completion is awaited explicitly below.
            JobId jobId = runJob(hcc, compiled, false);
            JSONObject response = new JSONObject();
            if (asyncResults) {
                // Asynchronous mode: hand back a handle the client can poll with later.
                JSONArray handle = new JSONArray();
                handle.put(jobId.getId());
                handle.put(metadataProvider.getResultSetId().getId());
                response.put("handle", handle);
                out.print(response);
                out.flush();
            } else {
                if (pdf == DisplayFormat.HTML) {
                    out.println("<pre>");
                }
                // Stream result frames one buffer at a time and print each batch.
                ByteBuffer buffer = ByteBuffer.allocate(ResultReader.FRAME_SIZE);
                ResultReader resultReader = new ResultReader(hcc, hdc);
                resultReader.open(jobId, metadataProvider.getResultSetId());
                buffer.clear();
                while (resultReader.read(buffer) > 0) {
                    response.put("results", ResultUtils.getJSONFromBuffer(buffer, resultReader.getFrameTupleAccessor()));
                    buffer.clear();
                    switch (pdf) {
                        case HTML:
                            ResultUtils.prettyPrintHTML(out, response);
                            break;
                        case TEXT:
                        case JSON:
                            out.print(response);
                            break;
                    }
                    out.flush();
                }
                if (pdf == DisplayFormat.HTML) {
                    out.println("</pre>");
                }
            }
            hcc.waitForCompletion(jobId);
        }
        return queryResult;
    } catch (Exception e) {
        if (bActiveTxn) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        }
        throw e;
    } finally {
        releaseReadLatch();
    }
}
/**
 * Creates a node group from a NodegroupDecl, honouring IF NOT EXISTS.
 *
 * @param metadataProvider provider bound to a fresh metadata transaction created here
 * @param stmt             the NodegroupDecl to execute
 * @throws AlgebricksException if the nodegroup exists and IF NOT EXISTS was not given
 */
private void handleCreateNodeGroupStatement(AqlMetadataProvider metadataProvider, Statement stmt)
        throws MetadataException, AlgebricksException, RemoteException, ACIDException {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // DDL: take the write latch (queries/feeds only take the read latch).
    acquireWriteLatch();
    try {
        NodegroupDecl stmtCreateNodegroup = (NodegroupDecl) stmt;
        String ngName = stmtCreateNodegroup.getNodegroupName().getValue();
        NodeGroup ng = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, ngName);
        if (ng != null) {
            // Already present: only an error when IF NOT EXISTS was not specified.
            if (!stmtCreateNodegroup.getIfNotExists())
                throw new AlgebricksException("A nodegroup with this name " + ngName + " already exists.");
        } else {
            // Collect the node-controller names and register the new group.
            List<Identifier> ncIdentifiers = stmtCreateNodegroup.getNodeControllerNames();
            List<String> ncNames = new ArrayList<String>(ncIdentifiers.size());
            for (Identifier id : ncIdentifiers) {
                ncNames.add(id.getValue());
            }
            MetadataManager.INSTANCE.addNodegroup(mdTxnCtx, new NodeGroup(ngName, ncNames));
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        throw e;
    } finally {
        releaseWriteLatch();
    }
}
/**
 * Submits a single job specification, optionally waiting for it to finish.
 *
 * @param spec              the job to run
 * @param waitForCompletion whether to block until the job completes
 * @return the id of the started job
 */
private JobId runJob(IHyracksClientConnection hcc, JobSpecification spec, boolean waitForCompletion)
        throws Exception {
    // Delegate to the batch variant with a one-element batch and return its only id.
    final Job[] singleton = new Job[] { new Job(spec) };
    return executeJobArray(hcc, singleton, out, pdf, waitForCompletion)[0];
}
/**
 * Starts each job in the given batch, optionally waiting for each one to finish
 * before starting the next.
 *
 * @param jobs              the jobs to submit, in order
 * @param waitForCompletion whether to block on each job before submitting the next
 * @return the job ids in submission order
 */
public JobId[] executeJobArray(IHyracksClientConnection hcc, Job[] jobs, PrintWriter out, DisplayFormat pdf,
        boolean waitForCompletion) throws Exception {
    final JobId[] submitted = new JobId[jobs.length];
    int idx = 0;
    for (Job job : jobs) {
        JobSpecification jobSpec = job.getJobSpec();
        jobSpec.setMaxReattempts(0); // fail fast: no automatic re-execution on failure
        submitted[idx] = hcc.startJob(jobSpec);
        if (waitForCompletion) {
            hcc.waitForCompletion(submitted[idx]);
        }
        idx++;
    }
    return submitted;
}
/**
 * Looks up the dataverse's configured data-format class and instantiates it
 * reflectively.
 *
 * @param dataverseName name of the dataverse whose format is requested
 * @return a fresh IDataFormat instance
 * @throws AsterixException wrapping any reflection failure
 */
private static IDataFormat getDataFormat(MetadataTransactionContext mdTxnCtx, String dataverseName)
        throws AsterixException {
    final Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
    try {
        return (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
    } catch (Exception e) {
        throw new AsterixException(e);
    }
}
// Thin wrappers around the global metadata latch. As used in this class, DDL handlers
// (e.g. nodegroup creation) take the write latch while query/feed handlers take the
// read latch.
private void acquireWriteLatch() {
    MetadataManager.INSTANCE.acquireWriteLatch();
}

private void releaseWriteLatch() {
    MetadataManager.INSTANCE.releaseWriteLatch();
}

private void acquireReadLatch() {
    MetadataManager.INSTANCE.acquireReadLatch();
}

private void releaseReadLatch() {
    MetadataManager.INSTANCE.releaseReadLatch();
}
}
|
Formatting fix.
|
asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
|
Formatting fix.
|
|
Java
|
apache-2.0
|
8b6a9a5f07b3e938156000cac1e089bfed9b5392
| 0
|
MICommunity/psi-jami,MICommunity/psi-jami,MICommunity/psi-jami
|
package psidev.psi.mi.jami.model.impl;
import psidev.psi.mi.jami.model.*;
import psidev.psi.mi.jami.utils.CvTermUtils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
/**
* Default implementation for Interactor
*
* Notes: The equals and hashcode methods have NOT been overridden because the Interactor object is a complex object.
* To compare Interactor objects, you can use some comparators provided by default:
* - DefaultInteractorBaseComparator
* - UnambiguousInteractorBaseComparator
* - DefaultExactInteractorBaseComparator
* - UnambiguousExactInteractorBaseComparator
* - DefaultInteractorComparator
* - UnambiguousInteractorComparator
* - DefaultExactInteractorComparator
* - UnambiguousExactInteractorComparator
* - AbstractInteractorBaseComparator
*
* @author Marine Dumousseau (marine@ebi.ac.uk)
* @version $Id$
* @since <pre>22/01/13</pre>
*/
public class DefaultInteractor implements Interactor, Serializable {

    private String shortName;
    private String fullName;
    // The collections below are lazily created by their accessors (or installed by the
    // initialise*With hooks).
    private Collection<Xref> identifiers;
    private Collection<Checksum> checksums;
    private Collection<Xref> xrefs;
    private Collection<Annotation> annotations;
    private Collection<Alias> aliases;
    private Organism organism;
    private CvTerm interactorType;

    /**
     * Creates an interactor with the given short name and interactor type.
     *
     * @param name short name, never null nor empty
     * @param type interactor type; defaults to the "unknown interactor type" CV term when null
     * @throws IllegalArgumentException if name is null or empty
     */
    public DefaultInteractor(String name, CvTerm type){
        // Simplified from "name == null || (name != null && name.length() == 0)":
        // the inner null check was redundant.
        if (name == null || name.length() == 0){
            throw new IllegalArgumentException("The short name cannot be null or empty.");
        }
        this.shortName = name;
        if (type == null){
            this.interactorType = CvTermUtils.createUnknownInteractorType();
        }
        else {
            this.interactorType = type;
        }
    }

    public DefaultInteractor(String name, String fullName, CvTerm type){
        this(name, type);
        this.fullName = fullName;
    }

    public DefaultInteractor(String name, CvTerm type, Organism organism){
        this(name, type);
        this.organism = organism;
    }

    public DefaultInteractor(String name, String fullName, CvTerm type, Organism organism){
        this(name, fullName, type);
        this.organism = organism;
    }

    public DefaultInteractor(String name, CvTerm type, Xref uniqueId){
        this(name, type);
        getIdentifiers().add(uniqueId);
    }

    public DefaultInteractor(String name, String fullName, CvTerm type, Xref uniqueId){
        this(name, fullName, type);
        getIdentifiers().add(uniqueId);
    }

    public DefaultInteractor(String name, CvTerm type, Organism organism, Xref uniqueId){
        this(name, type, organism);
        getIdentifiers().add(uniqueId);
    }

    public DefaultInteractor(String name, String fullName, CvTerm type, Organism organism, Xref uniqueId){
        this(name, fullName, type, organism);
        getIdentifiers().add(uniqueId);
    }

    /**
     * Creates an interactor with the given short name and the default "unknown"
     * interactor type.
     *
     * @param name short name, never null nor empty
     * @throws IllegalArgumentException if name is null or empty
     */
    public DefaultInteractor(String name){
        if (name == null || name.length() == 0){
            throw new IllegalArgumentException("The short name cannot be null or empty.");
        }
        this.shortName = name;
        this.interactorType = CvTermUtils.createUnknownInteractorType();
    }

    public DefaultInteractor(String name, String fullName){
        this(name);
        this.fullName = fullName;
    }

    public DefaultInteractor(String name, Organism organism){
        this(name);
        this.organism = organism;
        // this(name) already set the unknown interactor type; the redundant
        // reassignment present in the original was removed.
    }

    public DefaultInteractor(String name, String fullName, Organism organism){
        this(name, fullName);
        this.organism = organism;
    }

    public DefaultInteractor(String name, Xref uniqueId){
        this(name);
        getIdentifiers().add(uniqueId);
    }

    public DefaultInteractor(String name, String fullName, Xref uniqueId){
        this(name, fullName);
        getIdentifiers().add(uniqueId);
    }

    public DefaultInteractor(String name, Organism organism, Xref uniqueId){
        this(name, organism);
        getIdentifiers().add(uniqueId);
    }

    public DefaultInteractor(String name, String fullName, Organism organism, Xref uniqueId){
        this(name, fullName, organism);
        getIdentifiers().add(uniqueId);
    }

    // Lazy-initialisation hooks; protected so they may be overridden to change the
    // backing collection type.
    protected void initialiseAnnotations(){
        this.annotations = new ArrayList<Annotation>();
    }

    protected void initialiseXrefs(){
        this.xrefs = new ArrayList<Xref>();
    }

    protected void initialiseAliases(){
        this.aliases = new ArrayList<Alias>();
    }

    protected void initialiseIdentifiers(){
        this.identifiers = new ArrayList<Xref>();
    }

    protected void initialiseChecksums(){
        this.checksums = new ArrayList<Checksum>();
    }

    // The initialise*With hooks install a caller-provided collection, or an immutable
    // empty list when given null. Collections.emptyList() replaces the raw
    // Collections.EMPTY_LIST constant to avoid unchecked assignments; the runtime
    // value is the same shared immutable empty list.
    protected void initialiseAnnotationsWith(Collection<Annotation> annotations){
        if (annotations == null){
            this.annotations = Collections.emptyList();
        }
        else {
            this.annotations = annotations;
        }
    }

    protected void initialiseXrefsWith(Collection<Xref> xrefs){
        if (xrefs == null){
            this.xrefs = Collections.emptyList();
        }
        else {
            this.xrefs = xrefs;
        }
    }

    protected void initialiseAliasesWith(Collection<Alias> aliases){
        if (aliases == null){
            this.aliases = Collections.emptyList();
        }
        else {
            this.aliases = aliases;
        }
    }

    protected void initialiseIdentifiersWith(Collection<Xref> identifiers){
        if (identifiers == null){
            this.identifiers = Collections.emptyList();
        }
        else {
            this.identifiers = identifiers;
        }
    }

    protected void initialiseChecksumsWith(Collection<Checksum> checksums){
        if (checksums == null){
            this.checksums = Collections.emptyList();
        }
        else {
            this.checksums = checksums;
        }
    }

    public String getShortName() {
        return this.shortName;
    }

    public void setShortName(String name) {
        if (name == null || name.length() == 0){
            throw new IllegalArgumentException("The short name cannot be null or empty.");
        }
        this.shortName = name;
    }

    public String getFullName() {
        return this.fullName;
    }

    public void setFullName(String name) {
        this.fullName = name;
    }

    public Collection<Xref> getIdentifiers() {
        if (identifiers == null){
            initialiseIdentifiers();
        }
        return this.identifiers;
    }

    /**
     * @return the first identifier in the list of identifiers or null if the list is empty
     */
    public Xref getPreferredIdentifier() {
        return !getIdentifiers().isEmpty() ? getIdentifiers().iterator().next() : null;
    }

    public Collection<Checksum> getChecksums() {
        if (checksums == null){
            initialiseChecksums();
        }
        return this.checksums;
    }

    public Collection<Xref> getXrefs() {
        if (xrefs == null){
            initialiseXrefs();
        }
        return this.xrefs;
    }

    public Collection<Annotation> getAnnotations() {
        if (annotations == null){
            initialiseAnnotations();
        }
        return this.annotations;
    }

    public Collection<Alias> getAliases() {
        if (aliases == null){
            initialiseAliases();
        }
        return this.aliases;
    }

    public Organism getOrganism() {
        return this.organism;
    }

    public void setOrganism(Organism organism) {
        this.organism = organism;
    }

    public CvTerm getInteractorType() {
        return this.interactorType;
    }

    public void setInteractorType(CvTerm interactorType) {
        if (interactorType == null){
            this.interactorType = CvTermUtils.createUnknownInteractorType();
        }
        else {
            this.interactorType = interactorType;
        }
    }

    @Override
    public String toString() {
        return shortName + (organism != null ? ", " + organism.toString() : "") + (interactorType != null ? ", " + interactorType.toString() : "") ;
    }
}
|
jami-core/src/main/java/psidev/psi/mi/jami/model/impl/DefaultInteractor.java
|
package psidev.psi.mi.jami.model.impl;
import psidev.psi.mi.jami.model.*;
import psidev.psi.mi.jami.utils.CvTermUtils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
/**
* Default implementation for Interactor
*
* Notes: The equals and hashcode methods have NOT been overridden because the Interactor object is a complex object.
* To compare Interactor objects, you can use some comparators provided by default:
* - DefaultInteractorBaseComparator
* - UnambiguousInteractorBaseComparator
* - DefaultExactInteractorBaseComparator
* - UnambiguousExactInteractorBaseComparator
* - DefaultInteractorComparator
* - UnambiguousInteractorComparator
* - DefaultExactInteractorComparator
* - UnambiguousExactInteractorComparator
* - AbstractInteractorBaseComparator
*
* @author Marine Dumousseau (marine@ebi.ac.uk)
* @version $Id$
* @since <pre>22/01/13</pre>
*/
public class DefaultInteractor implements Interactor, Serializable {

    private String shortName;
    private String fullName;
    // The collections below are lazily created by their accessors (or installed by the
    // initialise*With hooks).
    private Collection<Xref> identifiers;
    private Collection<Checksum> checksums;
    private Collection<Xref> xrefs;
    private Collection<Annotation> annotations;
    private Collection<Alias> aliases;
    private Organism organism;
    private CvTerm interactorType;

    /**
     * Creates an interactor with the given short name and interactor type.
     *
     * @param name short name, never null nor empty
     * @param type interactor type; defaults to the "unknown interactor type" CV term when null
     * @throws IllegalArgumentException if name is null or empty
     */
    public DefaultInteractor(String name, CvTerm type){
        // Simplified from "name == null || (name != null && name.length() == 0)":
        // the inner null check was redundant.
        if (name == null || name.length() == 0){
            throw new IllegalArgumentException("The short name cannot be null or empty.");
        }
        this.shortName = name;
        if (type == null){
            this.interactorType = CvTermUtils.createUnknownInteractorType();
        }
        else {
            this.interactorType = type;
        }
    }

    public DefaultInteractor(String name, String fullName, CvTerm type){
        this(name, type);
        this.fullName = fullName;
    }

    public DefaultInteractor(String name, CvTerm type, Organism organism){
        this(name, type);
        this.organism = organism;
    }

    public DefaultInteractor(String name, String fullName, CvTerm type, Organism organism){
        this(name, fullName, type);
        this.organism = organism;
    }

    public DefaultInteractor(String name, CvTerm type, Xref uniqueId){
        this(name, type);
        getIdentifiers().add(uniqueId);
    }

    public DefaultInteractor(String name, String fullName, CvTerm type, Xref uniqueId){
        this(name, fullName, type);
        getIdentifiers().add(uniqueId);
    }

    public DefaultInteractor(String name, CvTerm type, Organism organism, Xref uniqueId){
        this(name, type, organism);
        getIdentifiers().add(uniqueId);
    }

    public DefaultInteractor(String name, String fullName, CvTerm type, Organism organism, Xref uniqueId){
        this(name, fullName, type, organism);
        getIdentifiers().add(uniqueId);
    }

    /**
     * Creates an interactor with the given short name and the default "unknown"
     * interactor type.
     *
     * @param name short name, never null nor empty
     * @throws IllegalArgumentException if name is null or empty
     */
    public DefaultInteractor(String name){
        if (name == null || name.length() == 0){
            throw new IllegalArgumentException("The short name cannot be null or empty.");
        }
        this.shortName = name;
        this.interactorType = CvTermUtils.createUnknownInteractorType();
    }

    public DefaultInteractor(String name, String fullName){
        this(name);
        this.fullName = fullName;
    }

    public DefaultInteractor(String name, Organism organism){
        this(name);
        this.organism = organism;
        // this(name) already set the unknown interactor type; the redundant
        // reassignment present in the original was removed.
    }

    public DefaultInteractor(String name, String fullName, Organism organism){
        this(name, fullName);
        this.organism = organism;
    }

    public DefaultInteractor(String name, Xref uniqueId){
        this(name);
        getIdentifiers().add(uniqueId);
    }

    public DefaultInteractor(String name, String fullName, Xref uniqueId){
        this(name, fullName);
        getIdentifiers().add(uniqueId);
    }

    public DefaultInteractor(String name, Organism organism, Xref uniqueId){
        this(name, organism);
        getIdentifiers().add(uniqueId);
    }

    public DefaultInteractor(String name, String fullName, Organism organism, Xref uniqueId){
        this(name, fullName, organism);
        getIdentifiers().add(uniqueId);
    }

    // Lazy-initialisation hooks; protected so they may be overridden to change the
    // backing collection type.
    protected void initialiseAnnotations(){
        this.annotations = new ArrayList<Annotation>();
    }

    protected void initialiseXrefs(){
        this.xrefs = new ArrayList<Xref>();
    }

    protected void initialiseAliases(){
        this.aliases = new ArrayList<Alias>();
    }

    protected void initialiseIdentifiers(){
        this.identifiers = new ArrayList<Xref>();
    }

    protected void initialiseChecksums(){
        this.checksums = new ArrayList<Checksum>();
    }

    // The initialise*With hooks install a caller-provided collection, or an immutable
    // empty list when given null. Collections.emptyList() replaces the raw
    // Collections.EMPTY_LIST constant to avoid unchecked assignments; the runtime
    // value is the same shared immutable empty list.
    protected void initialiseAnnotationsWith(Collection<Annotation> annotations){
        if (annotations == null){
            this.annotations = Collections.emptyList();
        }
        else {
            this.annotations = annotations;
        }
    }

    protected void initialiseXrefsWith(Collection<Xref> xrefs){
        if (xrefs == null){
            this.xrefs = Collections.emptyList();
        }
        else {
            this.xrefs = xrefs;
        }
    }

    protected void initialiseAliasesWith(Collection<Alias> aliases){
        if (aliases == null){
            this.aliases = Collections.emptyList();
        }
        else {
            this.aliases = aliases;
        }
    }

    protected void initialiseIdentifiersWith(Collection<Xref> identifiers){
        if (identifiers == null){
            this.identifiers = Collections.emptyList();
        }
        else {
            this.identifiers = identifiers;
        }
    }

    protected void initialiseChecksumsWith(Collection<Checksum> checksums){
        if (checksums == null){
            this.checksums = Collections.emptyList();
        }
        else {
            this.checksums = checksums;
        }
    }

    public String getShortName() {
        return this.shortName;
    }

    public void setShortName(String name) {
        if (name == null || name.length() == 0){
            throw new IllegalArgumentException("The short name cannot be null or empty.");
        }
        this.shortName = name;
    }

    public String getFullName() {
        return this.fullName;
    }

    public void setFullName(String name) {
        this.fullName = name;
    }

    public Collection<Xref> getIdentifiers() {
        if (identifiers == null){
            initialiseIdentifiers();
        }
        return this.identifiers;
    }

    /**
     * @return the first identifier in the list of identifiers or null if the list is empty
     */
    public Xref getPreferredIdentifier() {
        // BUGFIX: the original read the 'identifiers' field directly, which throws a
        // NullPointerException when no identifier was ever added (the collection is
        // created lazily). Go through getIdentifiers() so it is initialised on demand.
        return !getIdentifiers().isEmpty() ? getIdentifiers().iterator().next() : null;
    }

    public Collection<Checksum> getChecksums() {
        if (checksums == null){
            initialiseChecksums();
        }
        return this.checksums;
    }

    public Collection<Xref> getXrefs() {
        if (xrefs == null){
            initialiseXrefs();
        }
        return this.xrefs;
    }

    public Collection<Annotation> getAnnotations() {
        if (annotations == null){
            initialiseAnnotations();
        }
        return this.annotations;
    }

    public Collection<Alias> getAliases() {
        if (aliases == null){
            initialiseAliases();
        }
        return this.aliases;
    }

    public Organism getOrganism() {
        return this.organism;
    }

    public void setOrganism(Organism organism) {
        this.organism = organism;
    }

    public CvTerm getInteractorType() {
        return this.interactorType;
    }

    public void setInteractorType(CvTerm interactorType) {
        if (interactorType == null){
            this.interactorType = CvTermUtils.createUnknownInteractorType();
        }
        else {
            this.interactorType = interactorType;
        }
    }

    @Override
    public String toString() {
        return shortName + (organism != null ? ", " + organism.toString() : "") + (interactorType != null ? ", " + interactorType.toString() : "") ;
    }
}
|
DefaultInteractor: fixed a bug in getPreferredIdentifier
|
jami-core/src/main/java/psidev/psi/mi/jami/model/impl/DefaultInteractor.java
|
DefaultInteractor: fixed a bug in getPreferredIdentifier
|
|
Java
|
apache-2.0
|
e1f9a38f9f2592201ad324aa0596636a1ae0d25a
| 0
|
joansmith/orientdb,joansmith/orientdb,mmacfadden/orientdb,sanyaade-g2g-repos/orientdb,wyzssw/orientdb,rprabhat/orientdb,sanyaade-g2g-repos/orientdb,rprabhat/orientdb,wyzssw/orientdb,mmacfadden/orientdb,sanyaade-g2g-repos/orientdb,rprabhat/orientdb,allanmoso/orientdb,allanmoso/orientdb,joansmith/orientdb,wyzssw/orientdb,giastfader/orientdb,giastfader/orientdb,allanmoso/orientdb,mmacfadden/orientdb,wyzssw/orientdb,allanmoso/orientdb,rprabhat/orientdb,giastfader/orientdb,giastfader/orientdb,sanyaade-g2g-repos/orientdb,joansmith/orientdb,mmacfadden/orientdb
|
/*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.core.serialization.serializer.record.binary;
import com.orientechnologies.common.collection.OMultiValue;
import com.orientechnologies.common.serialization.types.ODecimalSerializer;
import com.orientechnologies.common.serialization.types.OIntegerSerializer;
import com.orientechnologies.common.serialization.types.OLongSerializer;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.db.record.ORecordLazyList;
import com.orientechnologies.orient.core.db.record.ORecordLazyMap;
import com.orientechnologies.orient.core.db.record.ORecordLazyMultiValue;
import com.orientechnologies.orient.core.db.record.ORecordLazySet;
import com.orientechnologies.orient.core.db.record.OTrackedList;
import com.orientechnologies.orient.core.db.record.OTrackedMap;
import com.orientechnologies.orient.core.db.record.OTrackedSet;
import com.orientechnologies.orient.core.db.record.ridbag.ORidBag;
import com.orientechnologies.orient.core.exception.ODatabaseException;
import com.orientechnologies.orient.core.exception.OSerializationException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OGlobalProperty;
import com.orientechnologies.orient.core.metadata.schema.OProperty;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.ORecordInternal;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ODocumentEntry;
import com.orientechnologies.orient.core.record.impl.ODocumentInternal;
import com.orientechnologies.orient.core.serialization.ODocumentSerializable;
import com.orientechnologies.orient.core.serialization.OSerializableStream;
import com.orientechnologies.orient.core.serialization.serializer.ONetworkThreadLocalSerializer;
import com.orientechnologies.orient.core.storage.OStorageProxy;
import com.orientechnologies.orient.core.type.tree.OMVRBTreeRIDSet;
import com.orientechnologies.orient.core.util.ODateHelper;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
public class ORecordSerializerBinaryV0 implements ODocumentSerializer {
private static final String CHARSET_UTF_8 = "UTF-8";
private static final ORecordId NULL_RECORD_ID = new ORecordId(-2, ORID.CLUSTER_POS_INVALID);
private static final long MILLISEC_PER_DAY = 86400000;
// Default constructor; no initialisation is required.
public ORecordSerializerBinaryV0() {
}
/**
 * Partially deserializes a record: only the fields named in {@code iFields} are
 * unmarshalled; every other entry in the header is skipped without reading its value.
 *
 * @param document the document to fill
 * @param bytes    the serialized record; its offset is advanced while scanning
 * @param iFields  names of the fields to load
 */
public void deserializePartial(final ODocument document, final BytesContainer bytes, final String[] iFields) {
    final String className = readString(bytes);
    if (className.length() != 0)
        ODocumentInternal.fillClassNameIfNeeded(document, className);

    // TRANSFORMS FIELDS FOM STRINGS TO BYTE[]
    // NOTE(review): getBytes() uses the platform default charset here, while the
    // class declares a CHARSET_UTF_8 constant — confirm whether non-ASCII field
    // names require getBytes(CHARSET_UTF_8) to match the serialized header bytes.
    final byte[][] fields = new byte[iFields.length][];
    for (int i = 0; i < iFields.length; ++i)
        fields[i] = iFields[i].getBytes();

    String fieldName = null;
    int valuePos;
    OType type;
    int unmarshalledFields = 0;

    while (true) {
        final int len = OVarIntSerializer.readAsInteger(bytes);
        if (len == 0) {
            // SCAN COMPLETED
            break;
        } else if (len > 0) {
            // CHECK BY FIELD NAME SIZE: THIS AVOID EVEN THE UNMARSHALLING OF FIELD NAME
            boolean match = false;
            for (int i = 0; i < iFields.length; ++i) {
                if (iFields[i].length() == len) {
                    // Byte-wise comparison of the serialized name against the requested field.
                    boolean matchField = true;
                    for (int j = 0; j < len; ++j) {
                        if (bytes.bytes[bytes.offset + j] != fields[i][j]) {
                            matchField = false;
                            break;
                        }
                    }
                    if (matchField) {
                        fieldName = iFields[i];
                        unmarshalledFields++;
                        bytes.skip(len);
                        match = true;
                        break;
                    }
                }
            }

            if (!match) {
                // SKIP IT: jump over the name, the value pointer and the type byte.
                bytes.skip(len + OIntegerSerializer.INT_SIZE + 1);
                continue;
            }
            valuePos = readInteger(bytes);
            type = readOType(bytes);
        } else {
            // LOAD GLOBAL PROPERTY BY ID (a negative length encodes the property id)
            OGlobalProperty prop = getGlobalProperty(document, len);
            fieldName = prop.getName();
            valuePos = readInteger(bytes);
            // A concrete property type comes from the schema; ANY means the type byte follows inline.
            if (prop.getType() != OType.ANY)
                type = prop.getType();
            else
                type = readOType(bytes);
        }

        if (valuePos != 0) {
            // Jump to the value position, read it, then restore the header cursor.
            int headerCursor = bytes.offset;
            bytes.offset = valuePos;
            final Object value = readSingleValue(bytes, type, document);
            bytes.offset = headerCursor;
            ODocumentInternal.rawField(document, fieldName, value, type);
        } else
            // A zero pointer means the field holds no value.
            ODocumentInternal.rawField(document, fieldName, null, null);

        if (unmarshalledFields == iFields.length)
            // ALL REQUESTED FIELDS UNMARSHALLED: EXIT
            break;
    }
}
/**
 * Fully deserializes a record, loading every field listed in the header except those
 * already present in the document.
 *
 * @param document the document to fill
 * @param bytes    the serialized record; on return the offset is positioned after the
 *                 last value read
 */
@Override
public void deserialize(final ODocument document, final BytesContainer bytes) {
    final String className = readString(bytes);
    if (className.length() != 0)
        ODocumentInternal.fillClassNameIfNeeded(document, className);

    int last = 0; // highest value-end offset seen, used to leave the cursor past the body
    String fieldName;
    int valuePos;
    OType type;
    while (true) {
        OGlobalProperty prop = null;
        final int len = OVarIntSerializer.readAsInteger(bytes);
        if (len == 0) {
            // SCAN COMPLETED
            break;
        } else if (len > 0) {
            // PARSE FIELD NAME (interned: the same names recur across many records)
            fieldName = stringFromBytes(bytes.bytes, bytes.offset, len).intern();
            bytes.skip(len);
            valuePos = readInteger(bytes);
            type = readOType(bytes);
        } else {
            // LOAD GLOBAL PROPERTY BY ID (a negative length encodes the property id)
            prop = getGlobalProperty(document, len);
            fieldName = prop.getName();
            valuePos = readInteger(bytes);
            if (prop.getType() != OType.ANY)
                type = prop.getType();
            else
                type = readOType(bytes);
        }

        // Do not overwrite fields already set on the document.
        if (ODocumentInternal.rawContainsField(document, fieldName)) {
            continue;
        }

        if (valuePos != 0) {
            // Jump to the value, read it, then restore the header cursor.
            int headerCursor = bytes.offset;
            bytes.offset = valuePos;
            final Object value = readSingleValue(bytes, type, document);
            if (bytes.offset > last)
                last = bytes.offset;
            bytes.offset = headerCursor;
            ODocumentInternal.rawField(document, fieldName, value, type);
        } else
            // A zero pointer means the field holds no value.
            ODocumentInternal.rawField(document, fieldName, null, null);
    }

    ORecordInternal.clearSource(document);

    if (last > bytes.offset)
        bytes.offset = last;
}
/**
 * Serializes a document: first a header of field names / property ids with reserved
 * pointer slots, then the values, back-patching each slot with its value's offset.
 *
 * @param document   the document to serialize
 * @param bytes      the destination buffer
 * @param iClassOnly when true, only the class name and an empty header are written
 */
@SuppressWarnings("unchecked")
@Override
public void serialize(final ODocument document, final BytesContainer bytes, final boolean iClassOnly) {
    final OClass clazz = serializeClass(document, bytes);
    if (iClassOnly) {
        writeEmptyString(bytes);
        return;
    }
    final Map<String, OProperty> props = clazz != null ? clazz.propertiesMap() : null;

    final Set<Entry<String, ODocumentEntry>> fields = ODocumentInternal.rawEntries(document);

    final int[] pos = new int[fields.size()]; // header slots to back-patch with value pointers
    int i = 0;

    final Entry<String, ODocumentEntry> values[] = new Entry[fields.size()];
    // First pass: write the header and reserve the pointer (and, where needed, type) slots.
    for (Entry<String, ODocumentEntry> entry : fields) {
        if (!entry.getValue().exist())
            continue;
        if (entry.getValue().property == null && props != null)
            entry.getValue().property = props.get(entry.getKey());

        if (entry.getValue().property != null) {
            // Schema property: store the negated, shifted property id instead of the name.
            OVarIntSerializer.write(bytes, (entry.getValue().property.getId() + 1) * -1);
            if (entry.getValue().property.getType() != OType.ANY)
                pos[i] = bytes.alloc(OIntegerSerializer.INT_SIZE);
            else
                pos[i] = bytes.alloc(OIntegerSerializer.INT_SIZE + 1);
        } else {
            // Schemaless field: store the name plus room for the pointer and the type byte.
            writeString(bytes, entry.getKey());
            pos[i] = bytes.alloc(OIntegerSerializer.INT_SIZE + 1);
        }
        values[i] = entry;
        i++;
    }
    writeEmptyString(bytes); // header terminator (read back as len == 0)
    int size = i;

    // Second pass: write each non-null value and back-patch its header slot.
    for (i = 0; i < size; i++) {
        int pointer = 0;
        final Object value = values[i].getValue().value;
        if (value != null) {
            final OType type = getFieldType(values[i].getValue());
            if (type == null) {
                throw new OSerializationException("Impossible serialize value of type " + value.getClass()
                        + " with the ODocument binary serializer");
            }
            pointer = writeSingleValue(bytes, value, type, getLinkedType(document, type, values[i].getKey()));
            OIntegerSerializer.INSTANCE.serializeLiteral(pointer, bytes.bytes, pos[i]);
            // The inline type byte is only written when the schema does not fix the type.
            if (values[i].getValue().property == null || values[i].getValue().property.getType() == OType.ANY)
                writeOType(bytes, (pos[i] + OIntegerSerializer.INT_SIZE), type);
        }
    }
}
/**
 * Writes the document's schema class name to the buffer (or an empty string when the
 * document has no schema class).
 *
 * @return the schema class, or null for a schemaless document
 */
protected OClass serializeClass(final ODocument document, final BytesContainer bytes) {
    final OClass schemaClass = ODocumentInternal.getImmutableSchemaClass(document);
    if (schemaClass == null) {
        writeEmptyString(bytes);
    } else {
        writeString(bytes, schemaClass.getName());
    }
    return schemaClass;
}
/**
 * Resolves a global property from the negative length read in the header.
 * The header encodes property id {@code id} as {@code -(id + 1)}.
 */
protected OGlobalProperty getGlobalProperty(final ODocument document, final int len) {
    // Invert the -(id + 1) encoding to recover the property id.
    final int propertyId = -len - 1;
    return ODocumentInternal.getGlobalPropertyById(document, propertyId);
}
// Reads one type byte from the container and maps it to its OType.
protected OType readOType(final BytesContainer bytes) {
    return OType.getById(readByte(bytes));
}
// Writes the type's id byte at an absolute position; used by serialize() to
// back-patch the reserved type slot in the header.
private void writeOType(BytesContainer bytes, int pos, OType type) {
    bytes.bytes[pos] = (byte) type.getId();
}
/**
 * Deserializes one value of the given {@code type} starting at the current
 * cursor of {@code bytes}; the cursor is advanced past the value.
 *
 * @param bytes    source buffer positioned at the value
 * @param type     wire type of the value to decode
 * @param document owner document, used as container for embedded and lazy
 *                 collection values
 * @return the decoded value, or {@code null} for TRANSIENT/ANY
 */
protected Object readSingleValue(BytesContainer bytes, OType type, ODocument document) {
  Object value = null;
  switch (type) {
  case INTEGER:
    value = OVarIntSerializer.readAsInteger(bytes);
    break;
  case LONG:
    value = OVarIntSerializer.readAsLong(bytes);
    break;
  case SHORT:
    value = OVarIntSerializer.readAsShort(bytes);
    break;
  case STRING:
    value = readString(bytes);
    break;
  case DOUBLE:
    // Stored as raw IEEE-754 bits in a fixed 8-byte slot.
    value = Double.longBitsToDouble(readLong(bytes));
    break;
  case FLOAT:
    // Stored as raw IEEE-754 bits in a fixed 4-byte slot.
    value = Float.intBitsToFloat(readInteger(bytes));
    break;
  case BYTE:
    value = readByte(bytes);
    break;
  case BOOLEAN:
    value = readByte(bytes) == 1;
    break;
  case DATETIME:
    value = new Date(OVarIntSerializer.readAsLong(bytes));
    break;
  case DATE:
    // DATE is stored as a day count; convert back to epoch millis and
    // compensate for the database time zone offset.
    long savedTime = OVarIntSerializer.readAsLong(bytes) * MILLISEC_PER_DAY;
    int offset = ODateHelper.getDatabaseTimeZone().getOffset(savedTime);
    value = new Date(savedTime - offset);
    break;
  case EMBEDDED:
    value = new ODocument();
    deserialize((ODocument) value, bytes);
    if (((ODocument) value).containsField(ODocumentSerializable.CLASS_NAME)) {
      // The embedded document carries a serialized ODocumentSerializable:
      // re-instantiate the original class and rebuild it from the document.
      String className = ((ODocument) value).field(ODocumentSerializable.CLASS_NAME);
      try {
        Class<?> clazz = Class.forName(className);
        ODocumentSerializable newValue = (ODocumentSerializable) clazz.newInstance();
        newValue.fromDocument((ODocument) value);
        value = newValue;
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    } else
      ODocumentInternal.addOwner((ODocument) value, document);
    break;
  case EMBEDDEDSET:
    value = readEmbeddedCollection(bytes, new OTrackedSet<Object>(document), document);
    break;
  case EMBEDDEDLIST:
    value = readEmbeddedCollection(bytes, new OTrackedList<Object>(document), document);
    break;
  case LINKSET:
    value = readLinkCollection(bytes, new ORecordLazySet(document));
    break;
  case LINKLIST:
    value = readLinkCollection(bytes, new ORecordLazyList(document));
    break;
  case BINARY:
    value = readBinary(bytes);
    break;
  case LINK:
    value = readOptimizedLink(bytes);
    break;
  case LINKMAP:
    value = readLinkMap(bytes, document);
    break;
  case EMBEDDEDMAP:
    value = readEmbeddedMap(bytes, document);
    break;
  case DECIMAL:
    value = ODecimalSerializer.INSTANCE.deserialize(bytes.bytes, bytes.offset);
    bytes.skip(ODecimalSerializer.INSTANCE.getObjectSize(bytes.bytes, bytes.offset));
    break;
  case LINKBAG:
    ORidBag bag = new ORidBag();
    bag.fromStream(bytes);
    bag.setOwner(document);
    value = bag;
    break;
  case TRANSIENT:
    break;
  case CUSTOM:
    // CUSTOM values are stored as class name + OSerializableStream payload.
    try {
      String className = readString(bytes);
      Class<?> clazz = Class.forName(className);
      OSerializableStream stream = (OSerializableStream) clazz.newInstance();
      stream.fromStream(readBinary(bytes));
      if (stream instanceof OSerializableWrapper)
        value = ((OSerializableWrapper) stream).getSerializable();
      else
        value = stream;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
    break;
  case ANY:
    break;
  }
  return value;
}
/** Reads a varint length followed by that many raw bytes; advances the cursor. */
private byte[] readBinary(BytesContainer bytes) {
  final int length = OVarIntSerializer.readAsInteger(bytes);
  final byte[] result = new byte[length];
  System.arraycopy(bytes.bytes, bytes.offset, result, 0, length);
  bytes.skip(length);
  return result;
}
/**
 * Deserializes a link map: a varint size followed by (key type, key,
 * optimized link) triples. The null-record sentinel becomes a null value.
 */
private Map<Object, OIdentifiable> readLinkMap(final BytesContainer bytes, final ODocument document) {
  final Map<Object, OIdentifiable> map = new ORecordLazyMap(document);
  int remaining = OVarIntSerializer.readAsInteger(bytes);
  for (; remaining > 0; remaining--) {
    final OType keyType = readOType(bytes);
    final Object key = readSingleValue(bytes, keyType, document);
    final ORecordId link = readOptimizedLink(bytes);
    map.put(key, NULL_RECORD_ID.equals(link) ? null : link);
  }
  return map;
}
/**
 * Deserializes an embedded map: header entries are (key type, key, value
 * pointer, value type); values are read by jumping to their pointer. On
 * exit the cursor lands after the farthest value read.
 */
private Object readEmbeddedMap(final BytesContainer bytes, final ODocument document) {
  int size = OVarIntSerializer.readAsInteger(bytes);
  final Map<Object, Object> result = new OTrackedMap<Object>(document);
  int last = 0;
  while ((size--) > 0) {
    OType keyType = readOType(bytes);
    Object key = readSingleValue(bytes, keyType, document);
    final int valuePos = readInteger(bytes);
    final OType type = readOType(bytes);
    if (valuePos != 0) {
      // Jump to the value, remember the farthest offset reached, then
      // restore the header cursor for the next entry.
      int headerCursor = bytes.offset;
      bytes.offset = valuePos;
      Object value = readSingleValue(bytes, type, document);
      if (bytes.offset > last)
        last = bytes.offset;
      bytes.offset = headerCursor;
      result.put(key, value);
    } else
      // A zero pointer encodes a null value.
      result.put(key, null);
  }
  if (last > bytes.offset)
    bytes.offset = last;
  return result;
}
/**
 * Fills {@code found} with links read from the buffer; the null-record
 * sentinel is converted back to a null entry.
 */
private Collection<OIdentifiable> readLinkCollection(BytesContainer bytes, Collection<OIdentifiable> found) {
  int remaining = OVarIntSerializer.readAsInteger(bytes);
  while (remaining-- > 0) {
    final ORecordId link = readOptimizedLink(bytes);
    found.add(NULL_RECORD_ID.equals(link) ? null : link);
  }
  return found;
}
/** Reads a link encoded as varint cluster id followed by varint cluster position. */
private ORecordId readOptimizedLink(final BytesContainer bytes) {
  return new ORecordId(OVarIntSerializer.readAsInteger(bytes), OVarIntSerializer.readAsLong(bytes));
}
/**
 * Deserializes an embedded list/set. The writer currently always emits the
 * ANY collection type, so each element carries its own type byte; an ANY
 * element type marks a null entry.
 *
 * @return the filled collection, or {@code null} for a (currently
 *         unsupported) typed collection header
 */
private Collection<?> readEmbeddedCollection(final BytesContainer bytes, final Collection<Object> found, final ODocument document) {
  final int items = OVarIntSerializer.readAsInteger(bytes);
  OType type = readOType(bytes);
  if (type == OType.ANY) {
    for (int i = 0; i < items; i++) {
      OType itemType = readOType(bytes);
      if (itemType == OType.ANY)
        found.add(null);
      else
        found.add(readSingleValue(bytes, itemType, document));
    }
    return found;
  }
  // TODO: manage case where type is known
  return null;
}
/**
 * Looks up the schema-declared linked (element) type for an embedded
 * multi-value field; returns null for any other type or when no schema
 * property is defined.
 */
private OType getLinkedType(ODocument document, OType type, String key) {
  final boolean embeddedContainer =
      type == OType.EMBEDDEDLIST || type == OType.EMBEDDEDSET || type == OType.EMBEDDEDMAP;
  if (!embeddedContainer)
    return null;
  final OClass schemaClass = ODocumentInternal.getImmutableSchemaClass(document);
  if (schemaClass == null)
    return null;
  final OProperty property = schemaClass.getProperty(key);
  return property == null ? null : property.getLinkedType();
}
/**
 * Serializes a single {@code value} of wire type {@code type} at the current
 * buffer position.
 *
 * @param bytes      destination buffer
 * @param value      value to write (non-null except for TRANSIENT/ANY)
 * @param type       wire type driving the encoding
 * @param linkedType element type for embedded collections, or null to
 *                   auto-detect per element
 * @return offset of the first byte written (used for header back-patching)
 */
@SuppressWarnings("unchecked")
private int writeSingleValue(BytesContainer bytes, Object value, OType type, OType linkedType) {
  int pointer = 0;
  switch (type) {
  case INTEGER:
  case LONG:
  case SHORT:
    pointer = OVarIntSerializer.write(bytes, ((Number) value).longValue());
    break;
  case STRING:
    pointer = writeString(bytes, value.toString());
    break;
  case DOUBLE:
    // Raw IEEE-754 bits in a fixed 8-byte slot.
    long dg = Double.doubleToLongBits((Double) value);
    pointer = bytes.alloc(OLongSerializer.LONG_SIZE);
    OLongSerializer.INSTANCE.serializeLiteral(dg, bytes.bytes, pointer);
    break;
  case FLOAT:
    // Raw IEEE-754 bits in a fixed 4-byte slot.
    int fg = Float.floatToIntBits((Float) value);
    pointer = bytes.alloc(OIntegerSerializer.INT_SIZE);
    OIntegerSerializer.INSTANCE.serializeLiteral(fg, bytes.bytes, pointer);
    break;
  case BYTE:
    pointer = bytes.alloc(1);
    bytes.bytes[pointer] = (Byte) value;
    break;
  case BOOLEAN:
    pointer = bytes.alloc(1);
    bytes.bytes[pointer] = ((Boolean) value) ? (byte) 1 : (byte) 0;
    break;
  case DATETIME:
    if (value instanceof Long) {
      pointer = OVarIntSerializer.write(bytes, (Long) value);
    } else
      pointer = OVarIntSerializer.write(bytes, ((Date) value).getTime());
    break;
  case DATE:
    // Stored as a day count adjusted by the database time zone offset.
    long dateValue;
    if (value instanceof Long) {
      dateValue = (Long) value;
    } else
      dateValue = ((Date) value).getTime();
    int offset = ODateHelper.getDatabaseTimeZone().getOffset(dateValue);
    pointer = OVarIntSerializer.write(bytes, (dateValue + offset) / MILLISEC_PER_DAY);
    break;
  case EMBEDDED:
    pointer = bytes.offset;
    if (value instanceof ODocumentSerializable) {
      // Persist the concrete class name so deserialization can rebuild it.
      ODocument cur = ((ODocumentSerializable) value).toDocument();
      cur.field(ODocumentSerializable.CLASS_NAME, value.getClass().getName());
      serialize(cur, bytes, false);
    } else {
      serialize((ODocument) value, bytes, false);
    }
    break;
  case EMBEDDEDSET:
  case EMBEDDEDLIST:
    if (value.getClass().isArray())
      pointer = writeEmbeddedCollection(bytes, Arrays.asList(OMultiValue.array(value)), linkedType);
    else
      pointer = writeEmbeddedCollection(bytes, (Collection<?>) value, linkedType);
    break;
  case DECIMAL:
    BigDecimal decimalValue = (BigDecimal) value;
    pointer = bytes.alloc(ODecimalSerializer.INSTANCE.getObjectSize(decimalValue));
    ODecimalSerializer.INSTANCE.serialize(decimalValue, bytes.bytes, pointer);
    break;
  case BINARY:
    pointer = writeBinary(bytes, (byte[]) (value));
    break;
  case LINKSET:
  case LINKLIST:
    Collection<OIdentifiable> ridCollection = (Collection<OIdentifiable>) value;
    pointer = writeLinkCollection(bytes, ridCollection);
    break;
  case LINK:
    pointer = writeOptimizedLink(bytes, (OIdentifiable) value);
    break;
  case LINKMAP:
    pointer = writeLinkMap(bytes, (Map<Object, OIdentifiable>) value);
    break;
  case EMBEDDEDMAP:
    pointer = writeEmbeddedMap(bytes, (Map<Object, Object>) value);
    break;
  case LINKBAG:
    pointer = ((ORidBag) value).toStream(bytes);
    break;
  case CUSTOM:
    // Stored as class name + OSerializableStream payload; plain
    // Serializable values are wrapped first.
    if (!(value instanceof OSerializableStream))
      value = new OSerializableWrapper((Serializable) value);
    pointer = writeString(bytes, value.getClass().getName());
    writeBinary(bytes, ((OSerializableStream) value).toStream());
    break;
  case TRANSIENT:
    break;
  case ANY:
    break;
  }
  return pointer;
}
/**
 * Writes a byte array as a varint length followed by the raw bytes.
 *
 * @return offset of the length varint
 */
private int writeBinary(BytesContainer bytes, byte[] valueBytes) {
  final int pointer = OVarIntSerializer.write(bytes, valueBytes.length);
  final int dataStart = bytes.alloc(valueBytes.length);
  System.arraycopy(valueBytes, 0, bytes.bytes, dataStart, valueBytes.length);
  return pointer;
}
/**
 * Serializes a link map as a varint size followed by (STRING type byte,
 * key, optimized link) triples. Null links become the null-record sentinel.
 * Note: only string keys are supported by this format.
 *
 * @return offset of the size varint
 */
private int writeLinkMap(BytesContainer bytes, Map<Object, OIdentifiable> map) {
  final int fullPos = OVarIntSerializer.write(bytes, map.size());
  for (Entry<Object, OIdentifiable> entry : map.entrySet()) {
    // TODO:check skip of complex types
    // FIXME: changed to support only string key on map
    writeOType(bytes, bytes.alloc(1), OType.STRING);
    writeString(bytes, entry.getKey().toString());
    final OIdentifiable link = entry.getValue();
    if (link == null)
      writeNullLink(bytes);
    else
      writeOptimizedLink(bytes, link);
  }
  return fullPos;
}
/**
 * Serializes an embedded map in two passes: first a header of string keys
 * with placeholder (pointer + type) slots, then the values, back-patching
 * each slot with the value's position and concrete type.
 *
 * @return offset of the map size varint (start of the serialized map)
 */
@SuppressWarnings("unchecked")
private int writeEmbeddedMap(BytesContainer bytes, Map<Object, Object> map) {
  int[] pos = new int[map.size()];
  int i = 0;
  Entry<Object, Object> values[] = new Entry[map.size()];
  int fullPos = OVarIntSerializer.write(bytes, map.size());
  for (Entry<Object, Object> entry : map.entrySet()) {
    // TODO:check skip of complex types
    // FIXME: changed to support only string key on map
    OType type = OType.STRING;
    writeOType(bytes, bytes.alloc(1), type);
    writeString(bytes, entry.getKey().toString());
    // Reserve an int pointer plus a 1-byte type slot, patched below.
    pos[i] = bytes.alloc(OIntegerSerializer.INT_SIZE + 1);
    values[i] = entry;
    i++;
  }
  for (i = 0; i < values.length; i++) {
    int pointer = 0;
    Object value = values[i].getValue();
    if (value != null) {
      OType type = getTypeFromValueEmbedded(value);
      if (type == null) {
        throw new OSerializationException("Impossible serialize value of type " + value.getClass()
            + " with the ODocument binary serializer");
      }
      pointer = writeSingleValue(bytes, value, type, null);
      OIntegerSerializer.INSTANCE.serializeLiteral(pointer, bytes.bytes, pos[i]);
      writeOType(bytes, (pos[i] + OIntegerSerializer.INT_SIZE), type);
    }
    // Null values keep the zeroed placeholder slot (pointer == 0).
  }
  return fullPos;
}
/**
 * Ensures a link targets a persisted record before it is serialized: new or
 * dirty targets are saved so their identity becomes valid. Saving is
 * forbidden while serializing for the network.
 *
 * @return the (possibly replaced) identifiable to serialize
 */
private OIdentifiable recursiveLinkSave(OIdentifiable link) {
  if (link instanceof ORID) {
    if (((ORID) link).isValid() && ((ORID) link).isNew()) {
      final ODatabaseDocument database = ODatabaseRecordThreadLocal.INSTANCE.get();
      ORecord record = link.getRecord();
      if (record != null) {
        if (ONetworkThreadLocalSerializer.getNetworkSerializer() != null)
          throw new ODatabaseException("Impossible save a record during network serialization");
        database.save(record);
        // Return the saved record so its now-valid identity is used.
        return record;
      }
    }
  } else if (link instanceof ORecord) {
    ORID rid = link.getIdentity();
    if (((ORecord) link).isDirty() || (rid.isTemporary())) {
      if (ONetworkThreadLocalSerializer.getNetworkSerializer() != null)
        throw new ODatabaseException("Impossible save a record during network serialization");
      ((ORecord) link).save();
    }
  }
  return link;
}
/**
 * Writes the null-record sentinel rid in place of a null link.
 *
 * @return offset of the first byte written
 */
private int writeNullLink(BytesContainer bytes) {
  final int position = OVarIntSerializer.write(bytes, NULL_RECORD_ID.getIdentity().getClusterId());
  OVarIntSerializer.write(bytes, NULL_RECORD_ID.getIdentity().getClusterPosition());
  return position;
}
/**
 * Writes a link as varint cluster id + varint cluster position, saving the
 * target first (via {@link #recursiveLinkSave}) so the identity is persistent.
 *
 * @return offset of the first byte written
 */
private int writeOptimizedLink(BytesContainer bytes, OIdentifiable link) {
  link = recursiveLinkSave(link);
  // Remote (proxied) storages may legitimately hold not-yet-valid rids.
  assert link.getIdentity().isValid() || (ODatabaseRecordThreadLocal.INSTANCE.get().getStorage() instanceof OStorageProxy) : "Impossible to serialize invalid link";
  int pos = OVarIntSerializer.write(bytes, link.getIdentity().getClusterId());
  OVarIntSerializer.write(bytes, link.getIdentity().getClusterPosition());
  return pos;
}
/**
 * Serializes a collection of links as a varint size followed by one
 * optimized link per item (null items become the null-record sentinel).
 * Auto-convert-to-record is temporarily disabled on lazy collections so
 * that iterating does not fetch the linked records.
 *
 * @return offset of the size varint
 */
private int writeLinkCollection(BytesContainer bytes, Collection<OIdentifiable> value) {
  assert (!(value instanceof OMVRBTreeRIDSet));
  int pos = OVarIntSerializer.write(bytes, value.size());
  final boolean disabledAutoConvertion = value instanceof ORecordLazyMultiValue
      && ((ORecordLazyMultiValue) value).isAutoConvertToRecord();
  if (disabledAutoConvertion)
    // AVOID TO FETCH RECORD
    ((ORecordLazyMultiValue) value).setAutoConvertToRecord(false);
  try {
    for (OIdentifiable itemValue : value) {
      // TODO: handle the null links
      if (itemValue == null)
        writeNullLink(bytes);
      else
        writeOptimizedLink(bytes, itemValue);
    }
  } finally {
    // Always restore the original auto-convert setting.
    if (disabledAutoConvertion)
      ((ORecordLazyMultiValue) value).setAutoConvertToRecord(true);
  }
  return pos;
}
/**
 * Serializes an embedded list/set: a varint size, the collection type byte
 * (always ANY for now), then one (type byte, value) pair per element; null
 * elements are marked with an ANY type byte.
 *
 * @param bytes      destination buffer
 * @param value      collection to serialize
 * @param linkedType schema-declared element type, or null to auto-detect
 * @return offset of the size varint
 * @throws OSerializationException if an element's type cannot be determined
 */
private int writeEmbeddedCollection(BytesContainer bytes, Collection<?> value, OType linkedType) {
  int pos = OVarIntSerializer.write(bytes, value.size());
  // TODO manage embedded type from schema and auto-determined.
  writeOType(bytes, bytes.alloc(1), OType.ANY);
  for (Object itemValue : value) {
    // TODO:manage in a better way null entry
    if (itemValue == null) {
      writeOType(bytes, bytes.alloc(1), OType.ANY);
      continue;
    }
    OType type;
    if (linkedType == null)
      type = getTypeFromValueEmbedded(itemValue);
    else
      type = linkedType;
    if (type != null) {
      writeOType(bytes, bytes.alloc(1), type);
      writeSingleValue(bytes, itemValue, type, null);
    } else {
      // Bug fix: report the element that failed type detection, not the
      // collection itself ("value" was the whole collection here).
      throw new OSerializationException("Impossible serialize value of type " + itemValue.getClass()
          + " with the ODocument binary serializer");
    }
  }
  return pos;
}
/**
 * Determines the wire type for a document entry: explicit entry type first,
 * then the schema property type, finally a type inferred from the value
 * (also used when the declared type is ANY).
 */
private OType getFieldType(final ODocumentEntry entry) {
  OType resolved = entry.type;
  if (resolved != null)
    return resolved;
  final OProperty property = entry.property;
  if (property != null)
    resolved = property.getType();
  if (resolved == null || resolved == OType.ANY)
    resolved = OType.getTypeByValue(entry.value);
  return resolved;
}
/**
 * Infers the wire type of an embedded value; a LINK to a not-yet-saved
 * document is demoted to EMBEDDED since it has no persistent identity.
 */
private OType getTypeFromValueEmbedded(final Object fieldValue) {
  final OType detected = OType.getTypeByValue(fieldValue);
  final boolean unsavedDocument = detected == OType.LINK && fieldValue instanceof ODocument
      && !((ODocument) fieldValue).getIdentity().isValid();
  return unsavedDocument ? OType.EMBEDDED : detected;
}
/** Reads a varint length followed by that many UTF-8 bytes; advances the cursor. */
protected String readString(final BytesContainer bytes) {
  final int length = OVarIntSerializer.readAsInteger(bytes);
  final String decoded = stringFromBytes(bytes.bytes, bytes.offset, length);
  bytes.skip(length);
  return decoded;
}
/** Reads a fixed-size 4-byte int and advances the cursor past it. */
protected int readInteger(final BytesContainer container) {
  final int result = OIntegerSerializer.INSTANCE.deserializeLiteral(container.bytes, container.offset);
  container.offset += OIntegerSerializer.INT_SIZE;
  return result;
}
/** Reads a single byte and advances the cursor. */
private byte readByte(final BytesContainer container) {
  return container.bytes[container.offset++];
}
/** Reads a fixed-size 8-byte long and advances the cursor past it. */
private long readLong(final BytesContainer container) {
  final long result = OLongSerializer.INSTANCE.deserializeLiteral(container.bytes, container.offset);
  container.offset += OLongSerializer.LONG_SIZE;
  return result;
}
/** Writes a zero-length varint — the empty-string / end-of-header marker. */
private int writeEmptyString(final BytesContainer bytes) {
  return OVarIntSerializer.write(bytes, 0);
}
/**
 * Writes a string as a varint byte-length followed by its UTF-8 bytes.
 *
 * @return offset of the length varint
 */
private int writeString(final BytesContainer bytes, final String toWrite) {
  final byte[] encoded = bytesFromString(toWrite);
  final int pointer = OVarIntSerializer.write(bytes, encoded.length);
  final int dataStart = bytes.alloc(encoded.length);
  System.arraycopy(encoded, 0, bytes.bytes, dataStart, encoded.length);
  return pointer;
}
/** Encodes a string to UTF-8 bytes. */
private byte[] bytesFromString(final String toWrite) {
  try {
    return toWrite.getBytes(CHARSET_UTF_8);
  } catch (final UnsupportedEncodingException e) {
    // Should never happen: UTF-8 is mandatory on every JVM.
    throw new OSerializationException("Error on string encoding", e);
  }
}
/** Decodes {@code len} UTF-8 bytes starting at {@code offset} into a string. */
protected String stringFromBytes(final byte[] bytes, final int offset, final int len) {
  try {
    return new String(bytes, offset, len, CHARSET_UTF_8);
  } catch (final UnsupportedEncodingException e) {
    // Should never happen: UTF-8 is mandatory on every JVM.
    throw new OSerializationException("Error on string decoding", e);
  }
}
}
|
core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/binary/ORecordSerializerBinaryV0.java
|
/*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.core.serialization.serializer.record.binary;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.orientechnologies.common.collection.OMultiValue;
import com.orientechnologies.common.serialization.types.ODecimalSerializer;
import com.orientechnologies.common.serialization.types.OIntegerSerializer;
import com.orientechnologies.common.serialization.types.OLongSerializer;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.db.record.ORecordLazyList;
import com.orientechnologies.orient.core.db.record.ORecordLazyMap;
import com.orientechnologies.orient.core.db.record.ORecordLazySet;
import com.orientechnologies.orient.core.db.record.OTrackedList;
import com.orientechnologies.orient.core.db.record.OTrackedMap;
import com.orientechnologies.orient.core.db.record.OTrackedSet;
import com.orientechnologies.orient.core.db.record.ridbag.ORidBag;
import com.orientechnologies.orient.core.exception.ODatabaseException;
import com.orientechnologies.orient.core.exception.OSerializationException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OGlobalProperty;
import com.orientechnologies.orient.core.metadata.schema.OProperty;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.ORecordInternal;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ODocumentEntry;
import com.orientechnologies.orient.core.record.impl.ODocumentInternal;
import com.orientechnologies.orient.core.serialization.ODocumentSerializable;
import com.orientechnologies.orient.core.serialization.OSerializableStream;
import com.orientechnologies.orient.core.serialization.serializer.ONetworkThreadLocalSerializer;
import com.orientechnologies.orient.core.storage.OStorageProxy;
import com.orientechnologies.orient.core.type.tree.OMVRBTreeRIDSet;
import com.orientechnologies.orient.core.util.ODateHelper;
public class ORecordSerializerBinaryV0 implements ODocumentSerializer {
// Charset name used for all string (de)serialization.
private static final String CHARSET_UTF_8 = "UTF-8";
// Sentinel rid written in place of a null link inside link collections/maps.
private static final ORecordId NULL_RECORD_ID = new ORecordId(-2, ORID.CLUSTER_POS_INVALID);
// Milliseconds per day; DATE values are stored as day counts.
private static final long MILLISEC_PER_DAY = 86400000;

public ORecordSerializerBinaryV0() {
}
/**
 * Deserializes only the requested {@code iFields}, scanning the header and
 * skipping every other field without unmarshalling its name or value.
 * Stops early once all requested fields have been found.
 *
 * @param document target document to fill
 * @param bytes    serialized record, cursor at the class-name header
 * @param iFields  names of the fields to extract
 */
public void deserializePartial(final ODocument document, final BytesContainer bytes, final String[] iFields) {
  final String className = readString(bytes);
  if (className.length() != 0)
    ODocumentInternal.fillClassNameIfNeeded(document, className);
  // TRANSFORMS FIELDS FOM STRINGS TO BYTE[]
  final byte[][] fields = new byte[iFields.length][];
  for (int i = 0; i < iFields.length; ++i)
    fields[i] = iFields[i].getBytes();
  String fieldName = null;
  int valuePos;
  OType type;
  int unmarshalledFields = 0;
  while (true) {
    final int len = OVarIntSerializer.readAsInteger(bytes);
    if (len == 0) {
      // SCAN COMPLETED
      break;
    } else if (len > 0) {
      // CHECK BY FIELD NAME SIZE: THIS AVOID EVEN THE UNMARSHALLING OF FIELD NAME
      boolean match = false;
      for (int i = 0; i < iFields.length; ++i) {
        if (iFields[i].length() == len) {
          boolean matchField = true;
          for (int j = 0; j < len; ++j) {
            if (bytes.bytes[bytes.offset + j] != fields[i][j]) {
              matchField = false;
              break;
            }
          }
          if (matchField) {
            fieldName = iFields[i];
            unmarshalledFields++;
            bytes.skip(len);
            match = true;
            break;
          }
        }
      }
      if (!match) {
        // SKIP IT: field name bytes + pointer int + type byte.
        bytes.skip(len + OIntegerSerializer.INT_SIZE + 1);
        continue;
      }
      valuePos = readInteger(bytes);
      type = readOType(bytes);
    } else {
      // LOAD GLOBAL PROPERTY BY ID (negative length encodes the id).
      OGlobalProperty prop = getGlobalProperty(document, len);
      fieldName = prop.getName();
      valuePos = readInteger(bytes);
      if (prop.getType() != OType.ANY)
        type = prop.getType();
      else
        type = readOType(bytes);
    }
    if (valuePos != 0) {
      // Jump to the value area, read, then restore the header cursor.
      int headerCursor = bytes.offset;
      bytes.offset = valuePos;
      final Object value = readSingleValue(bytes, type, document);
      bytes.offset = headerCursor;
      ODocumentInternal.rawField(document, fieldName, value, type);
    } else
      // A zero pointer marks a null field value.
      ODocumentInternal.rawField(document, fieldName, null, null);
    if (unmarshalledFields == iFields.length)
      // ALL REQUESTED FIELDS UNMARSHALLED: EXIT
      break;
  }
}
/**
 * Deserializes the full record: reads the class name, then scans the header
 * (field name or global property id, value pointer, type) and reads each
 * value by jumping to its pointer. Fields already present on the document
 * are left untouched. On exit the cursor is positioned after the farthest
 * value read.
 */
@Override
public void deserialize(final ODocument document, final BytesContainer bytes) {
  final String className = readString(bytes);
  if (className.length() != 0)
    ODocumentInternal.fillClassNameIfNeeded(document, className);
  int last = 0;
  String fieldName;
  int valuePos;
  OType type;
  while (true) {
    OGlobalProperty prop = null;
    final int len = OVarIntSerializer.readAsInteger(bytes);
    if (len == 0) {
      // SCAN COMPLETED
      break;
    } else if (len > 0) {
      // PARSE FIELD NAME
      fieldName = stringFromBytes(bytes.bytes, bytes.offset, len).intern();
      bytes.skip(len);
      valuePos = readInteger(bytes);
      type = readOType(bytes);
    } else {
      // LOAD GLOBAL PROPERTY BY ID (negative length encodes the id).
      prop = getGlobalProperty(document, len);
      fieldName = prop.getName();
      valuePos = readInteger(bytes);
      if (prop.getType() != OType.ANY)
        type = prop.getType();
      else
        type = readOType(bytes);
    }
    if (ODocumentInternal.rawContainsField(document, fieldName)) {
      // Never overwrite a field already set on the document.
      continue;
    }
    if (valuePos != 0) {
      // Jump to the value, track the farthest offset, restore the cursor.
      int headerCursor = bytes.offset;
      bytes.offset = valuePos;
      final Object value = readSingleValue(bytes, type, document);
      if (bytes.offset > last)
        last = bytes.offset;
      bytes.offset = headerCursor;
      ODocumentInternal.rawField(document, fieldName, value, type);
    } else
      // A zero pointer marks a null field value.
      ODocumentInternal.rawField(document, fieldName, null, null);
  }
  ORecordInternal.clearSource(document);
  if (last > bytes.offset)
    bytes.offset = last;
}
/**
 * Serializes the document: class name first, then a header of (field name
 * or negative-encoded property id, pointer placeholder[, type byte])
 * entries, a zero terminator, and finally the values, back-patching each
 * header pointer with the value's position.
 *
 * @param iClassOnly when true only the class name and the header terminator
 *                   are written
 */
@SuppressWarnings("unchecked")
@Override
public void serialize(final ODocument document, final BytesContainer bytes, final boolean iClassOnly) {
  final OClass clazz = serializeClass(document, bytes);
  if (iClassOnly) {
    writeEmptyString(bytes);
    return;
  }
  final Map<String, OProperty> props = clazz != null ? clazz.propertiesMap() : null;
  final Set<Entry<String, ODocumentEntry>> fields = ODocumentInternal.rawEntries(document);
  final int[] pos = new int[fields.size()];
  int i = 0;
  final Entry<String, ODocumentEntry> values[] = new Entry[fields.size()];
  for (Entry<String, ODocumentEntry> entry : fields) {
    if (!entry.getValue().exist())
      continue;
    if (entry.getValue().property == null && props != null)
      entry.getValue().property = props.get(entry.getKey());
    if (entry.getValue().property != null) {
      // Schema field: write the negative-encoded global property id; the
      // type byte is only needed when the declared type is ANY.
      OVarIntSerializer.write(bytes, (entry.getValue().property.getId() + 1) * -1);
      if (entry.getValue().property.getType() != OType.ANY)
        pos[i] = bytes.alloc(OIntegerSerializer.INT_SIZE);
      else
        pos[i] = bytes.alloc(OIntegerSerializer.INT_SIZE + 1);
    } else {
      // Schema-less field: plain name plus pointer and type slots.
      writeString(bytes, entry.getKey());
      pos[i] = bytes.alloc(OIntegerSerializer.INT_SIZE + 1);
    }
    values[i] = entry;
    i++;
  }
  writeEmptyString(bytes);
  int size = i;
  for (i = 0; i < size; i++) {
    int pointer = 0;
    final Object value = values[i].getValue().value;
    if (value != null) {
      final OType type = getFieldType(values[i].getValue());
      if (type == null) {
        throw new OSerializationException("Impossible serialize value of type " + value.getClass()
            + " with the ODocument binary serializer");
      }
      pointer = writeSingleValue(bytes, value, type, getLinkedType(document, type, values[i].getKey()));
      OIntegerSerializer.INSTANCE.serializeLiteral(pointer, bytes.bytes, pos[i]);
      if (values[i].getValue().property == null || values[i].getValue().property.getType() == OType.ANY)
        writeOType(bytes, (pos[i] + OIntegerSerializer.INT_SIZE), type);
    }
    // Null values keep the zeroed pointer slot.
  }
}
/**
 * Writes the record's class-name header: the schema class name when one is
 * attached to the document, otherwise an empty string.
 *
 * @return the immutable schema class of the document, or {@code null}
 */
protected OClass serializeClass(final ODocument document, final BytesContainer bytes) {
  final OClass schemaClass = ODocumentInternal.getImmutableSchemaClass(document);
  if (schemaClass == null) {
    writeEmptyString(bytes);
  } else {
    writeString(bytes, schemaClass.getName());
  }
  return schemaClass;
}
/**
 * Resolves a global property from its header encoding: negative header
 * lengths encode {@code (propertyId + 1) * -1}.
 */
protected OGlobalProperty getGlobalProperty(final ODocument document, final int len) {
  final int propertyId = -len - 1;
  return ODocumentInternal.getGlobalPropertyById(document, propertyId);
}
/** Reads a one-byte type id and maps it to the corresponding {@link OType}. */
protected OType readOType(final BytesContainer bytes) {
  final byte typeId = readByte(bytes);
  return OType.getById(typeId);
}
/** Stores the one-byte id of {@code type} at absolute buffer position {@code pos}. */
private void writeOType(BytesContainer bytes, int pos, OType type) {
  bytes.bytes[pos] = (byte) type.getId();
}
/**
 * Deserializes one value of the given {@code type} starting at the current
 * cursor of {@code bytes}; the cursor is advanced past the value.
 *
 * @param bytes    source buffer positioned at the value
 * @param type     wire type of the value to decode
 * @param document owner document, used as container for embedded and lazy
 *                 collection values
 * @return the decoded value, or {@code null} for TRANSIENT/ANY
 */
protected Object readSingleValue(BytesContainer bytes, OType type, ODocument document) {
  Object value = null;
  switch (type) {
  case INTEGER:
    value = OVarIntSerializer.readAsInteger(bytes);
    break;
  case LONG:
    value = OVarIntSerializer.readAsLong(bytes);
    break;
  case SHORT:
    value = OVarIntSerializer.readAsShort(bytes);
    break;
  case STRING:
    value = readString(bytes);
    break;
  case DOUBLE:
    // Stored as raw IEEE-754 bits in a fixed 8-byte slot.
    value = Double.longBitsToDouble(readLong(bytes));
    break;
  case FLOAT:
    // Stored as raw IEEE-754 bits in a fixed 4-byte slot.
    value = Float.intBitsToFloat(readInteger(bytes));
    break;
  case BYTE:
    value = readByte(bytes);
    break;
  case BOOLEAN:
    value = readByte(bytes) == 1;
    break;
  case DATETIME:
    value = new Date(OVarIntSerializer.readAsLong(bytes));
    break;
  case DATE:
    // DATE is stored as a day count; convert back to epoch millis and
    // compensate for the database time zone offset.
    long savedTime = OVarIntSerializer.readAsLong(bytes) * MILLISEC_PER_DAY;
    int offset = ODateHelper.getDatabaseTimeZone().getOffset(savedTime);
    value = new Date(savedTime - offset);
    break;
  case EMBEDDED:
    value = new ODocument();
    deserialize((ODocument) value, bytes);
    if (((ODocument) value).containsField(ODocumentSerializable.CLASS_NAME)) {
      // The embedded document carries a serialized ODocumentSerializable:
      // re-instantiate the original class and rebuild it from the document.
      String className = ((ODocument) value).field(ODocumentSerializable.CLASS_NAME);
      try {
        Class<?> clazz = Class.forName(className);
        ODocumentSerializable newValue = (ODocumentSerializable) clazz.newInstance();
        newValue.fromDocument((ODocument) value);
        value = newValue;
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    } else
      ODocumentInternal.addOwner((ODocument) value, document);
    break;
  case EMBEDDEDSET:
    value = readEmbeddedCollection(bytes, new OTrackedSet<Object>(document), document);
    break;
  case EMBEDDEDLIST:
    value = readEmbeddedCollection(bytes, new OTrackedList<Object>(document), document);
    break;
  case LINKSET:
    value = readLinkCollection(bytes, new ORecordLazySet(document));
    break;
  case LINKLIST:
    value = readLinkCollection(bytes, new ORecordLazyList(document));
    break;
  case BINARY:
    value = readBinary(bytes);
    break;
  case LINK:
    value = readOptimizedLink(bytes);
    break;
  case LINKMAP:
    value = readLinkMap(bytes, document);
    break;
  case EMBEDDEDMAP:
    value = readEmbeddedMap(bytes, document);
    break;
  case DECIMAL:
    value = ODecimalSerializer.INSTANCE.deserialize(bytes.bytes, bytes.offset);
    bytes.skip(ODecimalSerializer.INSTANCE.getObjectSize(bytes.bytes, bytes.offset));
    break;
  case LINKBAG:
    ORidBag bag = new ORidBag();
    bag.fromStream(bytes);
    bag.setOwner(document);
    value = bag;
    break;
  case TRANSIENT:
    break;
  case CUSTOM:
    // CUSTOM values are stored as class name + OSerializableStream payload.
    try {
      String className = readString(bytes);
      Class<?> clazz = Class.forName(className);
      OSerializableStream stream = (OSerializableStream) clazz.newInstance();
      stream.fromStream(readBinary(bytes));
      if (stream instanceof OSerializableWrapper)
        value = ((OSerializableWrapper) stream).getSerializable();
      else
        value = stream;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
    break;
  case ANY:
    break;
  }
  return value;
}
/** Reads a varint length followed by that many raw bytes; advances the cursor. */
private byte[] readBinary(BytesContainer bytes) {
  final int length = OVarIntSerializer.readAsInteger(bytes);
  final byte[] result = new byte[length];
  System.arraycopy(bytes.bytes, bytes.offset, result, 0, length);
  bytes.skip(length);
  return result;
}
/**
 * Deserializes a link map: a varint size followed by (key type, key,
 * optimized link) triples. The null-record sentinel becomes a null value.
 */
private Map<Object, OIdentifiable> readLinkMap(final BytesContainer bytes, final ODocument document) {
  final Map<Object, OIdentifiable> map = new ORecordLazyMap(document);
  int remaining = OVarIntSerializer.readAsInteger(bytes);
  for (; remaining > 0; remaining--) {
    final OType keyType = readOType(bytes);
    final Object key = readSingleValue(bytes, keyType, document);
    final ORecordId link = readOptimizedLink(bytes);
    map.put(key, NULL_RECORD_ID.equals(link) ? null : link);
  }
  return map;
}
/**
 * Deserializes an embedded map: header entries are (key type, key, value
 * pointer, value type); values are read by jumping to their pointer. On
 * exit the cursor lands after the farthest value read.
 */
private Object readEmbeddedMap(final BytesContainer bytes, final ODocument document) {
  int size = OVarIntSerializer.readAsInteger(bytes);
  final Map<Object, Object> result = new OTrackedMap<Object>(document);
  int last = 0;
  while ((size--) > 0) {
    OType keyType = readOType(bytes);
    Object key = readSingleValue(bytes, keyType, document);
    final int valuePos = readInteger(bytes);
    final OType type = readOType(bytes);
    if (valuePos != 0) {
      // Jump to the value, remember the farthest offset reached, then
      // restore the header cursor for the next entry.
      int headerCursor = bytes.offset;
      bytes.offset = valuePos;
      Object value = readSingleValue(bytes, type, document);
      if (bytes.offset > last)
        last = bytes.offset;
      bytes.offset = headerCursor;
      result.put(key, value);
    } else
      // A zero pointer encodes a null value.
      result.put(key, null);
  }
  if (last > bytes.offset)
    bytes.offset = last;
  return result;
}
/**
 * Fills {@code found} with links read from the buffer; the null-record
 * sentinel is converted back to a null entry.
 */
private Collection<OIdentifiable> readLinkCollection(BytesContainer bytes, Collection<OIdentifiable> found) {
  int remaining = OVarIntSerializer.readAsInteger(bytes);
  while (remaining-- > 0) {
    final ORecordId link = readOptimizedLink(bytes);
    found.add(NULL_RECORD_ID.equals(link) ? null : link);
  }
  return found;
}
/** Reads a link encoded as varint cluster id followed by varint cluster position. */
private ORecordId readOptimizedLink(final BytesContainer bytes) {
  return new ORecordId(OVarIntSerializer.readAsInteger(bytes), OVarIntSerializer.readAsLong(bytes));
}
/**
 * Deserializes an embedded list/set. The writer currently always emits the
 * ANY collection type, so each element carries its own type byte; an ANY
 * element type marks a null entry.
 *
 * @return the filled collection, or {@code null} for a (currently
 *         unsupported) typed collection header
 */
private Collection<?> readEmbeddedCollection(final BytesContainer bytes, final Collection<Object> found, final ODocument document) {
  final int items = OVarIntSerializer.readAsInteger(bytes);
  OType type = readOType(bytes);
  if (type == OType.ANY) {
    for (int i = 0; i < items; i++) {
      OType itemType = readOType(bytes);
      if (itemType == OType.ANY)
        found.add(null);
      else
        found.add(readSingleValue(bytes, itemType, document));
    }
    return found;
  }
  // TODO: manage case where type is known
  return null;
}
/**
 * Looks up the schema-declared linked (element) type for an embedded
 * multi-value field; returns null for any other type or when no schema
 * property is defined.
 */
private OType getLinkedType(ODocument document, OType type, String key) {
  final boolean embeddedContainer =
      type == OType.EMBEDDEDLIST || type == OType.EMBEDDEDSET || type == OType.EMBEDDEDMAP;
  if (!embeddedContainer)
    return null;
  final OClass schemaClass = ODocumentInternal.getImmutableSchemaClass(document);
  if (schemaClass == null)
    return null;
  final OProperty property = schemaClass.getProperty(key);
  return property == null ? null : property.getLinkedType();
}
/**
 * Serializes a single {@code value} of wire type {@code type} at the current
 * buffer position.
 *
 * @param bytes      destination buffer
 * @param value      value to write (non-null except for TRANSIENT/ANY)
 * @param type       wire type driving the encoding
 * @param linkedType element type for embedded collections, or null to
 *                   auto-detect per element
 * @return offset of the first byte written (used for header back-patching)
 */
@SuppressWarnings("unchecked")
private int writeSingleValue(BytesContainer bytes, Object value, OType type, OType linkedType) {
  int pointer = 0;
  switch (type) {
  case INTEGER:
  case LONG:
  case SHORT:
    pointer = OVarIntSerializer.write(bytes, ((Number) value).longValue());
    break;
  case STRING:
    pointer = writeString(bytes, value.toString());
    break;
  case DOUBLE:
    // Raw IEEE-754 bits in a fixed 8-byte slot.
    long dg = Double.doubleToLongBits((Double) value);
    pointer = bytes.alloc(OLongSerializer.LONG_SIZE);
    OLongSerializer.INSTANCE.serializeLiteral(dg, bytes.bytes, pointer);
    break;
  case FLOAT:
    // Raw IEEE-754 bits in a fixed 4-byte slot.
    int fg = Float.floatToIntBits((Float) value);
    pointer = bytes.alloc(OIntegerSerializer.INT_SIZE);
    OIntegerSerializer.INSTANCE.serializeLiteral(fg, bytes.bytes, pointer);
    break;
  case BYTE:
    pointer = bytes.alloc(1);
    bytes.bytes[pointer] = (Byte) value;
    break;
  case BOOLEAN:
    pointer = bytes.alloc(1);
    bytes.bytes[pointer] = ((Boolean) value) ? (byte) 1 : (byte) 0;
    break;
  case DATETIME:
    if (value instanceof Long) {
      pointer = OVarIntSerializer.write(bytes, (Long) value);
    } else
      pointer = OVarIntSerializer.write(bytes, ((Date) value).getTime());
    break;
  case DATE:
    // Stored as a day count adjusted by the database time zone offset.
    long dateValue;
    if (value instanceof Long) {
      dateValue = (Long) value;
    } else
      dateValue = ((Date) value).getTime();
    int offset = ODateHelper.getDatabaseTimeZone().getOffset(dateValue);
    pointer = OVarIntSerializer.write(bytes, (dateValue + offset) / MILLISEC_PER_DAY);
    break;
  case EMBEDDED:
    pointer = bytes.offset;
    if (value instanceof ODocumentSerializable) {
      // Persist the concrete class name so deserialization can rebuild it.
      ODocument cur = ((ODocumentSerializable) value).toDocument();
      cur.field(ODocumentSerializable.CLASS_NAME, value.getClass().getName());
      serialize(cur, bytes, false);
    } else {
      serialize((ODocument) value, bytes, false);
    }
    break;
  case EMBEDDEDSET:
  case EMBEDDEDLIST:
    if (value.getClass().isArray())
      pointer = writeEmbeddedCollection(bytes, Arrays.asList(OMultiValue.array(value)), linkedType);
    else
      pointer = writeEmbeddedCollection(bytes, (Collection<?>) value, linkedType);
    break;
  case DECIMAL:
    BigDecimal decimalValue = (BigDecimal) value;
    pointer = bytes.alloc(ODecimalSerializer.INSTANCE.getObjectSize(decimalValue));
    ODecimalSerializer.INSTANCE.serialize(decimalValue, bytes.bytes, pointer);
    break;
  case BINARY:
    pointer = writeBinary(bytes, (byte[]) (value));
    break;
  case LINKSET:
  case LINKLIST:
    Collection<OIdentifiable> ridCollection = (Collection<OIdentifiable>) value;
    pointer = writeLinkCollection(bytes, ridCollection);
    break;
  case LINK:
    pointer = writeOptimizedLink(bytes, (OIdentifiable) value);
    break;
  case LINKMAP:
    pointer = writeLinkMap(bytes, (Map<Object, OIdentifiable>) value);
    break;
  case EMBEDDEDMAP:
    pointer = writeEmbeddedMap(bytes, (Map<Object, Object>) value);
    break;
  case LINKBAG:
    pointer = ((ORidBag) value).toStream(bytes);
    break;
  case CUSTOM:
    // Stored as class name + OSerializableStream payload; plain
    // Serializable values are wrapped first.
    if (!(value instanceof OSerializableStream))
      value = new OSerializableWrapper((Serializable) value);
    pointer = writeString(bytes, value.getClass().getName());
    writeBinary(bytes, ((OSerializableStream) value).toStream());
    break;
  case TRANSIENT:
    break;
  case ANY:
    break;
  }
  return pointer;
}
/**
 * Writes a byte array as a varint length prefix followed by the raw bytes.
 *
 * @return offset of the length prefix in the buffer
 */
private int writeBinary(BytesContainer bytes, byte[] valueBytes) {
  final int lengthPointer = OVarIntSerializer.write(bytes, valueBytes.length);
  final int dataStart = bytes.alloc(valueBytes.length);
  System.arraycopy(valueBytes, 0, bytes.bytes, dataStart, valueBytes.length);
  return lengthPointer;
}
/**
 * Writes a link map as: varint size, then per entry a key-type byte (always STRING),
 * the key string, and the value RID. Null values are encoded with the reserved
 * null RID.
 *
 * @return offset of the size varint
 */
private int writeLinkMap(BytesContainer bytes, Map<Object, OIdentifiable> map) {
  int fullPos = OVarIntSerializer.write(bytes, map.size());
  for (Entry<Object, OIdentifiable> entry : map.entrySet()) {
    // TODO:check skip of complex types
    // FIXME: changed to support only string key on map
    OType type = OType.STRING;
    writeOType(bytes, bytes.alloc(1), type);
    writeString(bytes, entry.getKey().toString());
    if (entry.getValue() == null)
      writeNullLink(bytes); // reserved RID marks an absent link
    else
      writeOptimizedLink(bytes, entry.getValue());
  }
  return fullPos;
}
/**
 * Writes an embedded map in two passes. Pass 1 writes the header: varint size, then
 * per entry the key (typed as STRING) plus a reserved slot of int + 1 byte for the
 * value's offset and type. Pass 2 writes each value and back-patches its absolute
 * offset and type into the reserved slot. Null values leave their slot zeroed.
 *
 * @return offset of the size varint
 * @throws OSerializationException if a value's type cannot be determined
 */
@SuppressWarnings("unchecked")
private int writeEmbeddedMap(BytesContainer bytes, Map<Object, Object> map) {
  int[] pos = new int[map.size()]; // header slot offset per entry, for back-patching
  int i = 0;
  Entry<Object, Object> values[] = new Entry[map.size()];
  int fullPos = OVarIntSerializer.write(bytes, map.size());
  for (Entry<Object, Object> entry : map.entrySet()) {
    // TODO:check skip of complex types
    // FIXME: changed to support only string key on map
    OType type = OType.STRING;
    writeOType(bytes, bytes.alloc(1), type);
    writeString(bytes, entry.getKey().toString());
    // Reserve space for the value pointer (int) followed by its type byte.
    pos[i] = bytes.alloc(OIntegerSerializer.INT_SIZE + 1);
    values[i] = entry;
    i++;
  }
  for (i = 0; i < values.length; i++) {
    int pointer = 0;
    Object value = values[i].getValue();
    if (value != null) {
      OType type = getTypeFromValueEmbedded(value);
      if (type == null) {
        throw new OSerializationException("Impossible serialize value of type " + value.getClass()
            + " with the ODocument binary serializer");
      }
      pointer = writeSingleValue(bytes, value, type, null);
      // Back-patch the header slot: value offset first, then its type byte.
      OIntegerSerializer.INSTANCE.serializeLiteral(pointer, bytes.bytes, pos[i]);
      writeOType(bytes, (pos[i] + OIntegerSerializer.INT_SIZE), type);
    }
  }
  return fullPos;
}
/**
 * Ensures a link target is persisted before its RID is serialized: a valid-but-new
 * ORID resolves and saves its record (returning the record so its assigned identity
 * is serialized); a dirty or temporary ORecord is saved in place. Saving is
 * forbidden while a network serializer is active on this thread.
 *
 * @return the (possibly replaced) identifiable whose identity should be serialized
 * @throws ODatabaseException if a save would be required during network serialization
 */
private OIdentifiable recursiveLinkSave(OIdentifiable link) {
  if (link instanceof ORID) {
    if (((ORID) link).isValid() && ((ORID) link).isNew()) {
      final ODatabaseDocument database = ODatabaseRecordThreadLocal.INSTANCE.get();
      ORecord record = link.getRecord();
      if (record != null) {
        if (ONetworkThreadLocalSerializer.getNetworkSerializer() != null)
          throw new ODatabaseException("Impossible save a record during network serialization");
        database.save(record);
        return record; // serialize the saved record's identity, not the stale new RID
      }
    }
  } else if (link instanceof ORecord) {
    ORID rid = link.getIdentity();
    if (((ORecord) link).isDirty() || (rid.isTemporary())) {
      if (ONetworkThreadLocalSerializer.getNetworkSerializer() != null)
        throw new ODatabaseException("Impossible save a record during network serialization");
      ((ORecord) link).save();
    }
  }
  return link;
}
/**
 * Writes the reserved null-record RID (cluster id then cluster position, both as
 * varints) used to encode an absent link.
 *
 * @return offset of the cluster-id varint
 */
private int writeNullLink(BytesContainer bytes) {
  final int startPos = OVarIntSerializer.write(bytes, NULL_RECORD_ID.getIdentity().getClusterId());
  OVarIntSerializer.write(bytes, NULL_RECORD_ID.getIdentity().getClusterPosition());
  return startPos;
}
/**
 * Saves the link target if required (see {@link #recursiveLinkSave}) and writes its
 * RID as two varints: cluster id, then cluster position.
 *
 * @return offset of the cluster-id varint
 */
private int writeOptimizedLink(BytesContainer bytes, OIdentifiable link) {
  link = recursiveLinkSave(link);
  // A remote (proxy) storage may legitimately hold not-yet-assigned RIDs at this point.
  assert link.getIdentity().isValid() || (ODatabaseRecordThreadLocal.INSTANCE.get().getStorage() instanceof OStorageProxy) : "Impossible to serialize invalid link";
  int pos = OVarIntSerializer.write(bytes, link.getIdentity().getClusterId());
  OVarIntSerializer.write(bytes, link.getIdentity().getClusterPosition());
  return pos;
}
/**
 * Writes a collection of links as a varint size followed by one RID per item;
 * null items are encoded with the reserved null RID.
 *
 * @return offset of the size varint
 */
private int writeLinkCollection(BytesContainer bytes, Collection<OIdentifiable> value) {
  assert (!(value instanceof OMVRBTreeRIDSet));
  final int sizePos = OVarIntSerializer.write(bytes, value.size());
  for (final OIdentifiable item : value) {
    if (item != null) {
      writeOptimizedLink(bytes, item);
    } else {
      writeNullLink(bytes);
    }
  }
  return sizePos;
}
/**
 * Writes an embedded collection: varint size, a collection-level type byte (always
 * ANY for now — see TODO), then per item a type byte followed by the item payload.
 * Null items are encoded as a bare ANY type byte with no payload.
 *
 * @param linkedType fixed element type from the schema, or null to detect per item
 * @return offset of the size varint
 * @throws OSerializationException if an item's type cannot be determined
 */
private int writeEmbeddedCollection(BytesContainer bytes, Collection<?> value, OType linkedType) {
  int pos = OVarIntSerializer.write(bytes, value.size());
  // TODO manage embedded type from schema and auto-determined.
  writeOType(bytes, bytes.alloc(1), OType.ANY);
  for (Object itemValue : value) {
    // TODO:manage in a better way null entry
    if (itemValue == null) {
      writeOType(bytes, bytes.alloc(1), OType.ANY); // ANY with no payload encodes null
      continue;
    }
    OType type;
    if (linkedType == null)
      type = getTypeFromValueEmbedded(itemValue);
    else
      type = linkedType;
    if (type != null) {
      writeOType(bytes, bytes.alloc(1), type);
      writeSingleValue(bytes, itemValue, type, null);
    } else {
      throw new OSerializationException("Impossible serialize value of type " + value.getClass()
          + " with the ODocument binary serializer");
    }
  }
  return pos;
}
/**
 * Resolves the serialization type of a document entry: the explicit entry type
 * wins; otherwise the schema property type is used; ANY or a missing type falls
 * back to detection from the runtime value.
 */
private OType getFieldType(final ODocumentEntry entry) {
  OType resolved = entry.type;
  if (resolved != null)
    return resolved;
  final OProperty schemaProperty = entry.property;
  if (schemaProperty != null)
    resolved = schemaProperty.getType();
  if (resolved == null || resolved == OType.ANY)
    resolved = OType.getTypeByValue(entry.value);
  return resolved;
}
/**
 * Determines the embedded serialization type of a value. An ODocument without a
 * valid identity cannot be referenced, so it is downgraded from LINK to EMBEDDED.
 */
private OType getTypeFromValueEmbedded(final Object fieldValue) {
  final OType detected = OType.getTypeByValue(fieldValue);
  final boolean unsavedDocument = detected == OType.LINK
      && fieldValue instanceof ODocument
      && !((ODocument) fieldValue).getIdentity().isValid();
  return unsavedDocument ? OType.EMBEDDED : detected;
}
/**
 * Reads a varint length followed by that many encoded bytes and decodes them into
 * a string, advancing the container past the consumed bytes.
 */
protected String readString(final BytesContainer bytes) {
  final int length = OVarIntSerializer.readAsInteger(bytes);
  final String decoded = stringFromBytes(bytes.bytes, bytes.offset, length);
  bytes.skip(length);
  return decoded;
}
/** Reads a fixed-size int via the integer serializer and advances the offset. */
protected int readInteger(final BytesContainer container) {
  final int result = OIntegerSerializer.INSTANCE.deserializeLiteral(container.bytes, container.offset);
  container.offset += OIntegerSerializer.INT_SIZE;
  return result;
}
/** Reads a single byte and advances the offset. */
private byte readByte(final BytesContainer container) {
  final byte value = container.bytes[container.offset];
  container.offset++;
  return value;
}
/** Reads a fixed-size long via the long serializer and advances the offset. */
private long readLong(final BytesContainer container) {
  final long result = OLongSerializer.INSTANCE.deserializeLiteral(container.bytes, container.offset);
  container.offset += OLongSerializer.LONG_SIZE;
  return result;
}
/**
 * Writes a zero-length string (just the varint length 0).
 *
 * @return offset of the written varint
 */
private int writeEmptyString(final BytesContainer bytes) {
  return OVarIntSerializer.write(bytes, 0);
}
/**
 * Encodes the string (see {@code bytesFromString}) and writes it as a varint
 * length prefix followed by the raw bytes.
 *
 * @return offset of the length prefix
 */
private int writeString(final BytesContainer bytes, final String toWrite) {
  final byte[] encoded = bytesFromString(toWrite);
  final int lengthPointer = OVarIntSerializer.write(bytes, encoded.length);
  final int dataStart = bytes.alloc(encoded.length);
  System.arraycopy(encoded, 0, bytes.bytes, dataStart, encoded.length);
  return lengthPointer;
}
/**
 * Encodes a string to bytes using the serializer's charset constant.
 *
 * @throws OSerializationException wrapping an UnsupportedEncodingException
 *         (not expected in practice for a standard charset name)
 */
private byte[] bytesFromString(final String toWrite) {
  try {
    return toWrite.getBytes(CHARSET_UTF_8);
  } catch (UnsupportedEncodingException e) {
    throw new OSerializationException("Error on string encoding", e);
  }
}
/**
 * Decodes {@code len} bytes starting at {@code offset} into a string using the
 * serializer's charset constant.
 *
 * @throws OSerializationException wrapping an UnsupportedEncodingException
 *         (not expected in practice for a standard charset name)
 */
protected String stringFromBytes(final byte[] bytes, final int offset, final int len) {
  try {
    return new String(bytes, offset, len, CHARSET_UTF_8);
  } catch (UnsupportedEncodingException e) {
    throw new OSerializationException("Error on string decoding", e);
  }
}
}
|
Optimized fetching of collection of LINKs
|
core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/binary/ORecordSerializerBinaryV0.java
|
Optimized fetching of collection of LINKs
|
|
Java
|
apache-2.0
|
b1d01a767bf4ebbbf9930d822e41768c213649c0
| 0
|
gradle/gradle,gradle/gradle,blindpirate/gradle,lsmaira/gradle,blindpirate/gradle,blindpirate/gradle,gstevey/gradle,robinverduijn/gradle,gradle/gradle,gstevey/gradle,robinverduijn/gradle,robinverduijn/gradle,lsmaira/gradle,gradle/gradle,lsmaira/gradle,gradle/gradle,gradle/gradle,lsmaira/gradle,gradle/gradle,lsmaira/gradle,robinverduijn/gradle,blindpirate/gradle,gstevey/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,robinverduijn/gradle,lsmaira/gradle,gstevey/gradle,gstevey/gradle,robinverduijn/gradle,blindpirate/gradle,blindpirate/gradle,gstevey/gradle,gstevey/gradle,gradle/gradle,robinverduijn/gradle,gstevey/gradle,blindpirate/gradle,lsmaira/gradle,gradle/gradle,robinverduijn/gradle,lsmaira/gradle,gstevey/gradle,blindpirate/gradle,gradle/gradle,lsmaira/gradle
|
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.nativebinaries.toolchain.internal.msvcpp;
import org.gradle.api.internal.tasks.compile.ArgWriter;
import org.gradle.nativebinaries.toolchain.internal.OptionsFileArgsTransformer;
import java.io.File;
/**
 * Uses an option file for arguments passed to Visual C++.
 */
class VisualCppOptionsFileArgTransformer extends OptionsFileArgsTransformer {
    /**
     * @param tempDir directory in which the generated options file is written
     */
    public VisualCppOptionsFileArgTransformer(File tempDir) {
        super(ArgWriter.windowsStyleFactory(), tempDir);
    }
}
|
subprojects/cpp/src/main/groovy/org/gradle/nativebinaries/toolchain/internal/msvcpp/VisualCppOptionsFileArgTransformer.java
|
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.nativebinaries.toolchain.internal.msvcpp;
import org.gradle.api.internal.tasks.compile.ArgWriter;
import org.gradle.nativebinaries.toolchain.internal.OptionsFileArgsTransformer;
import java.io.File;
/**
 * Uses an option file for arguments passed to Visual C++.
 */
class VisualCppOptionsFileArgTransformer extends OptionsFileArgsTransformer {
    /**
     * @param tempDir directory in which the generated options file is written
     */
    public VisualCppOptionsFileArgTransformer(File tempDir) {
        super(ArgWriter.windowsStyleFactory(), tempDir);
    }
}
|
Javadoc fix.
|
subprojects/cpp/src/main/groovy/org/gradle/nativebinaries/toolchain/internal/msvcpp/VisualCppOptionsFileArgTransformer.java
|
Javadoc fix.
|
|
Java
|
apache-2.0
|
9ddd8727a92b52d2c042907d3d78e549b438a6dd
| 0
|
hhu94/Synapse-Repository-Services,hhu94/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,hhu94/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,hhu94/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,zimingd/Synapse-Repository-Services
|
package org.sagebionetworks.dynamo.dao;
import java.util.Date;
import java.util.Locale;
import org.sagebionetworks.StackConfiguration;
import org.sagebionetworks.dynamo.DynamoTable;
import org.sagebionetworks.dynamo.KeyValueSplitter;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBAttribute;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBHashKey;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBIgnore;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBRangeKey;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBTable;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBVersionAttribute;
/**
* Maps directly to the DynamoDB table named NodeLineage.
*
* @author Eric Wu
*/
@DynamoDBTable(tableName=DboNodeLineage.TABLE_NAME)
public class DboNodeLineage implements DynamoTable {
    public static final String TABLE_NAME = "NodeLineage";
    // Composite hash key attribute: "<nodeId><sep><lineageType>", e.g. "382739#A".
    public static final String HASH_KEY_NAME = "NodeId" + KeyValueSplitter.SEPARATOR + "LineageType";
    // Composite range key attribute: "<distance><sep><nodeId>".
    public static final String RANGE_KEY_NAME = "Distance" + KeyValueSplitter.SEPARATOR + "NodeId";
    /**
     * This is the root pointer to help locate the root node. This virtual node should not be linked
     * with any other node except having the root as its direct child.
     * Salted with the stack name and instance so different stacks sharing a table do not collide.
     */
    static final String ROOT = "ROOT" + "-"
            + StackConfiguration.getStack() + "-"
            + StackConfiguration.getStackInstance();
    static final String ROOT_HASH_KEY = DboNodeLineage.createHashKey(ROOT, LineageType.DESCENDANT);
    /**
     * Maximum depth allowed on the tree
     */
    static final int MAX_DEPTH = 100;
    // NOTE(review): (int)Math.log10(100) == 2, so distances are zero-padded to 2 digits,
    // yet the createRangeKey() javadoc shows a 3-digit example ("005"), and a distance
    // equal to MAX_DEPTH would need 3 digits to keep string ordering correct. Confirm
    // whether this should be String.valueOf(MAX_DEPTH).length(); changing it would also
    // change keys already stored in DynamoDB.
    private static final int NUM_DIGITS = (int)Math.log10(MAX_DEPTH);
    /**
     * Creates the composite hash key from node ID and lineage type. Example hash key, "382739#A".
     */
    static String createHashKey(final String nodeId, final LineageType lineageType) {
        if (nodeId == null) {
            throw new NullPointerException();
        }
        if (lineageType == null) {
            throw new NullPointerException();
        }
        return nodeId + KeyValueSplitter.SEPARATOR + lineageType.getType();
    }
    /**
     * Creates the composite range key from distance and node ID. Example range key, "005#95373".
     */
    static String createRangeKey(final int distance, final String ancOrDescId) {
        if (distance < 0) {
            throw new IllegalArgumentException("Distance must be at least 0.");
        }
        if (ancOrDescId == null) {
            throw new NullPointerException();
        }
        // Pad left with zeros so that distance can be sorted correctly as strings by DynamoDB.
        // We shouldn't have nodes that are more than max-depth deep
        String format = "%0" + NUM_DIGITS + "d";
        return String.format(Locale.ROOT, format, distance) + KeyValueSplitter.SEPARATOR + ancOrDescId;
    }
    @DynamoDBHashKey(attributeName=DboNodeLineage.HASH_KEY_NAME)
    public String getHashKey() {
        return this.nodeIdLineageType;
    }
    public void setHashKey(String nodeIdLineageType) {
        this.nodeIdLineageType = nodeIdLineageType;
    }
    @DynamoDBRangeKey(attributeName=DboNodeLineage.RANGE_KEY_NAME)
    public String getRangeKey() {
        return this.distanceNodeId;
    }
    public void setRangeKey(String distanceNodeId) {
        this.distanceNodeId = distanceNodeId;
    }
    // Optimistic-locking version attribute managed by the DynamoDB mapper.
    @DynamoDBVersionAttribute
    public Long getVersion() {
        return this.version;
    }
    public void setVersion(Long version) {
        this.version = version;
    }
    @DynamoDBAttribute
    public Date getTimestamp() {
        return this.timestamp;
    }
    public void setTimestamp(Date timestamp) {
        this.timestamp = timestamp;
    }
    @DynamoDBIgnore
    @Override
    public String toString() {
        return "DboNodeLineage [nodeIdLineageType=" + nodeIdLineageType
                + ", distanceNodeId=" + distanceNodeId + ", version=" + version
                + ", timestamp=" + timestamp + "]";
    }
    // Backing fields for the composite keys and mapper-managed attributes.
    private String nodeIdLineageType;
    private String distanceNodeId;
    private Long version;
    private Date timestamp;
}
|
lib/lib-dynamo/src/main/java/org/sagebionetworks/dynamo/dao/DboNodeLineage.java
|
package org.sagebionetworks.dynamo.dao;
import java.util.Date;
import java.util.Locale;
import org.sagebionetworks.dynamo.DynamoTable;
import org.sagebionetworks.dynamo.KeyValueSplitter;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBAttribute;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBHashKey;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBIgnore;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBRangeKey;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBTable;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBVersionAttribute;
/**
* Maps directly to the DynamoDB table named NodeLineage.
*
* @author Eric Wu
*/
@DynamoDBTable(tableName=DboNodeLineage.TABLE_NAME)
public class DboNodeLineage implements DynamoTable {
    public static final String TABLE_NAME = "NodeLineage";
    // Composite hash key attribute: "<nodeId><sep><lineageType>", e.g. "382739#A".
    public static final String HASH_KEY_NAME = "NodeId" + KeyValueSplitter.SEPARATOR + "LineageType";
    // Composite range key attribute: "<distance><sep><nodeId>".
    public static final String RANGE_KEY_NAME = "Distance" + KeyValueSplitter.SEPARATOR + "NodeId";
    /**
     * This is the root pointer to help locate the root node. This virtual node should not be linked
     * with any other node except having the root as its direct child.
     */
    static final String ROOT = "ROOT";
    static final String ROOT_HASH_KEY = DboNodeLineage.createHashKey(ROOT, LineageType.DESCENDANT);
    /**
     * Maximum depth allowed on the tree
     */
    static final int MAX_DEPTH = 100;
    // NOTE(review): (int)Math.log10(100) == 2, so distances are zero-padded to 2 digits,
    // yet the createRangeKey() javadoc shows a 3-digit example ("005"), and a distance
    // equal to MAX_DEPTH would need 3 digits to keep string ordering correct. Confirm
    // whether this should be String.valueOf(MAX_DEPTH).length(); changing it would also
    // change keys already stored in DynamoDB.
    private static final int NUM_DIGITS = (int)Math.log10(MAX_DEPTH);
    /**
     * Creates the composite hash key from node ID and lineage type. Example hash key, "382739#A".
     */
    static String createHashKey(final String nodeId, final LineageType lineageType) {
        if (nodeId == null) {
            throw new NullPointerException();
        }
        if (lineageType == null) {
            throw new NullPointerException();
        }
        return nodeId + KeyValueSplitter.SEPARATOR + lineageType.getType();
    }
    /**
     * Creates the composite range key from distance and node ID. Example range key, "005#95373".
     */
    static String createRangeKey(final int distance, final String ancOrDescId) {
        if (distance < 0) {
            throw new IllegalArgumentException("Distance must be at least 0.");
        }
        if (ancOrDescId == null) {
            throw new NullPointerException();
        }
        // Pad left with zeros so that distance can be sorted correctly as strings by DynamoDB.
        // We shouldn't have nodes that are more than max-depth deep
        String format = "%0" + NUM_DIGITS + "d";
        return String.format(Locale.ROOT, format, distance) + KeyValueSplitter.SEPARATOR + ancOrDescId;
    }
    @DynamoDBHashKey(attributeName=DboNodeLineage.HASH_KEY_NAME)
    public String getHashKey() {
        return this.nodeIdLineageType;
    }
    public void setHashKey(String nodeIdLineageType) {
        this.nodeIdLineageType = nodeIdLineageType;
    }
    @DynamoDBRangeKey(attributeName=DboNodeLineage.RANGE_KEY_NAME)
    public String getRangeKey() {
        return this.distanceNodeId;
    }
    public void setRangeKey(String distanceNodeId) {
        this.distanceNodeId = distanceNodeId;
    }
    // Optimistic-locking version attribute managed by the DynamoDB mapper.
    @DynamoDBVersionAttribute
    public Long getVersion() {
        return this.version;
    }
    public void setVersion(Long version) {
        this.version = version;
    }
    @DynamoDBAttribute
    public Date getTimestamp() {
        return this.timestamp;
    }
    public void setTimestamp(Date timestamp) {
        this.timestamp = timestamp;
    }
    @DynamoDBIgnore
    @Override
    public String toString() {
        return "DboNodeLineage [nodeIdLineageType=" + nodeIdLineageType
                + ", distanceNodeId=" + distanceNodeId + ", version=" + version
                + ", timestamp=" + timestamp + "]";
    }
    // Backing fields for the composite keys and mapper-managed attributes.
    private String nodeIdLineageType;
    private String distanceNodeId;
    private Long version;
    private Date timestamp;
}
Salt the root with stack instance
|
lib/lib-dynamo/src/main/java/org/sagebionetworks/dynamo/dao/DboNodeLineage.java
|
Salt the root with stack instance
|
|
Java
|
apache-2.0
|
e94ee0fc1431e77764d5dc915cad7d414d054208
| 0
|
omindu/carbon-identity-framework,omindu/carbon-identity-framework,wso2/carbon-identity-framework,wso2/carbon-identity-framework,omindu/carbon-identity-framework,wso2/carbon-identity-framework,omindu/carbon-identity-framework,wso2/carbon-identity-framework
|
/*
* Copyright (c) 2014 WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.idp.mgt;
import org.apache.axiom.om.util.Base64;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.base.ServerConfiguration;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.core.util.KeyStoreManager;
import org.wso2.carbon.identity.application.common.ApplicationAuthenticatorService;
import org.wso2.carbon.identity.application.common.ProvisioningConnectorService;
import org.wso2.carbon.identity.application.common.model.ClaimConfig;
import org.wso2.carbon.identity.application.common.model.ClaimMapping;
import org.wso2.carbon.identity.application.common.model.FederatedAuthenticatorConfig;
import org.wso2.carbon.identity.application.common.model.IdentityProvider;
import org.wso2.carbon.identity.application.common.model.IdentityProviderProperty;
import org.wso2.carbon.identity.application.common.model.LocalRole;
import org.wso2.carbon.identity.application.common.model.PermissionsAndRoleConfig;
import org.wso2.carbon.identity.application.common.model.Property;
import org.wso2.carbon.identity.application.common.model.ProvisioningConnectorConfig;
import org.wso2.carbon.identity.application.common.model.RoleMapping;
import org.wso2.carbon.identity.application.common.util.IdentityApplicationConstants;
import org.wso2.carbon.identity.application.common.util.IdentityApplicationManagementUtil;
import org.wso2.carbon.identity.base.IdentityConstants;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.util.IdentityTenantUtil;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.idp.mgt.dao.CacheBackedIdPMgtDAO;
import org.wso2.carbon.idp.mgt.dao.FileBasedIdPMgtDAO;
import org.wso2.carbon.idp.mgt.dao.IdPManagementDAO;
import org.wso2.carbon.idp.mgt.internal.IdPManagementServiceComponent;
import org.wso2.carbon.idp.mgt.internal.IdpMgtServiceComponentHolder;
import org.wso2.carbon.idp.mgt.listener.IdentityProviderMgtListener;
import org.wso2.carbon.identity.core.model.ExpressionNode;
import org.wso2.carbon.idp.mgt.object.IdpSearchResult;
import org.wso2.carbon.identity.core.model.Node;
import org.wso2.carbon.identity.core.model.OperationNode;
import org.wso2.carbon.identity.core.model.FilterTreeBuilder;
import org.wso2.carbon.idp.mgt.util.IdPManagementConstants;
import org.wso2.carbon.idp.mgt.util.IdPManagementUtil;
import org.wso2.carbon.idp.mgt.util.MetadataConverter;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.api.UserStoreManager;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.KeyStore;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.stream.XMLStreamException;
public class IdentityProviderManager implements IdpManager {
private static final Log log = LogFactory.getLog(IdentityProviderManager.class);
private static CacheBackedIdPMgtDAO dao = new CacheBackedIdPMgtDAO(new IdPManagementDAO());
private static volatile IdentityProviderManager instance = new IdentityProviderManager();
private static final String OPENID_IDP_ENTITY_ID = "IdPEntityId";
private MetadataConverter SAML2SSOMetadataConverter = null;
private IdentityProviderManager() {
    // Private constructor: the shared instance is obtained via getInstance() (singleton).
}
/**
 * Returns the singleton {@code IdentityProviderManager} instance.
 *
 * @return the shared manager instance
 */
public static IdentityProviderManager getInstance() {
    return instance;
}
/**
* Retrieves resident Identity provider for a given tenant
*
* @param tenantDomain Tenant domain whose resident IdP is requested
* @return <code>LocalIdentityProvider</code>
* @throws IdentityProviderManagementException Error when getting Resident Identity Providers
*/
@Override
public IdentityProvider getResidentIdP(String tenantDomain)
throws IdentityProviderManagementException {
IdPManagementUtil.setTenantSpecifiers(tenantDomain);
String openIdUrl;
String samlSSOUrl;
String samlLogoutUrl;
String samlECPUrl;
String samlArtifactUrl;
String oauth1RequestTokenUrl;
String oauth1AuthorizeUrl;
String oauth1AccessTokenUrl;
String oauth2AuthzEPUrl;
String oauth2TokenEPUrl;
String oauth2RevokeEPUrl;
String oauth2IntrospectEpUrl;
String oauth2UserInfoEPUrl;
String oidcCheckSessionEPUrl;
String oidcLogoutEPUrl;
String oIDCWebFingerEPUrl;
String oAuth2DCREPUrl;
String oAuth2JWKSPage;
String oIDCDiscoveryEPUrl;
String passiveStsUrl;
String stsUrl;
String scimUsersEndpoint;
String scimGroupsEndpoint;
String scim2UsersEndpoint;
String scim2GroupsEndpoint;
openIdUrl = IdentityUtil.getProperty(IdentityConstants.ServerConfig.OPENID_SERVER_URL);
samlECPUrl = IdentityUtil.getProperty(IdentityConstants.ServerConfig.SAML_ECP_URL);
samlArtifactUrl = IdentityUtil.getProperty(IdentityConstants.ServerConfig.SSO_ARTIFACT_URL);
oauth1RequestTokenUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH1_REQUEST_TOKEN_URL);
oauth1AuthorizeUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH1_AUTHORIZE_URL);
oauth1AccessTokenUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH1_ACCESSTOKEN_URL);
oauth2AuthzEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_AUTHZ_EP_URL);
oauth2TokenEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_TOKEN_EP_URL);
oauth2UserInfoEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_USERINFO_EP_URL);
oidcCheckSessionEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_CHECK_SESSION_EP_URL);
oidcLogoutEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_LOGOUT_EP_URL);
passiveStsUrl = IdentityUtil.getProperty(IdentityConstants.STS.PSTS_IDENTITY_PROVIDER_URL);
stsUrl = IdentityUtil.getProperty(IdentityConstants.STS.STS_IDENTITY_PROVIDER_URL);
scimUsersEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM.USER_EP_URL);
scimGroupsEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM.GROUP_EP_URL);
scim2UsersEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM2.USER_EP_URL);
scim2GroupsEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM2.GROUP_EP_URL);
oauth2RevokeEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_REVOKE_EP_URL);
oauth2IntrospectEpUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_INTROSPECT_EP_URL);
oIDCWebFingerEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_WEB_FINGER_EP_URL);
oAuth2DCREPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_DCR_EP_URL);
oAuth2JWKSPage = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_JWKS_EP_URL);
oIDCDiscoveryEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_DISCOVERY_EP_URL);
if (StringUtils.isBlank(openIdUrl)) {
openIdUrl = IdentityUtil.getServerURL(IdentityConstants.OpenId.OPENID, true, true);
}
samlSSOUrl = IdentityUtil.getServerURL(IdentityConstants.ServerConfig.SAMLSSO, true, true)
+ IdPManagementUtil.getTenantParameter();
samlLogoutUrl = IdentityUtil.getServerURL(IdentityConstants.ServerConfig.SAMLSSO, true, true)
+ IdPManagementUtil.getTenantParameter();
if (StringUtils.isBlank(samlArtifactUrl)) {
samlArtifactUrl = IdentityUtil.getServerURL(IdentityConstants.ServerConfig.SAMLSSO, true, true);
}
if (StringUtils.isBlank(samlECPUrl)) {
samlECPUrl = IdentityUtil.getServerURL(IdentityConstants.ServerConfig.SAMLSSO, true, true);
}
if (StringUtils.isBlank(oauth1RequestTokenUrl)) {
oauth1RequestTokenUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.REQUEST_TOKEN, true, true);
}
if (StringUtils.isBlank(oauth1AuthorizeUrl)) {
oauth1AuthorizeUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.AUTHORIZE_URL, true, true);
}
if (StringUtils.isBlank(oauth1AccessTokenUrl)) {
oauth1AccessTokenUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.ACCESS_TOKEN, true, true);
}
if (StringUtils.isBlank(oauth2AuthzEPUrl)) {
oauth2AuthzEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.AUTHORIZE, true, false);
}
if (StringUtils.isBlank(oauth2TokenEPUrl)) {
oauth2TokenEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.TOKEN, true, false);
}
if (StringUtils.isBlank(oauth2RevokeEPUrl)) {
oauth2RevokeEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.REVOKE, true, false);
}
if (StringUtils.isBlank(oauth2IntrospectEpUrl)) {
oauth2IntrospectEpUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.INTROSPECT, true, false);
}
if (StringUtils.isBlank(oauth2UserInfoEPUrl)) {
oauth2UserInfoEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.USERINFO, true, false);
}
if (StringUtils.isBlank(oidcCheckSessionEPUrl)) {
oidcCheckSessionEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.CHECK_SESSION, true, false);
}
if (StringUtils.isBlank(oidcLogoutEPUrl)) {
oidcLogoutEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.LOGOUT, true, false);
}
if (StringUtils.isBlank(passiveStsUrl)) {
passiveStsUrl = IdentityUtil.getServerURL(IdentityConstants.STS.PASSIVE_STS, true, true);
}
if (StringUtils.isBlank(oIDCWebFingerEPUrl)) {
oIDCWebFingerEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.WEBFINGER, true, true);
}
if (StringUtils.isBlank(oAuth2DCREPUrl)) {
oAuth2DCREPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.DCR, true, true);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
oAuth2DCREPUrl = getTenantUrl(oAuth2DCREPUrl, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("OAuth 2 DCR endpoint is malformed");
}
if (StringUtils.isBlank(oAuth2JWKSPage)) {
oAuth2JWKSPage = IdentityUtil.getServerURL(IdentityConstants.OAuth.JWKS, true, true);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
oAuth2JWKSPage = getTenantUrl(oAuth2JWKSPage, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("OAuth 2 JWKS endpoint is malformed");
}
if (StringUtils.isBlank(oIDCDiscoveryEPUrl)) {
oIDCDiscoveryEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.DISCOVERY, true, true);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
oIDCDiscoveryEPUrl = getTenantUrl(oIDCDiscoveryEPUrl, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("OIDC Discovery endpoint is malformed");
}
// If sts url is configured in file, change it according to tenant domain. If not configured, add a default url
if (StringUtils.isNotBlank(stsUrl)) {
stsUrl = stsUrl.replace(IdentityConstants.STS.WSO2_CARBON_STS, IdPManagementUtil.getTenantContext() +
IdentityConstants.STS.WSO2_CARBON_STS);
} else {
stsUrl = IdentityUtil.getServerURL("services/" + IdPManagementUtil.getTenantContext() +
IdentityConstants.STS.WSO2_CARBON_STS, true, true);
}
if (StringUtils.isBlank(scimUsersEndpoint)) {
scimUsersEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM.USER_EP, true, false);
}
if (StringUtils.isBlank(scimGroupsEndpoint)) {
scimGroupsEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM.GROUP_EP, true, false);
}
if (StringUtils.isBlank(scim2UsersEndpoint)) {
scim2UsersEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM2.USER_EP, true, false);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
scim2UsersEndpoint = getTenantUrl(scim2UsersEndpoint, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("SCIM 2.0 Users endpoint is malformed");
}
if (StringUtils.isBlank(scim2GroupsEndpoint)) {
scim2GroupsEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM2.GROUP_EP, true, false);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
scim2GroupsEndpoint = getTenantUrl(scim2GroupsEndpoint, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("SCIM 2.0 Groups endpoint is malformed");
}
IdentityProvider identityProvider = dao.getIdPByName(null,
IdentityApplicationConstants.RESIDENT_IDP_RESERVED_NAME,
IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
if (identityProvider == null) {
String message = "Could not find Resident Identity Provider for tenant " + tenantDomain;
throw new IdentityProviderManagementException(message);
}
int tenantId = -1;
try {
tenantId = IdPManagementServiceComponent.getRealmService().getTenantManager().getTenantId(tenantDomain);
} catch (UserStoreException e) {
throw new IdentityProviderManagementException(
"Exception occurred while retrieving Tenant ID from Tenant Domain " + tenantDomain, e);
}
X509Certificate cert = null;
try {
IdentityTenantUtil.initializeRegistry(tenantId, tenantDomain);
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext();
carbonContext.setTenantDomain(tenantDomain, true);
KeyStoreManager keyStoreManager = KeyStoreManager.getInstance(tenantId);
if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
// derive key store name
String ksName = tenantDomain.trim().replace(".", "-");
// derive JKS name
String jksName = ksName + ".jks";
KeyStore keyStore = keyStoreManager.getKeyStore(jksName);
cert = (X509Certificate) keyStore.getCertificate(tenantDomain);
} else {
cert = keyStoreManager.getDefaultPrimaryCertificate();
}
} catch (Exception e) {
String msg = "Error retrieving primary certificate for tenant : " + tenantDomain;
throw new IdentityProviderManagementException(msg, e);
} finally {
PrivilegedCarbonContext.endTenantFlow();
}
if (cert == null) {
throw new IdentityProviderManagementException(
"Cannot find the primary certificate for tenant " + tenantDomain);
}
try {
identityProvider.setCertificate(Base64.encode(cert.getEncoded()));
} catch (CertificateEncodingException e) {
String msg = "Error occurred while encoding primary certificate for tenant domain " + tenantDomain;
throw new IdentityProviderManagementException(msg, e);
}
List<FederatedAuthenticatorConfig> fedAuthnCofigs = new ArrayList<FederatedAuthenticatorConfig>();
List<Property> propertiesList = null;
FederatedAuthenticatorConfig openIdFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.OpenID.NAME);
if (openIdFedAuthn == null) {
openIdFedAuthn = new FederatedAuthenticatorConfig();
openIdFedAuthn.setName(IdentityApplicationConstants.Authenticator.OpenID.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(openIdFedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(openIdFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OpenID.OPEN_ID_URL) == null) {
Property openIdUrlProp = new Property();
openIdUrlProp.setName(IdentityApplicationConstants.Authenticator.OpenID.OPEN_ID_URL);
openIdUrlProp.setValue(openIdUrl);
propertiesList.add(openIdUrlProp);
}
openIdFedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(openIdFedAuthn);
FederatedAuthenticatorConfig saml2SSOFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
if (saml2SSOFedAuthn == null) {
saml2SSOFedAuthn = new FederatedAuthenticatorConfig();
saml2SSOFedAuthn.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
}
propertiesList = new ArrayList<>();
Property samlSSOUrlProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.SAML2SSO.SSO_URL);
if (samlSSOUrlProperty == null) {
samlSSOUrlProperty = new Property();
samlSSOUrlProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.SSO_URL);
// Set the generated saml sso endpoint value.
samlSSOUrlProperty.setValue(samlSSOUrl);
}
propertiesList.add(samlSSOUrlProperty);
Property samlLogoutUrlProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.SAML2SSO.LOGOUT_REQ_URL);
if (samlLogoutUrlProperty == null) {
samlLogoutUrlProperty = new Property();
samlLogoutUrlProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.LOGOUT_REQ_URL);
// Set the generated saml slo endpoint value.
samlLogoutUrlProperty.setValue(samlLogoutUrl);
}
propertiesList.add(samlLogoutUrlProperty);
Property samlECPUrlProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.SAML2SSO.ECP_URL);
if (samlECPUrlProperty == null) {
samlECPUrlProperty = new Property();
samlECPUrlProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.ECP_URL);
}
//set the generated saml ecp endpoint value
samlECPUrlProperty.setValue(samlECPUrl);
propertiesList.add(samlECPUrlProperty);
Property idPEntityIdProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID);
if (idPEntityIdProperty == null) {
idPEntityIdProperty = new Property();
idPEntityIdProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID);
idPEntityIdProperty.setValue(IdPManagementUtil.getResidentIdPEntityId());
}
propertiesList.add(idPEntityIdProperty);
Property samlArtifactUrlProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.SAML2SSO.ARTIFACT_RESOLVE_URL);
if (samlArtifactUrlProperty == null) {
samlArtifactUrlProperty = new Property();
samlArtifactUrlProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.ARTIFACT_RESOLVE_URL);
}
samlArtifactUrlProperty.setValue(samlArtifactUrl);
propertiesList.add(samlArtifactUrlProperty);
for (Property property : saml2SSOFedAuthn.getProperties()) {
if (property != null &&
!IdentityApplicationConstants.Authenticator.SAML2SSO.SSO_URL.equals(property.getName()) &&
!IdentityApplicationConstants.Authenticator.SAML2SSO.LOGOUT_REQ_URL.equals(property.getName()) &&
!IdentityApplicationConstants.Authenticator.SAML2SSO.ECP_URL.equals(property.getName()) &&
!IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(property.getName())) {
propertiesList.add(property);
}
}
Property samlMetadataValidityPeriodProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.
getProperties(), IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD);
if (samlMetadataValidityPeriodProperty == null) {
samlMetadataValidityPeriodProperty = new Property();
samlMetadataValidityPeriodProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD);
samlMetadataValidityPeriodProperty.setValue(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD_DEFAULT);
}
propertiesList.add(samlMetadataValidityPeriodProperty);
Property samlMetadataSigningEnabledProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.
getProperties(), IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED);
if (samlMetadataSigningEnabledProperty == null) {
samlMetadataSigningEnabledProperty = new Property();
samlMetadataSigningEnabledProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED);
samlMetadataSigningEnabledProperty.setValue(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED_DEFAULT);
}
propertiesList.add(samlMetadataSigningEnabledProperty);
saml2SSOFedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(saml2SSOFedAuthn);
FederatedAuthenticatorConfig oauth1FedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.OAuth10A.NAME);
if (oauth1FedAuthn == null) {
oauth1FedAuthn = new FederatedAuthenticatorConfig();
oauth1FedAuthn.setName(IdentityApplicationConstants.OAuth10A.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(oauth1FedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(oauth1FedAuthn.getProperties(),
IdentityApplicationConstants.OAuth10A.OAUTH1_REQUEST_TOKEN_URL) == null) {
Property oauth1ReqTokUrlProp = new Property();
oauth1ReqTokUrlProp.setName(IdentityApplicationConstants.OAuth10A.OAUTH1_REQUEST_TOKEN_URL);
oauth1ReqTokUrlProp.setValue(oauth1RequestTokenUrl);
propertiesList.add(oauth1ReqTokUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oauth1FedAuthn.getProperties(),
IdentityApplicationConstants.OAuth10A.OAUTH1_AUTHORIZE_URL) == null) {
Property oauth1AuthzUrlProp = new Property();
oauth1AuthzUrlProp.setName(IdentityApplicationConstants.OAuth10A.OAUTH1_AUTHORIZE_URL);
oauth1AuthzUrlProp.setValue(oauth1AuthorizeUrl);
propertiesList.add(oauth1AuthzUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oauth1FedAuthn.getProperties(),
IdentityApplicationConstants.OAuth10A.OAUTH1_ACCESS_TOKEN_URL) == null) {
Property oauth1AccessTokUrlProp = new Property();
oauth1AccessTokUrlProp.setName(IdentityApplicationConstants.OAuth10A.OAUTH1_ACCESS_TOKEN_URL);
oauth1AccessTokUrlProp.setValue(oauth1AccessTokenUrl);
propertiesList.add(oauth1AccessTokUrlProp);
}
oauth1FedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(oauth1FedAuthn);
FederatedAuthenticatorConfig oidcFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.OIDC.NAME);
if (oidcFedAuthn == null) {
oidcFedAuthn = new FederatedAuthenticatorConfig();
oidcFedAuthn.setName(IdentityApplicationConstants.Authenticator.OIDC.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(oidcFedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
OPENID_IDP_ENTITY_ID) == null) {
Property idPEntityIdProp = new Property();
idPEntityIdProp.setName(OPENID_IDP_ENTITY_ID);
idPEntityIdProp.setValue(getOIDCResidentIdPEntityId());
propertiesList.add(idPEntityIdProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_AUTHZ_URL) == null) {
Property authzUrlProp = new Property();
authzUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_AUTHZ_URL);
authzUrlProp.setValue(oauth2AuthzEPUrl);
propertiesList.add(authzUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_TOKEN_URL) == null) {
Property tokenUrlProp = new Property();
tokenUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_TOKEN_URL);
tokenUrlProp.setValue(oauth2TokenEPUrl);
propertiesList.add(tokenUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_REVOKE_URL) == null) {
Property revokeUrlProp = new Property();
revokeUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_REVOKE_URL);
revokeUrlProp.setValue(oauth2RevokeEPUrl);
propertiesList.add(revokeUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_INTROSPECT_URL) == null) {
Property instropsectUrlProp = new Property();
instropsectUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_INTROSPECT_URL);
instropsectUrlProp.setValue(oauth2IntrospectEpUrl);
propertiesList.add(instropsectUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_USER_INFO_EP_URL) == null) {
Property userInfoUrlProp = new Property();
userInfoUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_USER_INFO_EP_URL);
userInfoUrlProp.setValue(oauth2UserInfoEPUrl);
propertiesList.add(userInfoUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OIDC_CHECK_SESSION_URL) == null) {
Property checkSessionUrlProp = new Property();
checkSessionUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OIDC_CHECK_SESSION_URL);
checkSessionUrlProp.setValue(oidcCheckSessionEPUrl);
propertiesList.add(checkSessionUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OIDC_LOGOUT_URL) == null) {
Property logoutUrlProp = new Property();
logoutUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OIDC_LOGOUT_URL);
logoutUrlProp.setValue(oidcLogoutEPUrl);
propertiesList.add(logoutUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_DCR_EP_URL) == null) {
Property dcrUrlProp = new Property();
dcrUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_DCR_EP_URL);
dcrUrlProp.setValue(oAuth2DCREPUrl);
propertiesList.add(dcrUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OIDC_WEB_FINGER_EP_URL) == null) {
Property webFingerUrlProp = new Property();
webFingerUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OIDC_WEB_FINGER_EP_URL);
webFingerUrlProp.setValue(oIDCWebFingerEPUrl);
propertiesList.add(webFingerUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_JWKS_EP_URL) == null) {
Property jwksUrlProp = new Property();
jwksUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_JWKS_EP_URL);
jwksUrlProp.setValue(oAuth2JWKSPage);
propertiesList.add(jwksUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OIDC_DISCOVERY_EP_URL) == null) {
Property discoveryUrlProp = new Property();
discoveryUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OIDC_DISCOVERY_EP_URL);
discoveryUrlProp.setValue(oIDCDiscoveryEPUrl);
propertiesList.add(discoveryUrlProp);
}
oidcFedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(oidcFedAuthn);
FederatedAuthenticatorConfig passiveSTSFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.PassiveSTS.NAME);
if (passiveSTSFedAuthn == null) {
passiveSTSFedAuthn = new FederatedAuthenticatorConfig();
passiveSTSFedAuthn.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.NAME);
}
propertiesList = new ArrayList<>();
Property passiveSTSUrlProperty = IdentityApplicationManagementUtil.getProperty(passiveSTSFedAuthn
.getProperties(), IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL);
if (passiveSTSUrlProperty == null) {
passiveSTSUrlProperty = new Property();
passiveSTSUrlProperty.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL);
}
passiveSTSUrlProperty.setValue(passiveStsUrl);
propertiesList.add(passiveSTSUrlProperty);
Property stsIdPEntityIdProperty = IdentityApplicationManagementUtil.getProperty(passiveSTSFedAuthn
.getProperties(), IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_ENTITY_ID);
if (stsIdPEntityIdProperty == null) {
stsIdPEntityIdProperty = new Property();
stsIdPEntityIdProperty.setName(IdentityApplicationConstants.Authenticator.PassiveSTS
.IDENTITY_PROVIDER_ENTITY_ID);
stsIdPEntityIdProperty.setValue(IdPManagementUtil.getResidentIdPEntityId());
}
propertiesList.add(stsIdPEntityIdProperty);
for (Property property : passiveSTSFedAuthn.getProperties()) {
if (property != null && !IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL
.equals(property.getName()) && !IdentityApplicationConstants.Authenticator.PassiveSTS
.IDENTITY_PROVIDER_ENTITY_ID.equals(property.getName())) {
propertiesList.add(property);
}
}
passiveSTSFedAuthn
.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(passiveSTSFedAuthn);
FederatedAuthenticatorConfig stsFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.WSTrust.NAME);
if (stsFedAuthn == null) {
stsFedAuthn = new FederatedAuthenticatorConfig();
stsFedAuthn.setName(IdentityApplicationConstants.Authenticator.WSTrust.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(stsFedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(stsFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.WSTrust.IDENTITY_PROVIDER_URL) == null) {
Property stsUrlProp = new Property();
stsUrlProp.setName(IdentityApplicationConstants.Authenticator.WSTrust.IDENTITY_PROVIDER_URL);
stsUrlProp.setValue(stsUrl);
propertiesList.add(stsUrlProp);
}
stsFedAuthn
.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(stsFedAuthn);
List<IdentityProviderProperty> identityProviderProperties = new ArrayList<IdentityProviderProperty>();
FederatedAuthenticatorConfig sessionTimeoutConfig = new FederatedAuthenticatorConfig();
sessionTimeoutConfig.setName(IdentityApplicationConstants.NAME);
propertiesList = new ArrayList<Property>(Arrays.asList(sessionTimeoutConfig.getProperties()));
Property cleanUpPeriodProp = new Property();
cleanUpPeriodProp.setName(IdentityApplicationConstants.CLEAN_UP_PERIOD);
String cleanUpPeriod = IdentityUtil.getProperty(IdentityConstants.ServerConfig.CLEAN_UP_PERIOD);
if (StringUtils.isBlank(cleanUpPeriod)) {
cleanUpPeriod = IdentityApplicationConstants.CLEAN_UP_PERIOD_DEFAULT;
} else if (!StringUtils.isNumeric(cleanUpPeriod)) {
log.warn("PersistanceCleanUpPeriod in identity.xml should be a numeric value");
cleanUpPeriod = IdentityApplicationConstants.CLEAN_UP_PERIOD_DEFAULT;
}
cleanUpPeriodProp.setValue(cleanUpPeriod);
propertiesList.add(cleanUpPeriodProp);
sessionTimeoutConfig.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(sessionTimeoutConfig);
identityProvider.setFederatedAuthenticatorConfigs(fedAuthnCofigs
.toArray(new FederatedAuthenticatorConfig[fedAuthnCofigs.size()]));
ProvisioningConnectorConfig scimProvConn = IdentityApplicationManagementUtil
.getProvisioningConnector(identityProvider.getProvisioningConnectorConfigs(),
"scim");
if (scimProvConn == null) {
scimProvConn = new ProvisioningConnectorConfig();
scimProvConn.setName("scim");
}
propertiesList = new ArrayList<>(Arrays.asList(scimProvConn.getProvisioningProperties()));
Property scimUserEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM.USERS_EP_URL);
if (scimUserEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM.USERS_EP_URL);
property.setValue(scimUsersEndpoint);
propertiesList.add(property);
} else if (!scimUsersEndpoint.equalsIgnoreCase(scimUserEndpointProperty.getValue())) {
scimUserEndpointProperty.setValue(scimUsersEndpoint);
}
Property scimGroupEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM.GROUPS_EP_URL);
if (scimGroupEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM.GROUPS_EP_URL);
property.setValue(scimGroupsEndpoint);
propertiesList.add(property);
} else if (!scimGroupsEndpoint.equalsIgnoreCase(scimGroupEndpointProperty.getValue())) {
scimGroupEndpointProperty.setValue(scimGroupsEndpoint);
}
Property scim2UserEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM2.USERS_EP_URL);
if (scim2UserEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM2.USERS_EP_URL);
property.setValue(scim2UsersEndpoint);
propertiesList.add(property);
} else if (!scim2UsersEndpoint.equalsIgnoreCase(scim2UserEndpointProperty.getValue())) {
scim2UserEndpointProperty.setValue(scim2UsersEndpoint);
}
Property scim2GroupEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM2.GROUPS_EP_URL);
if (scim2GroupEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM2.GROUPS_EP_URL);
property.setValue(scim2GroupsEndpoint);
propertiesList.add(property);
} else if (!scim2GroupsEndpoint.equalsIgnoreCase(scim2GroupEndpointProperty.getValue())) {
scim2GroupEndpointProperty.setValue(scim2GroupsEndpoint);
}
scimProvConn.setProvisioningProperties(propertiesList.toArray(new Property[propertiesList.size()]));
identityProvider.setProvisioningConnectorConfigs(new ProvisioningConnectorConfig[]{scimProvConn});
// Override few endpoint URLs which are initially persisted in the database and can be out dated with hostname
// changes.
overrideResidentIdpEPUrls(identityProvider);
return identityProvider;
}
/**
* Add Resident Identity provider for a given tenant
*
* @param identityProvider <code>IdentityProvider</code>
* @param tenantDomain Tenant domain whose resident IdP is requested
* @throws IdentityProviderManagementException Error when adding Resident Identity Provider
*/
@Override
public void addResidentIdP(IdentityProvider identityProvider, String tenantDomain)
throws IdentityProviderManagementException {
// invoking the pre listeners
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreAddResidentIdP(identityProvider, tenantDomain)) {
return;
}
}
if (identityProvider.getFederatedAuthenticatorConfigs() == null) {
identityProvider.setFederatedAuthenticatorConfigs(new FederatedAuthenticatorConfig[0]);
}
FederatedAuthenticatorConfig saml2SSOResidentAuthenticatorConfig = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
if (saml2SSOResidentAuthenticatorConfig == null) {
saml2SSOResidentAuthenticatorConfig = new FederatedAuthenticatorConfig();
saml2SSOResidentAuthenticatorConfig.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
}
if (saml2SSOResidentAuthenticatorConfig.getProperties() == null) {
saml2SSOResidentAuthenticatorConfig.setProperties(new Property[0]);
}
boolean idPEntityIdAvailable = false;
for (Property property : saml2SSOResidentAuthenticatorConfig.getProperties()) {
if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(property.getName())) {
idPEntityIdAvailable = true;
}
}
if (!idPEntityIdAvailable) {
Property property = new Property();
property.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID);
property.setValue(IdPManagementUtil.getResidentIdPEntityId());
if (saml2SSOResidentAuthenticatorConfig.getProperties().length > 0) {
List<Property> properties = Arrays.asList(saml2SSOResidentAuthenticatorConfig.getProperties());
properties.add(property);
saml2SSOResidentAuthenticatorConfig.setProperties((Property[]) properties.toArray());
} else {
saml2SSOResidentAuthenticatorConfig.setProperties(new Property[]{property});
}
}
Property samlMetadataValidityPeriodProperty = new Property();
String samlMetadataValidityPeriod = IdentityUtil.getProperty(IdentityConstants.ServerConfig.
SAML_METADATA_VALIDITY_PERIOD);
if (StringUtils.isBlank(samlMetadataValidityPeriod)) {
samlMetadataValidityPeriod = IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD_DEFAULT;
} else if (!StringUtils.isNumeric(samlMetadataValidityPeriod) ||
Integer.parseInt(samlMetadataValidityPeriod) <= 0) {
log.warn("SAMLMetadataValidityPeriod in identity.xml should be a numeric value " +
"hence defaulting to value: " + IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD_DEFAULT + "m");
samlMetadataValidityPeriod = IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD_DEFAULT;
}
samlMetadataValidityPeriodProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD);
samlMetadataValidityPeriodProperty.setValue(samlMetadataValidityPeriod);
Property samlMetadataSigningEnabledProperty = new Property();
String samlMetadataSigningEnabled = IdentityUtil.getProperty(IdentityConstants.ServerConfig.
SAML_METADATA_SIGNING_ENABLED);
if (StringUtils.isBlank(samlMetadataSigningEnabled)) {
log.warn("SAMLMetadataSigningEnabled in identity.xml should be a boolean value");
samlMetadataSigningEnabled = IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED_DEFAULT;
}
samlMetadataSigningEnabledProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED);
samlMetadataSigningEnabledProperty.setValue(samlMetadataSigningEnabled);
List<Property> propertyList = new ArrayList<>(Arrays.asList(saml2SSOResidentAuthenticatorConfig.getProperties()));
propertyList.add(samlMetadataValidityPeriodProperty);
propertyList.add(samlMetadataSigningEnabledProperty);
Property[] properties = new Property[propertyList.size()];
properties = propertyList.toArray(properties);
saml2SSOResidentAuthenticatorConfig.setProperties(properties);
Property oidcProperty = new Property();
oidcProperty.setName(OPENID_IDP_ENTITY_ID);
oidcProperty.setValue(getOIDCResidentIdPEntityId());
FederatedAuthenticatorConfig oidcAuthenticationConfig = new FederatedAuthenticatorConfig();
oidcAuthenticationConfig.setProperties(new Property[]{oidcProperty});
oidcAuthenticationConfig.setName(IdentityApplicationConstants.Authenticator.OIDC.NAME);
Property passiveStsProperty = new Property();
passiveStsProperty.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_ENTITY_ID);
passiveStsProperty.setValue(IdPManagementUtil.getResidentIdPEntityId());
FederatedAuthenticatorConfig passiveStsAuthenticationConfig = new FederatedAuthenticatorConfig();
passiveStsAuthenticationConfig.setProperties(new Property[]{passiveStsProperty});
passiveStsAuthenticationConfig.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.NAME);
FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs = {saml2SSOResidentAuthenticatorConfig,
passiveStsAuthenticationConfig, oidcAuthenticationConfig};
identityProvider.setFederatedAuthenticatorConfigs(IdentityApplicationManagementUtil
.concatArrays(identityProvider.getFederatedAuthenticatorConfigs(), federatedAuthenticatorConfigs));
IdentityProviderProperty[] idpProperties = new IdentityProviderProperty[2];
IdentityProviderProperty rememberMeTimeoutProperty = new IdentityProviderProperty();
String rememberMeTimeout = IdentityUtil.getProperty(IdentityConstants.ServerConfig.REMEMBER_ME_TIME_OUT);
if (StringUtils.isBlank(rememberMeTimeout) || !StringUtils.isNumeric(rememberMeTimeout) ||
Integer.parseInt(rememberMeTimeout) <= 0) {
log.warn("RememberMeTimeout in identity.xml should be a numeric value");
rememberMeTimeout = IdentityApplicationConstants.REMEMBER_ME_TIME_OUT_DEFAULT;
}
rememberMeTimeoutProperty.setName(IdentityApplicationConstants.REMEMBER_ME_TIME_OUT);
rememberMeTimeoutProperty.setValue(rememberMeTimeout);
IdentityProviderProperty sessionIdletimeOutProperty = new IdentityProviderProperty();
String idleTimeout = IdentityUtil.getProperty(IdentityConstants.ServerConfig.SESSION_IDLE_TIMEOUT);
if (StringUtils.isBlank(idleTimeout) || !StringUtils.isNumeric(idleTimeout) ||
Integer.parseInt(idleTimeout) <= 0) {
log.warn("SessionIdleTimeout in identity.xml should be a numeric value");
idleTimeout = IdentityApplicationConstants.SESSION_IDLE_TIME_OUT_DEFAULT;
}
sessionIdletimeOutProperty.setName(IdentityApplicationConstants.SESSION_IDLE_TIME_OUT);
sessionIdletimeOutProperty.setValue(idleTimeout);
idpProperties[0] = rememberMeTimeoutProperty;
idpProperties[1] = sessionIdletimeOutProperty;
identityProvider.setIdpProperties(idpProperties);
dao.addIdP(identityProvider, IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
// invoking the post listeners
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPostAddResidentIdP(identityProvider, tenantDomain)) {
return;
}
}
}
/**
* Update Resident Identity provider for a given tenant
*
* @param identityProvider <code>IdentityProvider</code>
     * @param tenantDomain     Tenant domain whose resident IdP is to be updated
* @throws IdentityProviderManagementException Error when updating Resident Identity Provider
*/
@Override
public void updateResidentIdP(IdentityProvider identityProvider, String tenantDomain)
throws IdentityProviderManagementException {
IdentityProvider residentIdp = dao.getIdPByName(null, IdentityApplicationConstants.RESIDENT_IDP_RESERVED_NAME,
IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
Map<String, String> configurationDetails = new HashMap<>();
for (IdentityProviderProperty property : identityProvider.getIdpProperties()) {
configurationDetails.put(property.getName(), property.getValue());
}
IdentityProviderProperty[] identityMgtProperties = residentIdp.getIdpProperties();
List<IdentityProviderProperty> newProperties = new ArrayList<>();
for (IdentityProviderProperty identityMgtProperty : identityMgtProperties) {
IdentityProviderProperty prop = new IdentityProviderProperty();
String key = identityMgtProperty.getName();
prop.setName(key);
if (configurationDetails.containsKey(key)) {
prop.setValue(configurationDetails.get(key));
} else {
prop.setValue(identityMgtProperty.getValue());
}
newProperties.add(prop);
configurationDetails.remove(key);
}
for (Map.Entry<String, String> entry : configurationDetails.entrySet()) {
IdentityProviderProperty prop = new IdentityProviderProperty();
prop.setName(entry.getKey());
prop.setValue(entry.getValue());
newProperties.add(prop);
}
identityProvider.setIdpProperties(newProperties.toArray(new IdentityProviderProperty[newProperties.size()]));
for (IdentityProviderProperty idpProp : identityProvider.getIdpProperties()) {
if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.SESSION_IDLE_TIME_OUT)) {
if (StringUtils.isBlank(idpProp.getValue()) || !StringUtils.isNumeric(idpProp.getValue()) ||
Integer.parseInt(idpProp.getValue().trim()) <= 0) {
throw new IdentityProviderManagementException(IdentityApplicationConstants.SESSION_IDLE_TIME_OUT
+ " of ResidentIdP should be a numeric value greater than 0 ");
}
} else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.REMEMBER_ME_TIME_OUT)) {
if (StringUtils.isBlank(idpProp.getValue()) || !StringUtils.isNumeric(idpProp.getValue()) ||
Integer.parseInt(idpProp.getValue().trim()) <= 0) {
throw new IdentityProviderManagementException(IdentityApplicationConstants.REMEMBER_ME_TIME_OUT
+ " of ResidentIdP should be a numeric value greater than 0 ");
}
} else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD)) {
if (StringUtils.isBlank(idpProp.getValue()) || !StringUtils.isNumeric(idpProp.getValue()) ||
Integer.parseInt(idpProp.getValue().trim()) <= 0) {
throw new IdentityProviderManagementException(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD + " of ResidentIdP should be a numeric value greater than 0 ");
}
} else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED)) {
if (StringUtils.isBlank(idpProp.getValue())) {
throw new IdentityProviderManagementException(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED + " of ResidentIdP should be a boolean value ");
}
}
}
// invoking the pre listeners
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreUpdateResidentIdP(identityProvider, tenantDomain)) {
return;
}
}
if (identityProvider.getFederatedAuthenticatorConfigs() == null) {
identityProvider.setFederatedAuthenticatorConfigs(new FederatedAuthenticatorConfig[0]);
}
IdentityProvider currentIdP = IdentityProviderManager.getInstance().getIdPByName(
IdentityApplicationConstants.RESIDENT_IDP_RESERVED_NAME, tenantDomain, true);
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
validateUpdateOfIdPEntityId(currentIdP.getFederatedAuthenticatorConfigs(),
identityProvider.getFederatedAuthenticatorConfigs(), tenantId, tenantDomain);
dao.updateIdP(identityProvider, currentIdP, tenantId, tenantDomain);
// invoking the post listeners
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPostUpdateResidentIdP(identityProvider, tenantDomain)) {
return;
}
}
}
/**
 * Retrieves the registered identity providers of a given tenant.
 *
 * @param tenantDomain Tenant domain whose IdP names are requested
 * @return Set of <code>IdentityProvider</code>. IdP names, primary IdP and home realm
 * identifiers of each IdP
 * @throws IdentityProviderManagementException Error when getting list of Identity Providers
 */
@Override
public List<IdentityProvider> getIdPs(String tenantDomain)
        throws IdentityProviderManagementException {

    // Passing a null connection lets the DAO manage its own DB connection.
    return dao.getIdPs(null, IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
}
/**
 * Get all basic identity provider information.
 *
 * @param limit        limit per page.
 * @param offset       offset value.
 * @param filter       filter value for IdP search.
 * @param sortOrder    order of IdP ASC/DESC.
 * @param sortBy       the column value need to sort.
 * @param tenantDomain tenant domain whose IdP names are requested.
 * @return Identity Provider's Basic Information array {@link IdpSearchResult}.
 * @throws IdentityProviderManagementServerException server related error while getting Identity Providers object.
 * @throws IdentityProviderManagementClientException client related error while getting Identity Providers object.
 */
@Override
public IdpSearchResult getIdPs(Integer limit, Integer offset, String filter, String sortOrder, String sortBy,
                               String tenantDomain)
        throws IdentityProviderManagementServerException, IdentityProviderManagementClientException {

    nullCheck(limit, offset);
    if (limit < 0) {
        // Reject negative page sizes up front. (The previous message wrongly read
        // "Limit should be negative value"; the check rejects negative limits.)
        String message = "Limit should not be a negative value. limit: " + limit;
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
                message);
    }
    IdpSearchResult result = new IdpSearchResult();
    // Parse the filter string into expression nodes before querying the DB.
    List<ExpressionNode> expressionNodes = getExpressionNodes(filter);
    // Validate and record limit/offset/sorting settings on the result holder.
    setParameters(limit, offset, sortOrder, sortBy, filter, result);
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    result.setIdpCount(dao.getTotalIdPCount(tenantId, expressionNodes));
    result.setIdpList(dao.getIdPsSearch(tenantId, expressionNodes, result.getLimit(), result.getOffSet(),
            result.getSortOrder(), result.getSortBy()));
    return result;
}
/**
 * Check null for limit and offset.
 *
 * @param limit  limit per page.
 * @param offset offset value.
 * @throws IdentityProviderManagementClientException Error while limit and offset getting null.
 */
private void nullCheck(Integer limit, Integer offset) throws IdentityProviderManagementClientException {

    // Messages fixed for grammar: "Limit should not null" -> "Limit should not be null".
    if (limit == null) {
        String message = "Limit should not be null";
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
                message);
    }
    if (offset == null) {
        String message = "Offset should not be null";
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
                message);
    }
}
/**
 * Parses the given filter string (e.g. {@code name sw "te" and name ew "st" and isEnabled eq "true"})
 * into a flat list of expression nodes.
 *
 * @param filter raw filter expression.
 * @return expression nodes extracted from the parsed filter tree.
 * @throws IdentityProviderManagementClientException when the filter cannot be parsed.
 */
private List<ExpressionNode> getExpressionNodes(String filter) throws IdentityProviderManagementClientException {

    List<ExpressionNode> nodes = new ArrayList<>();
    try {
        Node root = new FilterTreeBuilder(filter).buildTree();
        setExpressionNodeList(root, nodes);
    } catch (IOException | IdentityException e) {
        String message = "Error occurred while validate filter, filter: " + filter;
        throw IdPManagementUtil
                .handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP, message, e);
    }
    return nodes;
}
/**
 * Flattens a filter tree into a list of expression nodes, translating the
 * {@code isEnabled} boolean filter value into its database representation.
 *
 * @param node       current filter node (expression or operation).
 * @param expression accumulator receiving every {@link ExpressionNode} in the tree.
 * @throws IdentityProviderManagementClientException when an invalid filter value is passed.
 */
private void setExpressionNodeList(Node node, List<ExpressionNode> expression)
        throws IdentityProviderManagementClientException {

    if (node instanceof ExpressionNode) {
        ExpressionNode expressionNode = (ExpressionNode) node;
        if (expressionNode.getAttributeValue().contains(IdPManagementConstants.IDP_IS_ENABLED)) {
            // Use an exact match here: the previous "true".contains(value) check wrongly
            // accepted any substring of "true"/"false" (including the empty string).
            if ("true".equals(expressionNode.getValue())) {
                expressionNode.setValue(IdPManagementConstants.IS_TRUE_VALUE);
            } else if ("false".equals(expressionNode.getValue())) {
                expressionNode.setValue(IdPManagementConstants.IS_FALSE_VALUE);
            } else {
                String message = "Invalid \'isEnabled\' value passed in the filter. It should be \'true\' or " +
                        "\'false\' isEnabled = " + expressionNode.getValue();
                throw IdPManagementUtil.handleClientException(
                        IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP, message);
            }
        }
        expression.add(expressionNode);
    } else if (node instanceof OperationNode) {
        // Operation nodes (and/or) carry no value themselves; recurse into both subtrees.
        setExpressionNodeList(node.getLeftNode(), expression);
        setExpressionNodeList(node.getRightNode(), expression);
    }
}
/**
 * Copies the validated paging/sorting parameters onto the search result holder.
 *
 * @param limit     page limit.
 * @param offset    offset value.
 * @param sortOrder order of IdP (ASC/DESC).
 * @param sortBy    the column value need to sort.
 * @param filter    filter value for IdP search.
 * @param result    result object.
 * @throws IdentityProviderManagementClientException Error while set offset.
 */
private void setParameters(int limit, int offset, String sortOrder, String sortBy, String filter,
                           IdpSearchResult result) throws IdentityProviderManagementClientException {

    // NOTE(review): the declaration previously ordered the parameters
    // (limit, offset, filter, sortOrder, sortBy) while the only caller passes
    // (limit, offset, sortOrder, sortBy, filter), silently swapping sortBy/sortOrder/filter.
    // The declaration is reordered to match the call site so each value lands correctly.
    result.setLimit(validateLimit(limit));
    result.setOffSet(validateOffset(offset));
    result.setSortBy(validateSortBy(sortBy));
    result.setSortOrder(validateSortOrder(sortOrder));
    result.setFilter(filter);
}
/**
 * Maps the requested sort attribute onto the corresponding DB column,
 * falling back to the default when the attribute is blank or unknown.
 *
 * @param sortBy sortBy attribute.
 * @return Validated sortOrder and sortBy.
 */
private String validateSortBy(String sortBy) {

    if (StringUtils.isBlank(sortBy)) {
        if (log.isDebugEnabled()) {
            log.debug("sortBy attribute is empty. Therefore we set the default sortBy attribute. sortBy" + sortBy);
        }
        return IdPManagementConstants.DEFAULT_SORT_BY;
    }
    if (IdPManagementConstants.IDP_NAME.equals(sortBy)) {
        return IdPManagementConstants.NAME;
    }
    if (IdPManagementConstants.IDP_HOME_REALM_ID.equals(sortBy)) {
        return IdPManagementConstants.HOME_REALM_ID;
    }
    if (log.isDebugEnabled()) {
        log.debug("sortBy attribute is incorrect. Therefore we set the default sortBy attribute. sortBy: "
                + IdPManagementConstants.DEFAULT_SORT_BY);
    }
    return IdPManagementConstants.DEFAULT_SORT_BY;
}
/**
 * Normalizes the requested sort order, keeping ASC/DESC as-is and
 * falling back to the default order for blank or unknown values.
 *
 * @param sortOrder sortOrder ASC/DESC.
 * @return Validated sortOrder and sortBy.
 */
private String validateSortOrder(String sortOrder) {

    if (StringUtils.isBlank(sortOrder)) {
        if (log.isDebugEnabled()) {
            log.debug("sortOrder is empty. Therefore we set the default sortOrder value as ASC. SortOrder: " +
                    IdPManagementConstants.DEFAULT_SORT_ORDER);
        }
        return IdPManagementConstants.DEFAULT_SORT_ORDER;
    }
    if (IdPManagementConstants.DESC_SORT_ORDER.equals(sortOrder)
            || IdPManagementConstants.ASC_SORT_ORDER.equals(sortOrder)) {
        return sortOrder;
    }
    if (log.isDebugEnabled()) {
        log.debug("sortOrder is incorrect. Therefore we set the default sortOrder value as ASC. SortOrder: "
                + IdPManagementConstants.DEFAULT_SORT_ORDER);
    }
    return IdPManagementConstants.DEFAULT_SORT_ORDER;
}
/**
 * Caps the requested page size when it exceeds the configured maximum.
 * <p>
 * When the limit is too large, the 'ItemsPerPage' property from carbon.xml is
 * used instead; if that property is blank the hard maximum applies, and if it
 * is not a valid integer the default results-per-page value is used.
 *
 * @param limit given limit value.
 * @return validated limit and offset value.
 */
private int validateLimit(int limit) {
if (limit > IdPManagementConstants.MAXIMUM_LIMIT_PER_PAGE) {
// The try covers both reading the server property and parsing it to an int.
try {
String itemsPerPagePropertyValue = ServerConfiguration.getInstance()
.getFirstProperty(IdPManagementConstants.ITEMS_PER_PAGE_PROPERTY);
if (log.isDebugEnabled()) {
log.debug("Given limit exceed the maximum limit. Therefore we get the default limit from " +
"carbon.xml. limit: " + limit);
}
if (StringUtils.isNotBlank(itemsPerPagePropertyValue)) {
limit = Integer.parseInt(itemsPerPagePropertyValue);
} else {
// No configured value: fall back to the hard maximum per page.
limit = IdPManagementConstants.MAXIMUM_LIMIT_PER_PAGE;
if (log.isDebugEnabled()) {
log.debug("limit is incorrect. Therefore we set the default limit. limit:" + limit);
}
}
} catch (NumberFormatException e) {
// Misconfigured 'ItemsPerPage' value: warn and use the default page size.
limit = IdPManagementConstants.DEFAULT_RESULTS_PER_PAGE;
log.warn("Error occurred while parsing the 'ItemsPerPage' property value in carbon.xml.", e);
}
}
return limit;
}
/**
 * Validate offset.
 *
 * @param offset given offset value.
 * @return validated limit and offset value.
 * @throws IdentityProviderManagementClientException Error while set offset
 */
private int validateOffset(int offset) throws IdentityProviderManagementClientException {

    if (offset < 0) {
        // Message grammar fixed ("should not negative" -> "should not be negative").
        String message = "Invalid offset applied. Offset should not be negative. offSet: " + offset;
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
                message);
    }
    return offset;
}
/**
 * Retrieves the registered identity providers of a given tenant, filtered by the
 * given filter string.
 *
 * @param tenantDomain Tenant domain whose IdP names are requested
 * @param filter       filter expression applied to the IdP search
 * @return Set of <code>IdentityProvider</code>. IdP names, primary IdP and home realm
 * identifiers of each IdP
 * @throws IdentityProviderManagementException Error when getting list of Identity Providers
 */
@Override
public List<IdentityProvider> getIdPsSearch(String tenantDomain, String filter)
throws IdentityProviderManagementException {
// Null connection lets the DAO open and manage its own DB connection.
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
return dao.getIdPsSearch(null, tenantId, tenantDomain, filter);
}
/**
 * Retrieves the enabled registered identity providers of a given tenant.
 *
 * @param tenantDomain Tenant domain whose IdP names are requested
 * @return Set of <code>IdentityProvider</code>. IdP names, primary IdP and home realm
 * identifiers of each IdP
 * @throws IdentityProviderManagementException Error when getting list of Identity Providers
 */
@Override
public List<IdentityProvider> getEnabledIdPs(String tenantDomain)
        throws IdentityProviderManagementException {

    List<IdentityProvider> enabledIdPs = new ArrayList<IdentityProvider>();
    // Fetch every registered IdP of the tenant and keep only the enabled ones.
    for (IdentityProvider idp : getIdPs(tenantDomain)) {
        if (idp.isEnable()) {
            enabledIdPs.add(idp);
        }
    }
    return enabledIdPs;
}
/**
 * Retrieves the identity provider registered under the given name in a tenant.
 *
 * @param idPName             unique name of the identity provider
 * @param tenantDomain        tenant domain whose IdP is requested
 * @param ignoreFileBasedIdps when {@code true}, skip the file-based IdP fallbacks
 * @return the matching <code>IdentityProvider</code>, or {@code null} when none is found
 * @throws IdentityProviderManagementException on an empty IdP name or a lookup failure
 */
@Override
public IdentityProvider getIdPByName(String idPName, String tenantDomain,
                                     boolean ignoreFileBasedIdps) throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(idPName)) {
        throw new IdentityProviderManagementException(
                "Invalid argument: Identity Provider Name value is empty");
    }
    IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
    if (identityProvider == null && !ignoreFileBasedIdps) {
        // DB miss: fall back to the file-based IdP store, then to the default IdP config.
        identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
        if (identityProvider == null) {
            identityProvider = IdPManagementServiceComponent.getFileBasedIdPs()
                    .get(IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
        }
    }
    return identityProvider;
}
/**
 * Retrieves the identity provider with the given numeric id in a tenant,
 * optionally falling back to the file-based IdP store.
 *
 * @param id                  identity provider id (expected to be a numeric string)
 * @param tenantDomain        tenant domain whose IdP is requested
 * @param ignoreFileBasedIdps when true, skip the file-based IdP fallbacks
 * @return the matching IdentityProvider, or null when none is found
 * @throws IdentityProviderManagementException on an empty id or a lookup failure
 */
@Override
public IdentityProvider getIdPById(String id, String tenantDomain,
boolean ignoreFileBasedIdps) throws IdentityProviderManagementException {
if (StringUtils.isEmpty(id)) {
String msg = "Invalid argument: Identity Provider ID value is empty";
throw new IdentityProviderManagementException(msg);
}
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
Integer intId;
IdentityProvider identityProvider = null;
try {
intId = Integer.parseInt(id);
identityProvider = dao.getIdPById(null, intId, tenantId, tenantDomain);
} catch (NumberFormatException e) {
// Ignore this.
// A non-numeric id simply skips the DB lookup; the file-based fallback below may
// still resolve it by treating the id as a name.
}
if (!ignoreFileBasedIdps) {
if (identityProvider == null) {
// NOTE(review): the file-based lookup uses the raw id as an IdP *name* — presumably
// intentional for file-configured IdPs; confirm against FileBasedIdPMgtDAO.
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(id, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
}
return identityProvider;
}
/**
 * Retrieves the identity provider identified by the given resource id in a tenant.
 *
 * @param resourceId          resource id of the identity provider
 * @param tenantDomain        tenant domain whose IdP is requested
 * @param ignoreFileBasedIdps unused for resource-id lookups (kept for interface compatibility)
 * @return the matching <code>IdentityProvider</code>
 * @throws IdentityProviderManagementException on invalid input or a lookup failure
 */
@Override
public IdentityProvider getIdPByResourceId(String resourceId, String tenantDomain, boolean
        ignoreFileBasedIdps) throws IdentityProviderManagementException {

    validateGetIdPInputValues(resourceId);
    return dao.getIdPByResourceId(resourceId, IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
}
/**
 * Retrieves the identity provider with the given name only when it is enabled.
 *
 * @param idPName             unique name of the identity provider
 * @param tenantDomain        tenant domain whose IdP is requested
 * @param ignoreFileBasedIdps when {@code true}, skip the file-based IdP fallbacks
 * @return the enabled <code>IdentityProvider</code>, or {@code null} when missing or disabled
 * @throws IdentityProviderManagementException on an invalid name or a lookup failure
 */
@Override
public IdentityProvider getEnabledIdPByName(String idPName, String tenantDomain,
                                            boolean ignoreFileBasedIdps) throws IdentityProviderManagementException {

    IdentityProvider idp = getIdPByName(idPName, tenantDomain, ignoreFileBasedIdps);
    return (idp != null && idp.isEnable()) ? idp : null;
}
/**
 * Retrieves Identity provider information about a given tenant by Identity Provider name.
 * Convenience overload that includes the file-based IdP fallbacks.
 *
 * @param idPName      Unique name of the Identity provider of whose information is requested
 * @param tenantDomain Tenant domain whose information is requested
 * @return <code>IdentityProvider</code> Identity Provider information
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by IdP name
 */
@Override
public IdentityProvider getIdPByName(String idPName, String tenantDomain)
throws IdentityProviderManagementException {
// Delegates with ignoreFileBasedIdps = false so file-based fallbacks are consulted.
return getIdPByName(idPName, tenantDomain, false);
}
/**
 * Retrieves the identity provider with the given id in a tenant.
 * Convenience overload that includes the file-based IdP fallbacks.
 *
 * @param id           identity provider id
 * @param tenantDomain tenant domain whose IdP is requested
 * @return the matching IdentityProvider, or null when none is found
 * @throws IdentityProviderManagementException on an empty id or a lookup failure
 */
@Override
public IdentityProvider getIdPById(String id, String tenantDomain) throws IdentityProviderManagementException {
// Delegates with ignoreFileBasedIdps = false so file-based fallbacks are consulted.
return getIdPById(id, tenantDomain, false);
}
/**
 * Retrieves the identity provider whose authenticator carries the given property value.
 *
 * @param property            IDP authenticator property (E.g.: IdPEntityId)
 * @param value               Value associated with given Property
 * @param tenantDomain        tenant domain whose IdP is requested
 * @param ignoreFileBasedIdps when {@code true}, skip the file-based IdP fallback
 * @return <code>IdentityProvider</code> Identity Provider information
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by authenticator property value
 */
@Override
public IdentityProvider getIdPByAuthenticatorPropertyValue(String property, String value, String tenantDomain,
                                                           boolean ignoreFileBasedIdps)
        throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(property) || StringUtils.isEmpty(value)) {
        throw new IdentityProviderManagementException(
                "Invalid argument: Authenticator property or property value is empty");
    }
    IdentityProvider identityProvider =
            dao.getIdPByAuthenticatorPropertyValue(null, property, value, tenantId, tenantDomain);
    if (identityProvider == null && !ignoreFileBasedIdps) {
        // DB miss: consult the file-based IdP store.
        identityProvider =
                new FileBasedIdPMgtDAO().getIdPByAuthenticatorPropertyValue(property, value, tenantDomain);
    }
    return identityProvider;
}
/**
 * Retrieves the identity provider whose named authenticator carries the given property value.
 *
 * @param property            IDP authenticator property (E.g.: IdPEntityId)
 * @param value               Value associated with given Property
 * @param tenantDomain        tenant domain whose IdP is requested
 * @param authenticator       name of the authenticator the property must belong to
 * @param ignoreFileBasedIdps when true, skip the file-based IdP fallback
 * @return <code>IdentityProvider</code> Identity Provider information
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by authenticator property value
 */
public IdentityProvider getIdPByAuthenticatorPropertyValue(String property, String value, String tenantDomain,
String authenticator, boolean ignoreFileBasedIdps)
throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(property) || StringUtils.isEmpty(value) || StringUtils.isEmpty(authenticator)) {
String msg = "Invalid argument: Authenticator property, property value or authenticator name is empty";
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByAuthenticatorPropertyValue(
null, property, value, authenticator, tenantId, tenantDomain);
// DB miss: consult the file-based IdP store unless the caller opted out.
if (identityProvider == null && !ignoreFileBasedIdps) {
identityProvider = new FileBasedIdPMgtDAO()
.getIdPByAuthenticatorPropertyValue(property, value, tenantDomain, authenticator);
}
return identityProvider;
}
/**
 * Retrieves Enabled Identity provider information about a given tenant by Identity Provider name.
 *
 * @param idPName      Unique name of the Identity provider of whose information is requested
 * @param tenantDomain Tenant domain whose information is requested
 * @return <code>IdentityProvider</code> Identity Provider information, or {@code null}
 * when the IdP is missing or disabled
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by IdP name
 */
@Override
public IdentityProvider getEnabledIdPByName(String idPName, String tenantDomain)
        throws IdentityProviderManagementException {

    IdentityProvider idp = getIdPByName(idPName, tenantDomain);
    return (idp != null && idp.isEnable()) ? idp : null;
}
/**
 * Retrieves Identity provider information about a given tenant by realm identifier.
 *
 * @param realmId      Unique realm identifier of the Identity provider of whose information is
 *                     requested
 * @param tenantDomain Tenant domain whose information is requested
 * @return the matching <code>IdentityProvider</code>, or {@code null} when none is found
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by IdP home realm identifier
 */
@Override
public IdentityProvider getIdPByRealmId(String realmId, String tenantDomain)
        throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(realmId)) {
        throw new IdentityProviderManagementException(
                "Invalid argument: Identity Provider Home Realm Identifier value is empty");
    }
    IdentityProvider identityProvider = dao.getIdPByRealmId(realmId, tenantId, tenantDomain);
    // DB miss: fall back to the file-based IdP store.
    return identityProvider != null
            ? identityProvider
            : new FileBasedIdPMgtDAO().getIdPByRealmId(realmId, tenantDomain);
}
/**
 * Retrieves Enabled Identity provider information about a given tenant by realm identifier.
 *
 * @param realmId      Unique realm identifier of the Identity provider of whose information is
 *                     requested
 * @param tenantDomain Tenant domain whose information is requested
 * @return the enabled <code>IdentityProvider</code>, or {@code null} when missing or disabled
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by IdP home realm identifier
 */
@Override
public IdentityProvider getEnabledIdPByRealmId(String realmId, String tenantDomain)
        throws IdentityProviderManagementException {

    IdentityProvider idp = getIdPByRealmId(realmId, tenantDomain);
    return (idp != null && idp.isEnable()) ? idp : null;
}
/**
 * Retrieves the claim mappings of a tenant's IdP whose remote (IdP-side) claim URI
 * matches one of the given IdP claim URIs.
 *
 * @param idPName      Unique Name of the IdP to which the given IdP claim URIs need to be mapped
 * @param tenantDomain The tenant domain of whose local claim URIs to be mapped
 * @param idPClaimURIs IdP claim URIs which need to be mapped to tenant's local claim URIs
 * @return matching claim mappings; an empty set when the IdP has no claim configuration
 * @throws IdentityProviderManagementException Error when getting claim mappings
 */
@Override
public Set<ClaimMapping> getMappedLocalClaims(String idPName, String tenantDomain,
List<String> idPClaimURIs) throws
IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
throw new IdentityProviderManagementException(msg);
}
// Resolve the IdP from the DB, then the file-based store, then the default config.
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
ClaimConfig claimConfiguration = identityProvider.getClaimConfig();
if (claimConfiguration != null) {
ClaimMapping[] claimMappings = claimConfiguration.getClaimMappings();
if (claimMappings != null && claimMappings.length > 0 && idPClaimURIs != null) {
Set<ClaimMapping> returnSet = new HashSet<ClaimMapping>();
// Collect each requested IdP claim's first matching mapping.
for (String idpClaim : idPClaimURIs) {
for (ClaimMapping claimMapping : claimMappings) {
if (claimMapping.getRemoteClaim().getClaimUri().equals(idpClaim)) {
returnSet.add(claimMapping);
break;
}
}
}
return returnSet;
}
}
// No claim configuration or no mappings: return an empty set, never null.
return new HashSet<ClaimMapping>();
}
/**
 * Resolves the local claim URI mapped to each of the given IdP claim URIs of a tenant's IdP.
 *
 * @param idPName      Unique Name of the IdP to which the given IdP claim URIs need to be mapped
 * @param tenantDomain The tenant domain of whose local claim URIs to be mapped
 * @param idPClaimURIs IdP claim URIs which need to be mapped to tenant's local claim URIs
 * @return map from IdP (remote) claim URI to the corresponding local claim URI
 * @throws IdentityProviderManagementException Error when getting claim mappings
 */
@Override
public Map<String, String> getMappedLocalClaimsMap(String idPName, String tenantDomain,
                                                   List<String> idPClaimURIs) throws
        IdentityProviderManagementException {

    Map<String, String> idpToLocalClaimMap = new HashMap<String, String>();
    for (ClaimMapping mapping : getMappedLocalClaims(idPName, tenantDomain, idPClaimURIs)) {
        idpToLocalClaimMap.put(mapping.getRemoteClaim().getClaimUri(), mapping.getLocalClaim().getClaimUri());
    }
    return idpToLocalClaimMap;
}
/**
 * Retrieves the claim mappings of a tenant's IdP whose local claim URI matches one
 * of the given local claim URIs.
 *
 * @param idPName        Unique Name of the IdP to which the given local claim URIs need to be mapped
 * @param tenantDomain   The tenant domain of whose local claim URIs to be mapped
 * @param localClaimURIs Local claim URIs which need to be mapped to IdP's claim URIs
 * @return matching claim mappings; an empty set when the IdP has no claim configuration
 * @throws IdentityProviderManagementException Error when getting claim mappings
 */
@Override
public Set<ClaimMapping> getMappedIdPClaims(String idPName, String tenantDomain,
List<String> localClaimURIs) throws
IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
throw new IdentityProviderManagementException(msg);
}
// Resolve the IdP from the DB, then the file-based store, then the default config.
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
ClaimConfig claimConfiguration = identityProvider.getClaimConfig();
if (claimConfiguration != null) {
ClaimMapping[] claimMappings = claimConfiguration.getClaimMappings();
if (claimMappings != null && claimMappings.length > 0 && localClaimURIs != null) {
Set<ClaimMapping> returnSet = new HashSet<ClaimMapping>();
// Collect each requested local claim's first matching mapping.
for (String localClaimURI : localClaimURIs) {
for (ClaimMapping claimMapping : claimMappings) {
if (claimMapping.getLocalClaim().getClaimUri().equals(localClaimURI)) {
returnSet.add(claimMapping);
break;
}
}
}
return returnSet;
}
}
// No claim configuration or no mappings: return an empty set, never null.
return new HashSet<ClaimMapping>();
}
/**
 * Resolves the IdP claim URI mapped to each of the given local claim URIs of a tenant's IdP.
 *
 * @param idPName        Unique Name of the IdP to which the given local claim URIs need to be mapped
 * @param tenantDomain   The tenant domain of whose local claim URIs to be mapped
 * @param localClaimURIs Local claim URIs which need to be mapped to IdP's claim URIs
 * @return map from local claim URI to the corresponding IdP (remote) claim URI
 * @throws IdentityProviderManagementException Error when getting claim mappings
 */
@Override
public Map<String, String> getMappedIdPClaimsMap(String idPName, String tenantDomain,
                                                 List<String> localClaimURIs) throws
        IdentityProviderManagementException {

    Map<String, String> localToIdPClaimMap = new HashMap<String, String>();
    for (ClaimMapping mapping : getMappedIdPClaims(idPName, tenantDomain, localClaimURIs)) {
        localToIdPClaimMap.put(mapping.getLocalClaim().getClaimUri(), mapping.getRemoteClaim().getClaimUri());
    }
    return localToIdPClaimMap;
}
/**
 * Retrieves the role mappings of a tenant's IdP whose remote (IdP-side) role matches
 * one of the given IdP roles.
 *
 * @param idPName      Unique name of the IdP to which the given IdP roles need to be mapped
 * @param tenantDomain The tenant domain of whose local roles to be mapped
 * @param idPRoles     IdP roles which need to be mapped to local roles
 * @return matching role mappings; an empty set when the IdP has no role configuration
 * @throws IdentityProviderManagementException Error when getting role mappings
 */
@Override
public Set<RoleMapping> getMappedLocalRoles(String idPName, String tenantDomain,
String[] idPRoles) throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
throw new IdentityProviderManagementException(msg);
}
// Resolve the IdP from the DB, then the file-based store, then the default config.
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
PermissionsAndRoleConfig roleConfiguration = identityProvider.getPermissionAndRoleConfig();
if (roleConfiguration != null) {
RoleMapping[] roleMappings = roleConfiguration.getRoleMappings();
if (roleMappings != null && roleMappings.length > 0 && idPRoles != null) {
Set<RoleMapping> returnSet = new HashSet<RoleMapping>();
// Collect each requested IdP role's first matching mapping.
for (String idPRole : idPRoles) {
for (RoleMapping roleMapping : roleMappings) {
if (roleMapping.getRemoteRole().equals(idPRole)) {
returnSet.add(roleMapping);
break;
}
}
}
return returnSet;
}
}
// No role configuration or no mappings: return an empty set, never null.
return new HashSet<RoleMapping>();
}
/**
 * Resolves the local role mapped to each of the given IdP roles of a tenant's IdP.
 *
 * @param idPName      Unique name of the IdP to which the given IdP roles need to be mapped
 * @param tenantDomain The tenant domain of whose local roles to be mapped
 * @param idPRoles     IdP roles which need to be mapped to local roles
 * @return map from remote (IdP) role to the corresponding local role
 * @throws IdentityProviderManagementException Error when getting role mappings
 */
@Override
public Map<String, LocalRole> getMappedLocalRolesMap(String idPName, String tenantDomain,
                                                     String[] idPRoles) throws IdentityProviderManagementException {

    Map<String, LocalRole> remoteToLocalRoleMap = new HashMap<String, LocalRole>();
    for (RoleMapping mapping : getMappedLocalRoles(idPName, tenantDomain, idPRoles)) {
        remoteToLocalRoleMap.put(mapping.getRemoteRole(), mapping.getLocalRole());
    }
    return remoteToLocalRoleMap;
}
/**
 * Retrieves the role mappings of a tenant's IdP whose local role matches one of the
 * given local roles.
 *
 * @param idPName      Unique name of the IdP to which the given local roles need to be mapped
 * @param tenantDomain The tenant domain of whose local roles need to be mapped
 * @param localRoles   Local roles which need to be mapped to IdP roles
 * @return matching role mappings; an empty set when the IdP has no role configuration
 * @throws IdentityProviderManagementException Error when getting role mappings
 */
@Override
public Set<RoleMapping> getMappedIdPRoles(String idPName, String tenantDomain,
LocalRole[] localRoles) throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
throw new IdentityProviderManagementException(msg);
}
// Resolve the IdP from the DB, then the file-based store, then the default config.
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
PermissionsAndRoleConfig roleConfiguration = identityProvider.getPermissionAndRoleConfig();
if (roleConfiguration != null) {
RoleMapping[] roleMappings = roleConfiguration.getRoleMappings();
if (roleMappings != null && roleMappings.length > 0 && localRoles != null) {
Set<RoleMapping> returnSet = new HashSet<RoleMapping>();
// Collect each requested local role's first matching mapping.
for (LocalRole localRole : localRoles) {
for (RoleMapping roleMapping : roleMappings) {
if (roleMapping.getLocalRole().equals(localRole)) {
returnSet.add(roleMapping);
break;
}
}
}
return returnSet;
}
}
// No role configuration or no mappings: return an empty set, never null.
return new HashSet<RoleMapping>();
}
/**
 * Maps the given local roles of an identity provider to their IdP role counterparts and
 * returns the result as a map keyed by the local role.
 *
 * @param idPName      unique name of the IdP to which the given local roles need to be mapped
 * @param tenantDomain tenant domain of whose local roles need to be mapped
 * @param localRoles   local roles which need to be mapped to IdP roles
 * @return map from local role to the corresponding remote (IdP) role name (empty when nothing
 *         matches)
 * @throws IdentityProviderManagementException error when getting role mappings
 */
@Override
public Map<LocalRole, String> getMappedIdPRolesMap(String idPName, String tenantDomain,
        LocalRole[] localRoles) throws
        IdentityProviderManagementException {

    Map<LocalRole, String> localToRemote = new HashMap<>();
    for (RoleMapping mapping : getMappedIdPRoles(idPName, tenantDomain, localRoles)) {
        localToRemote.put(mapping.getLocalRole(), mapping.getRemoteRole());
    }
    return localToRemote;
}
/**
 * If one of the federated authenticator properties carries a SAML metadata document, converts
 * it through a registered {@link MetadataConverter} into a new
 * {@link FederatedAuthenticatorConfig} and replaces the authenticator's properties with the
 * converted ones. A certificate embedded in the metadata, if present, is set on the identity
 * provider. As a side effect, the matching converter is remembered in
 * {@code SAML2SSOMetadataConverter} for later save/delete calls.
 * (Method name carries a historical typo: "Metadta".)
 *
 * @param identityProvider identity provider whose federated authenticator configs are inspected
 * @return the raw metadata string that was found (empty string when no metadata property exists)
 * @throws IdentityProviderManagementException when no converter is registered, the SP entity id
 *                                             is missing, or metadata conversion fails
 */
private String handleMetadta(IdentityProvider identityProvider) throws IdentityProviderManagementException {
    StringBuilder metadata = new StringBuilder();
    // Without at least one registered converter, metadata cannot be processed at all.
    if (IdpMgtServiceComponentHolder.getInstance().getMetadataConverters().isEmpty()) {
        throw new IdentityProviderManagementException("Metadata Converter is not set");
    }
    FederatedAuthenticatorConfig federatedAuthenticatorConfigs[] = identityProvider.getFederatedAuthenticatorConfigs();
    for (int i = 0; i < federatedAuthenticatorConfigs.length; i++) {
        Property properties[] = federatedAuthenticatorConfigs[i].getProperties();
        if (ArrayUtils.isNotEmpty(properties)) {
            for (int j = 0; j < properties.length; j++) {
                if (properties[j] != null) {
                    // Only properties whose name contains the META_DATA marker are treated as
                    // metadata documents.
                    if (properties[j].getName() != null && properties[j].getName().contains(IdPManagementConstants.META_DATA)) {
                        // Find the first converter that can handle this metadata property.
                        for (int v = 0; v < IdpMgtServiceComponentHolder.getInstance().getMetadataConverters()
                                .size(); v++) {
                            MetadataConverter metadataConverter = IdpMgtServiceComponentHolder.getInstance()
                                    .getMetadataConverters().get(v);
                            if (metadataConverter.canHandle(properties[j])) {
                                // Remembered for subsequent saveMetadataString/deleteMetadataString calls.
                                SAML2SSOMetadataConverter = metadataConverter;
                                try {
                                    metadata.append(properties[j].getValue());
                                    // The converter writes any embedded certificate into this buffer.
                                    StringBuilder certificate = new StringBuilder("");
                                    try {
                                        FederatedAuthenticatorConfig metaFederated = metadataConverter.getFederatedAuthenticatorConfig(properties, certificate);
                                        // Preserve the caller-supplied SP entity id across the conversion:
                                        // read it from the original properties...
                                        String spName = "";
                                        for (int b = 0; b < properties.length; b++) {
                                            if (properties[b] != null && properties[b].getName() != null &&
                                                    properties[b].getName().toString().equals(IdentityApplicationConstants.Authenticator.SAML2SSO.SP_ENTITY_ID)) {
                                                spName = properties[b].getValue();
                                            }
                                        }
                                        if (spName.equals("")) {
                                            throw new IdentityProviderManagementException("SP name can't be empty");
                                        }
                                        // ...and write it back onto the converted properties.
                                        if (metaFederated != null && ArrayUtils.isNotEmpty(metaFederated.getProperties())) {
                                            for (int y = 0; y < metaFederated.getProperties().length; y++) {
                                                if (metaFederated.getProperties()[y] != null && metaFederated.getProperties()[y].getName() != null
                                                        && metaFederated.getProperties()[y].getName().toString().equals(IdentityApplicationConstants.Authenticator.SAML2SSO.SP_ENTITY_ID)) {
                                                    metaFederated.getProperties()[y].setValue(spName);
                                                    break;
                                                }
                                            }
                                        }
                                        // Replace the authenticator's properties with the converted set.
                                        if (metaFederated != null && metaFederated.getProperties() != null && metaFederated.getProperties().length > 0) {
                                            federatedAuthenticatorConfigs[i].setProperties(metaFederated.getProperties());
                                        } else {
                                            throw new IdentityProviderManagementException("Error setting metadata using file");
                                        }
                                    } catch (IdentityProviderManagementException ex) {
                                        throw new IdentityProviderManagementException("Error converting metadata", ex);
                                    }
                                    if (certificate.toString().length() > 0) {
                                        identityProvider.setCertificate(certificate.toString());
                                    }
                                } catch (XMLStreamException e) {
                                    throw new IdentityProviderManagementException("Error while configuring metadata", e);
                                }
                                // Only the first capable converter is applied per property.
                                break;
                            }
                        }
                    }
                }
            }
        }
    }
    return metadata.toString();
}
/**
 * Adds an Identity Provider to the given tenant. Delegates to
 * {@link #addIdPWithResourceId(IdentityProvider, String)} and discards the returned IdP.
 *
 * @param identityProvider new Identity Provider information
 * @param tenantDomain     tenant domain the IdP is added to
 * @throws IdentityProviderManagementException error when adding Identity Provider
 *                                             information
 */
@Override
public void addIdP(IdentityProvider identityProvider, String tenantDomain)
        throws IdentityProviderManagementException {
    addIdPWithResourceId(identityProvider, tenantDomain);
}
/**
 * Adds an Identity Provider to the given tenant and returns the persisted representation.
 * Registered pre/post listeners may veto the operation, in which case {@code null} is
 * returned. Role mappings are validated against the tenant's user store, the SAML IdP entity
 * id is checked for uniqueness, and any supplied SAML metadata is persisted.
 *
 * @param identityProvider new Identity Provider information
 * @param tenantDomain     tenant domain the IdP is added to
 * @return the added Identity Provider as persisted, or {@code null} when a listener vetoed
 * @throws IdentityProviderManagementException error when adding Identity Provider
 *                                             information
 */
@Override
public IdentityProvider addIdPWithResourceId(IdentityProvider identityProvider, String tenantDomain)
        throws IdentityProviderManagementException {
    // Fail fast when the IdP name is already taken (database or file-based).
    validateAddIdPInputValues(identityProvider.getIdentityProviderName(), tenantDomain);
    // invoking the pre listeners
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreAddIdP(identityProvider, tenantDomain)) {
            return null;
        }
    }
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    // Drop role mappings whose local role does not exist in the tenant's user store.
    if (isPermissionAndRoleConfigExist(identityProvider)) {
        verifyAndUpdateRoleConfiguration(tenantDomain, tenantId, identityProvider.getPermissionAndRoleConfig());
    }
    validateIdPEntityId(identityProvider.getFederatedAuthenticatorConfigs(), tenantId, tenantDomain);
    String idpName = identityProvider.getIdentityProviderName();
    // Parse any SAML metadata property; side effect: sets SAML2SSOMetadataConverter.
    String metadata = handleMetadta(identityProvider);
    if (isMetadataFileExist(idpName, metadata)) {
        if (SAML2SSOMetadataConverter != null) {
            SAML2SSOMetadataConverter.saveMetadataString(tenantId, idpName, metadata);
        } else {
            String data = "Couldn't save metadata in registry.SAML2SSOMetadataConverter is not set.";
            throw IdPManagementUtil.handleServerException(IdPManagementConstants.ErrorMessage.ERROR_CODE_ADD_IDP,
                    data);
        }
    }
    identityProvider = dao.addIdP(identityProvider, tenantId, tenantDomain);
    // invoking the post listeners
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPostAddIdP(identityProvider, tenantDomain)) {
            return null;
        }
    }
    return identityProvider;
}
/**
 * Deletes the Identity Provider with the given name from the given tenant.
 *
 * @param idPName      name of the IdP to be deleted
 * @param tenantDomain tenant domain the IdP belongs to
 * @throws IdentityProviderManagementException error when deleting Identity Provider
 *                                             information
 */
@Override
public void deleteIdP(String idPName, String tenantDomain) throws IdentityProviderManagementException {

    IdentityProvider idpToDelete = this.getIdPByName(idPName, tenantDomain, true);
    if (idpToDelete == null) {
        throw IdPManagementUtil.handleClientException(
                IdPManagementConstants.ErrorMessage.ERROR_CODE_IDP_DOES_NOT_EXIST, idPName);
    }
    deleteIdPByResourceId(idpToDelete.getResourceId(), tenantDomain);
}
/**
 * Deletes the Identity Provider identified by the given resource ID from the given tenant.
 * Pre/post listeners may veto the operation. Any stored SAML metadata for the IdP is removed
 * as well.
 *
 * @param resourceId   resource ID of the IdP to be deleted
 * @param tenantDomain tenant domain the IdP belongs to
 * @throws IdentityProviderManagementException error when deleting Identity Provider
 *                                             information
 */
@Override
public void deleteIdPByResourceId(String resourceId, String tenantDomain) throws
        IdentityProviderManagementException {

    Collection<IdentityProviderMgtListener> mgtListeners = IdPManagementServiceComponent.getIdpMgtListeners();
    // Pre listeners may abort the deletion.
    for (IdentityProviderMgtListener mgtListener : mgtListeners) {
        if (mgtListener.isEnable() && !mgtListener.doPreDeleteIdPByResourceId(resourceId, tenantDomain)) {
            return;
        }
    }
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    IdentityProvider identityProvider = getIdPByResourceId(resourceId, tenantDomain, true);
    if (SAML2SSOMetadataConverter != null) {
        String idpName = (identityProvider != null) ? identityProvider.getIdentityProviderName() : null;
        SAML2SSOMetadataConverter.deleteMetadataString(tenantId, idpName);
    }
    dao.deleteIdPByResourceId(resourceId, tenantId, tenantDomain);
    // Post listeners run after the persistent delete.
    for (IdentityProviderMgtListener mgtListener : mgtListeners) {
        if (mgtListener.isEnable() && !mgtListener.doPostDeleteIdPByResourceId(resourceId, identityProvider,
                tenantDomain)) {
            return;
        }
    }
}
/**
 * Force deletes the named Identity Provider from a given tenant. This removes any
 * associations this Identity Provider has with any Service Providers in authentication steps
 * or provisioning.
 *
 * @param idpName      name of IDP to be deleted
 * @param tenantDomain tenantDomain to which the IDP belongs to
 * @throws IdentityProviderManagementException error when deleting the Identity Provider
 */
public void forceDeleteIdp(String idpName, String tenantDomain) throws IdentityProviderManagementException {

    IdentityProvider idpToDelete = this.getIdPByName(idpName, tenantDomain, true);
    if (idpToDelete == null) {
        throw IdPManagementUtil.handleClientException(
                IdPManagementConstants.ErrorMessage.ERROR_CODE_IDP_DOES_NOT_EXIST, idpName);
    }
    forceDeleteIdpByResourceId(idpToDelete.getResourceId(), tenantDomain);
}
/**
 * Force deletes the Identity Provider identified by the given resource ID. This removes any
 * associations this Identity Provider has with any Service Providers in authentication steps
 * or provisioning. Pre/post listeners may veto the operation; stored SAML metadata is removed
 * as well.
 *
 * @param resourceId   resource ID of IDP to be deleted
 * @param tenantDomain tenantDomain to which the IDP belongs to
 * @throws IdentityProviderManagementException error when deleting the Identity Provider
 */
public void forceDeleteIdpByResourceId(String resourceId, String tenantDomain) throws
        IdentityProviderManagementException {

    Collection<IdentityProviderMgtListener> mgtListeners = IdPManagementServiceComponent.getIdpMgtListeners();
    // Pre listeners may abort the deletion.
    for (IdentityProviderMgtListener mgtListener : mgtListeners) {
        if (mgtListener.isEnable() && !mgtListener.doPreDeleteIdPByResourceId(resourceId, tenantDomain)) {
            return;
        }
    }
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    IdentityProvider identityProvider = getIdPByResourceId(resourceId, tenantDomain, true);
    if (SAML2SSOMetadataConverter != null) {
        String idpName = (identityProvider != null) ? identityProvider.getIdentityProviderName() : null;
        SAML2SSOMetadataConverter.deleteMetadataString(tenantId, idpName);
    }
    dao.forceDeleteIdPByResourceId(resourceId, tenantId, tenantDomain);
    // Post listeners run after the persistent delete.
    for (IdentityProviderMgtListener mgtListener : mgtListeners) {
        if (mgtListener.isEnable() && !mgtListener.doPostDeleteIdPByResourceId(resourceId, identityProvider,
                tenantDomain)) {
            return;
        }
    }
}
/**
 * Updates the Identity Provider currently stored under the given name with the supplied new
 * information.
 *
 * @param oldIdPName          existing Identity Provider name
 * @param newIdentityProvider new IdP information
 * @param tenantDomain        tenant domain the IdP belongs to
 * @throws IdentityProviderManagementException error when updating Identity Provider
 *                                             information
 */
@Override
public void updateIdP(String oldIdPName, IdentityProvider newIdentityProvider,
        String tenantDomain) throws IdentityProviderManagementException {

    IdentityProvider existingIdP = this.getIdPByName(oldIdPName, tenantDomain, true);
    if (existingIdP == null) {
        throw IdPManagementUtil.handleClientException(
                IdPManagementConstants.ErrorMessage.ERROR_CODE_IDP_DOES_NOT_EXIST, oldIdPName);
    }
    updateIdPByResourceId(existingIdP.getResourceId(), newIdentityProvider, tenantDomain);
}
/**
 * Updates the Identity Provider identified by the given resource ID with the supplied new
 * information and returns the persisted result. Pre/post listeners may veto the operation, in
 * which case {@code null} is returned. The SAML entity id is validated against other IdPs in
 * the tenant, role mappings are verified against the user store, and supplied SAML metadata is
 * persisted.
 *
 * @param resourceId          existing Identity Provider resourceId
 * @param newIdentityProvider new IdP information
 * @param tenantDomain        tenant domain of IDP.
 * @return the updated Identity Provider as persisted, or {@code null} when a listener vetoed
 * @throws IdentityProviderManagementException error when updating Identity Provider
 *                                             information
 */
@Override
public IdentityProvider updateIdPByResourceId(String resourceId, IdentityProvider
        newIdentityProvider, String tenantDomain) throws IdentityProviderManagementException {
    IdentityProvider currentIdentityProvider = this
            .getIdPByResourceId(resourceId, tenantDomain, true);
    // Throws when the IdP does not exist or the new name collides with a file-based IdP.
    validateUpdateIdPInputValues(currentIdentityProvider, resourceId, newIdentityProvider.getIdentityProviderName());
    // Invoking the pre listeners.
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreUpdateIdPByResourceId(resourceId, newIdentityProvider,
                tenantDomain)) {
            return null;
        }
    }
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    // Drop role mappings whose local role does not exist in the tenant's user store.
    if (isPermissionAndRoleConfigExist(newIdentityProvider)) {
        verifyAndUpdateRoleConfiguration(tenantDomain, tenantId, newIdentityProvider.getPermissionAndRoleConfig());
    }
    validateUpdateOfIdPEntityId(currentIdentityProvider.getFederatedAuthenticatorConfigs(),
            newIdentityProvider.getFederatedAuthenticatorConfigs(),
            tenantId, tenantDomain);
    String idpName = newIdentityProvider.getIdentityProviderName();
    // Parse any SAML metadata property; side effect: sets SAML2SSOMetadataConverter.
    String metadata = handleMetadta(newIdentityProvider);
    if (isMetadataFileExist(idpName, metadata)) {
        if (SAML2SSOMetadataConverter != null) {
            SAML2SSOMetadataConverter.saveMetadataString(tenantId, idpName, metadata);
        } else {
            String data = "Couldn't save metadata in registry.SAML2SSOMetadataConverter is not set.";
            // NOTE(review): the update path reuses ERROR_CODE_ADD_IDP — confirm this is intended.
            throw IdPManagementUtil.handleServerException(IdPManagementConstants.ErrorMessage.ERROR_CODE_ADD_IDP,
                    data);
        }
    }
    IdentityProvider updateIdP = dao.updateIdP(newIdentityProvider, currentIdentityProvider, tenantId, tenantDomain);
    // invoking the post listeners
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPostUpdateIdPByResourceId(resourceId, currentIdentityProvider,
                newIdentityProvider, tenantDomain)) {
            return null;
        }
    }
    return updateIdP;
}
/**
 * Get the federated authenticators registered in the system.
 *
 * @return <code>FederatedAuthenticatorConfig</code> array; empty when none are registered.
 * @throws IdentityProviderManagementException error when getting authenticators registered
 *                                             in the system
 */
@Override
public FederatedAuthenticatorConfig[] getAllFederatedAuthenticators()
        throws IdentityProviderManagementException {

    List<FederatedAuthenticatorConfig> registered = ApplicationAuthenticatorService
            .getInstance().getFederatedAuthenticators();
    if (CollectionUtils.isEmpty(registered)) {
        return new FederatedAuthenticatorConfig[0];
    }
    return registered.toArray(new FederatedAuthenticatorConfig[0]);
}
/**
 * Get the Provisioning Connectors registered in the system.
 *
 * @return <code>ProvisioningConnectorConfig</code> array; empty when none are registered
 *         (previously returned {@code null}, which forced null checks on every caller and was
 *         inconsistent with {@link #getAllFederatedAuthenticators()}).
 * @throws IdentityProviderManagementException error when getting the registered connectors
 */
@Override
public ProvisioningConnectorConfig[] getAllProvisioningConnectors()
        throws IdentityProviderManagementException {

    List<ProvisioningConnectorConfig> connectorConfigs = ProvisioningConnectorService
            .getInstance().getProvisioningConnectorConfigs();
    if (CollectionUtils.isNotEmpty(connectorConfigs)) {
        return connectorConfigs.toArray(new ProvisioningConnectorConfig[connectorConfigs.size()]);
    }
    // Return an empty array instead of null so callers can iterate unconditionally.
    return new ProvisioningConnectorConfig[0];
}
/**
 * Validates that the SAML IdP entity id carried in the given federated authenticator configs
 * is not already registered by another IdP in the tenant. Only the first IDP_ENTITY_ID
 * property found on a SAML2 SSO authenticator is checked; the method either returns
 * {@code true} or throws — the boolean return value is always {@code true}.
 *
 * @param federatedAuthenticatorConfigs authenticator configs of the IdP being added
 * @param tenantId                      tenant id used for the uniqueness lookup
 * @param tenantDomain                  tenant domain used in the error message
 * @return always {@code true} when no duplicate entity id exists
 * @throws IdentityProviderManagementException when the entity id is already registered
 */
private boolean validateIdPEntityId(FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs,
        int tenantId, String tenantDomain) throws IdentityProviderManagementException {
    if (federatedAuthenticatorConfigs != null) {
        for (FederatedAuthenticatorConfig authConfig : federatedAuthenticatorConfigs) {
            // Both the legacy and current SAML2 SSO authenticator names are accepted.
            if (IdentityApplicationConstants.Authenticator.SAML2SSO.FED_AUTH_NAME.equals(authConfig.getName()) ||
                    IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(authConfig.getName())) {
                Property[] properties = authConfig.getProperties();
                if (properties != null) {
                    for (Property property : properties) {
                        if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(
                                property.getName())) {
                            // Reject when another IdP already registered this entity id.
                            if (dao.isIdPAvailableForAuthenticatorProperty(authConfig.getName(),
                                    IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID,
                                    property.getValue(), tenantId)) {
                                String msg = "An Identity Provider Entity ID has already been registered with the " +
                                        "name '" + property.getValue() + "' for tenant '" + tenantDomain + "'";
                                throw new IdentityProviderManagementException(msg);
                            }
                            // First IDP_ENTITY_ID decides the outcome; no further configs checked.
                            return true;
                        }
                    }
                }
            }
        }
    }
    return true;
}
/**
 * Validates the SAML IdP entity id during an update: when the entity id in the new configs
 * differs from the current one, it must not already be registered by another IdP in the
 * tenant. Keeping the same entity id is always allowed. The method either returns
 * {@code true} or throws — the boolean return value is always {@code true}.
 *
 * @param currentFederatedAuthConfigs authenticator configs currently stored for the IdP
 * @param newFederatedAuthConfigs     authenticator configs supplied in the update
 * @param tenantId                    tenant id used for the uniqueness lookup
 * @param tenantDomain                tenant domain used in the error message
 * @return always {@code true} when the (changed) entity id is not taken
 * @throws IdentityProviderManagementException when the new entity id is already registered
 */
private boolean validateUpdateOfIdPEntityId(FederatedAuthenticatorConfig[] currentFederatedAuthConfigs,
                                            FederatedAuthenticatorConfig[] newFederatedAuthConfigs,
                                            int tenantId, String tenantDomain)
        throws IdentityProviderManagementException {
    String currentIdentityProviderEntityId = null;
    // Step 1: extract the entity id from the currently stored SAML2 SSO authenticator, if any.
    if (currentFederatedAuthConfigs != null) {
        for (FederatedAuthenticatorConfig fedAuthnConfig : currentFederatedAuthConfigs) {
            if (IdentityApplicationConstants.Authenticator.SAML2SSO.FED_AUTH_NAME.equals(fedAuthnConfig.getName()) ||
                    IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(fedAuthnConfig.getName())) {
                Property[] properties = fedAuthnConfig.getProperties();
                if (properties != null) {
                    for (Property property : properties) {
                        if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals
                                (property.getName())) {
                            currentIdentityProviderEntityId = property.getValue();
                            break;
                        }
                    }
                }
                break;
            }
        }
    }
    // Step 2: compare against the entity id in the new configs.
    if (newFederatedAuthConfigs != null) {
        for (FederatedAuthenticatorConfig fedAuthnConfig : newFederatedAuthConfigs) {
            if (IdentityApplicationConstants.Authenticator.SAML2SSO.FED_AUTH_NAME.equals(fedAuthnConfig.getName()) ||
                    IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(fedAuthnConfig.getName())) {
                Property[] properties = fedAuthnConfig.getProperties();
                if (properties != null) {
                    for (Property property : properties) {
                        if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(property.
                                getName())) {
                            // Unchanged entity id is always acceptable.
                            if (currentIdentityProviderEntityId != null && currentIdentityProviderEntityId.equals
                                    (property.getValue())) {
                                return true;
                            } else {
                                // Changed (or newly added) entity id must be unique in the tenant.
                                if (dao.isIdPAvailableForAuthenticatorProperty(fedAuthnConfig.getName(),
                                        IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID,
                                        property.getValue(), tenantId)) {
                                    String msg = "An Identity Provider Entity ID has already been registered " +
                                            "with the name '" +
                                            property.getValue() + "' for tenant '" + tenantDomain + "'";
                                    throw new IdentityProviderManagementException(msg);
                                }
                                return true;
                            }
                        }
                    }
                }
                break;
            }
        }
    }
    return true;
}
/**
 * Returns the resident IdP's OIDC entity id (ID token issuer), falling back to
 * {@code "localhost"} when the server configuration does not define one.
 *
 * @return configured ID token issuer id, or "localhost" when blank/unset
 */
private String getOIDCResidentIdPEntityId() {
    String idTokenIssuerId = IdentityUtil.getProperty("OAuth.OpenIDConnect.IDTokenIssuerID");
    return StringUtils.isBlank(idTokenIssuerId) ? "localhost" : idTokenIssuerId;
}
/**
 * Builds the SAML metadata string for the resident IdP of the given tenant by handing its
 * SAML2 SSO authenticator config to the first registered {@link MetadataConverter} that can
 * handle it.
 *
 * @param tenantDomain tenant domain whose resident IdP metadata is requested
 * @return metadata string, or {@code null} when the resident IdP has no SAML2 SSO
 *         authenticator or no converter can handle it
 * @throws IdentityProviderManagementException when no converter is registered or metadata
 *                                             generation fails
 */
public String getResidentIDPMetadata(String tenantDomain) throws IdentityProviderManagementException {
    if (IdpMgtServiceComponentHolder.getInstance().getMetadataConverters().isEmpty()) {
        throw new IdentityProviderManagementException("Error receiving Metadata object");
    }
    IdentityProvider residentIdentityProvider = this.getResidentIdP(tenantDomain);
    // Locate the SAML2 SSO authenticator config among the resident IdP's authenticators.
    FederatedAuthenticatorConfig samlFederatedAuthenticatorConfig = null;
    for (FederatedAuthenticatorConfig config : residentIdentityProvider.getFederatedAuthenticatorConfigs()) {
        if (IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(config.getName())) {
            samlFederatedAuthenticatorConfig = config;
            break;
        }
    }
    if (samlFederatedAuthenticatorConfig != null) {
        try {
            for (MetadataConverter converter : IdpMgtServiceComponentHolder.getInstance().getMetadataConverters()) {
                if (converter.canHandle(samlFederatedAuthenticatorConfig)) {
                    return converter.getMetadataString(samlFederatedAuthenticatorConfig);
                }
            }
        } catch (IdentityProviderSAMLException e) {
            // Fix: preserve the original exception as the cause instead of dropping it.
            throw new IdentityProviderManagementException(e.getMessage(), e);
        }
    }
    return null;
}
/**
 * Overrides the persisted endpoint URLs of the resident IdP when the server hostname/port has
 * changed. Only the Passive STS endpoint is refreshed here, since not all endpoints are
 * persisted.
 *
 * @param residentIDP resident identity provider to refresh
 * @throws IdentityProviderManagementException declared for callers; see
 *                                             {@link #updateFederationAuthenticationConfigProperty}
 */
private void overrideResidentIdpEPUrls(IdentityProvider residentIDP)
        throws IdentityProviderManagementException {

    String currentPassiveStsUrl = IdentityUtil.getServerURL(IdentityConstants.STS.PASSIVE_STS, true, true);
    updateFederationAuthenticationConfigProperty(residentIDP,
            IdentityApplicationConstants.Authenticator.PassiveSTS.NAME,
            IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL,
            currentPassiveStsUrl);
}
/**
 * Updates the value of the named property on the named federated authenticator of the
 * resident IdP, if the authenticator and property exist and the value actually changes
 * (case-insensitive comparison).
 *
 * @param residentIdentityProvider resident IdP whose authenticator config is modified
 * @param authenticatorName        name of the federated authenticator to look up
 * @param propertyName             name of the property to update
 * @param newValue                 value to set
 * @return {@code true} if the value was updated, {@code false} if it was already up to date
 *         or the authenticator/property was not found
 */
private boolean updateFederationAuthenticationConfigProperty(IdentityProvider residentIdentityProvider, String
        authenticatorName, String propertyName, String newValue) {

    FederatedAuthenticatorConfig authenticator = IdentityApplicationManagementUtil
            .getFederatedAuthenticator(residentIdentityProvider.getFederatedAuthenticatorConfigs(),
                    authenticatorName);
    if (authenticator == null) {
        return false;
    }
    Property targetProperty = IdentityApplicationManagementUtil.getProperty(authenticator.getProperties(),
            propertyName);
    if (targetProperty == null) {
        return false;
    }
    if (StringUtils.equalsIgnoreCase(targetProperty.getValue(), newValue)) {
        return false;
    }
    targetProperty.setValue(newValue);
    return true;
}
/**
 * Rewrites the given URL so its path is prefixed with the tenant path segment
 * ({@code /t/<tenantDomain>}), preserving scheme, authority, query and fragment.
 *
 * @param url          absolute URL to rewrite
 * @param tenantDomain tenant domain to inject into the path
 * @return the tenant-qualified URL string
 * @throws URISyntaxException when the input URL cannot be parsed
 */
private String getTenantUrl(String url, String tenantDomain) throws URISyntaxException {
    URI parsed = new URI(url);
    String tenantPath = "/t/" + tenantDomain + parsed.getPath();
    return new URI(parsed.getScheme(), parsed.getUserInfo(), parsed.getHost(), parsed.getPort(),
            tenantPath, parsed.getQuery(), parsed.getFragment()).toString();
}
/**
 * Filters the given role configuration in place, keeping only role mappings whose local role
 * actually exists in the tenant's user store. Mappings with a null remote/local role or a
 * blank local role name are silently dropped; non-existent local roles are dropped with a
 * debug log. Both the role mappings and the IdP role list of {@code roleConfiguration} are
 * replaced with the validated subsets.
 *
 * @param tenantDomain      tenant domain (used in error messages)
 * @param tenantId          tenant id used to obtain the user store manager
 * @param roleConfiguration configuration to validate and update in place
 * @throws IdentityProviderManagementException when the user store manager cannot be obtained
 */
private void verifyAndUpdateRoleConfiguration(String tenantDomain, int tenantId,
        PermissionsAndRoleConfig roleConfiguration) throws IdentityProviderManagementException {
    List<RoleMapping> validRoleMappings = new ArrayList<>();
    List<String> validIdPRoles = new ArrayList<>();
    for (RoleMapping mapping : roleConfiguration.getRoleMappings()) {
        try {
            // Skip structurally incomplete mappings.
            if (mapping.getRemoteRole() == null || mapping.getLocalRole() == null || StringUtils
                    .isBlank(mapping.getLocalRole().getLocalRoleName())) {
                continue;
            }
            UserStoreManager usm = IdPManagementServiceComponent.getRealmService().getTenantUserRealm(tenantId)
                    .getUserStoreManager();
            String role = mapping.getLocalRole().getLocalRoleName();
            // Qualify the role name with its user store domain when one is specified.
            if (StringUtils.isNotBlank(mapping.getLocalRole().getUserStoreId())) {
                role = IdentityUtil.addDomainToName(role, mapping.getLocalRole().getUserStoreId());
            }
            // Remove invalid mappings if local role does not exists.
            if (usm.isExistingRole(role)) {
                validRoleMappings.add(mapping);
                validIdPRoles.add(mapping.getRemoteRole());
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("Invalid local role name: " + role + " for the federated role: " + mapping
                            .getRemoteRole());
                }
            }
        } catch (UserStoreException e) {
            throw new IdentityProviderManagementException(
                    "Error occurred while retrieving UserStoreManager for tenant " + tenantDomain, e);
        }
    }
    roleConfiguration.setRoleMappings(validRoleMappings.toArray(new RoleMapping[0]));
    roleConfiguration.setIdpRoles(validIdPRoles.toArray(new String[0]));
}
/**
 * Validate input parameters for the getIdPByResourceId function.
 *
 * @param resourceId Identity Provider resource ID.
 * @throws IdentityProviderManagementException when the resource ID is null or empty
 */
private void validateGetIdPInputValues(String resourceId) throws IdentityProviderManagementException {

    if (StringUtils.isEmpty(resourceId)) {
        throw IdPManagementUtil.handleClientException(
                IdPManagementConstants.ErrorMessage.ERROR_CODE_IDP_GET_REQUEST_INVALID,
                "Invalid argument: Identity Provider resource ID value is empty");
    }
}
/**
 * Validate input parameters for the addIdPWithResourceId function: the IdP name must not
 * already be used by a database-stored or (non-shared) file-based IdP.
 *
 * @param idpName      Identity Provider name.
 * @param tenantDomain Tenant domain of IDP.
 * @throws IdentityProviderManagementException when an IdP with the same name already exists
 */
private void validateAddIdPInputValues(String idpName, String tenantDomain) throws
        IdentityProviderManagementException {

    // Reject the add when an IdP with this name already exists in the database.
    if (IdentityProviderManager.getInstance().getIdPByName(idpName, tenantDomain, true) != null) {
        throw IdPManagementUtil.handleClientException(
                IdPManagementConstants.ErrorMessage.ERROR_CODE_IDP_ALREADY_EXISTS, idpName);
    }
    // File-based IdPs also reserve their names. IdPs whose name starts with the shared prefix
    // are exempt here because the UI blocks them earlier, at the service layer.
    boolean fileBasedNameTaken = IdPManagementServiceComponent.getFileBasedIdPs().containsKey(idpName)
            && !idpName.startsWith(IdPManagementConstants.SHARED_IDP_PREFIX);
    if (fileBasedNameTaken) {
        throw IdPManagementUtil.handleClientException(
                IdPManagementConstants.ErrorMessage.ERROR_CODE_IDP_ALREADY_EXISTS, idpName);
    }
}
/**
 * Validate input parameters for the updateIdPByResourceId function: the IdP being updated must
 * exist, and its new name must not collide with a file-based IdP.
 *
 * @param currentIdentityProvider Old Identity Provider Information.
 * @param resourceId              Identity Provider's resource ID.
 * @param newIdPName              New Identity Provider name.
 * @throws IdentityProviderManagementException when validation fails
 */
private void validateUpdateIdPInputValues(IdentityProvider currentIdentityProvider, String resourceId, String
        newIdPName) throws IdentityProviderManagementException {

    // The new name must not collide with a file-based IdP.
    if (IdPManagementServiceComponent.getFileBasedIdPs().containsKey(newIdPName)) {
        throw IdPManagementUtil.handleClientException(
                IdPManagementConstants.ErrorMessage.ERROR_CODE_IDP_ALREADY_EXISTS, newIdPName);
    }
    // The IdP being updated must exist.
    if (currentIdentityProvider == null) {
        throw IdPManagementUtil.handleClientException(
                IdPManagementConstants.ErrorMessage.ERROR_CODE_IDP_DOES_NOT_EXIST, resourceId);
    }
}
/**
 * Check whether a permission-and-role configuration with role mappings is present on the IdP.
 *
 * @param identityProvider Identity Provider information.
 * @return {@code true} when both the config and its role mappings are non-null.
 */
private boolean isPermissionAndRoleConfigExist(IdentityProvider identityProvider) {

    PermissionsAndRoleConfig roleConfig = identityProvider.getPermissionAndRoleConfig();
    return roleConfig != null && roleConfig.getRoleMappings() != null;
}
/**
 * Check whether metadata is present for the IdP, i.e. both the IdP name and the metadata
 * string are non-empty.
 *
 * @param idpName  Identity Provider name.
 * @param metadata Metadata string.
 * @return whether metadata exists.
 */
private boolean isMetadataFileExist(String idpName, String metadata) {

    return !StringUtils.isEmpty(idpName) && !StringUtils.isEmpty(metadata);
}
}
|
components/idp-mgt/org.wso2.carbon.idp.mgt/src/main/java/org/wso2/carbon/idp/mgt/IdentityProviderManager.java
|
/*
* Copyright (c) 2014 WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.idp.mgt;
import org.apache.axiom.om.util.Base64;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.base.ServerConfiguration;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.core.util.KeyStoreManager;
import org.wso2.carbon.identity.application.common.ApplicationAuthenticatorService;
import org.wso2.carbon.identity.application.common.ProvisioningConnectorService;
import org.wso2.carbon.identity.application.common.model.ClaimConfig;
import org.wso2.carbon.identity.application.common.model.ClaimMapping;
import org.wso2.carbon.identity.application.common.model.FederatedAuthenticatorConfig;
import org.wso2.carbon.identity.application.common.model.IdentityProvider;
import org.wso2.carbon.identity.application.common.model.IdentityProviderProperty;
import org.wso2.carbon.identity.application.common.model.LocalRole;
import org.wso2.carbon.identity.application.common.model.PermissionsAndRoleConfig;
import org.wso2.carbon.identity.application.common.model.Property;
import org.wso2.carbon.identity.application.common.model.ProvisioningConnectorConfig;
import org.wso2.carbon.identity.application.common.model.RoleMapping;
import org.wso2.carbon.identity.application.common.util.IdentityApplicationConstants;
import org.wso2.carbon.identity.application.common.util.IdentityApplicationManagementUtil;
import org.wso2.carbon.identity.base.IdentityConstants;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.util.IdentityTenantUtil;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.idp.mgt.dao.CacheBackedIdPMgtDAO;
import org.wso2.carbon.idp.mgt.dao.FileBasedIdPMgtDAO;
import org.wso2.carbon.idp.mgt.dao.IdPManagementDAO;
import org.wso2.carbon.idp.mgt.internal.IdPManagementServiceComponent;
import org.wso2.carbon.idp.mgt.internal.IdpMgtServiceComponentHolder;
import org.wso2.carbon.idp.mgt.listener.IdentityProviderMgtListener;
import org.wso2.carbon.identity.core.model.ExpressionNode;
import org.wso2.carbon.idp.mgt.object.IdpSearchResult;
import org.wso2.carbon.identity.core.model.Node;
import org.wso2.carbon.identity.core.model.OperationNode;
import org.wso2.carbon.identity.core.model.FilterTreeBuilder;
import org.wso2.carbon.idp.mgt.util.IdPManagementConstants;
import org.wso2.carbon.idp.mgt.util.IdPManagementUtil;
import org.wso2.carbon.idp.mgt.util.MetadataConverter;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.api.UserStoreManager;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.KeyStore;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.stream.XMLStreamException;
public class IdentityProviderManager implements IdpManager {
private static final Log log = LogFactory.getLog(IdentityProviderManager.class);
// Cache-backed DAO shared by all operations; wraps the JDBC-based IdPManagementDAO.
private static CacheBackedIdPMgtDAO dao = new CacheBackedIdPMgtDAO(new IdPManagementDAO());
// Eagerly-initialized singleton instance (see getInstance()).
private static volatile IdentityProviderManager instance = new IdentityProviderManager();
private static final String OPENID_IDP_ENTITY_ID = "IdPEntityId";
// Converter used to save/delete SAML SSO metadata; assigned when a converter that can handle
// a metadata property is found. NOTE(review): field name breaks lowerCamelCase convention.
private MetadataConverter SAML2SSOMetadataConverter = null;
// Private constructor: this class is a singleton, obtained via getInstance().
private IdentityProviderManager() {
}
/**
 * Returns the shared singleton {@link IdentityProviderManager} instance.
 *
 * @return the singleton IdentityProviderManager
 */
public static IdentityProviderManager getInstance() {
    return instance;
}
/**
* Retrieves resident Identity provider for a given tenant
*
* @param tenantDomain Tenant domain whose resident IdP is requested
* @return <code>LocalIdentityProvider</code>
* @throws IdentityProviderManagementException Error when getting Resident Identity Providers
*/
@Override
public IdentityProvider getResidentIdP(String tenantDomain)
throws IdentityProviderManagementException {
IdPManagementUtil.setTenantSpecifiers(tenantDomain);
String openIdUrl;
String samlSSOUrl;
String samlLogoutUrl;
String samlECPUrl;
String samlArtifactUrl;
String oauth1RequestTokenUrl;
String oauth1AuthorizeUrl;
String oauth1AccessTokenUrl;
String oauth2AuthzEPUrl;
String oauth2TokenEPUrl;
String oauth2RevokeEPUrl;
String oauth2IntrospectEpUrl;
String oauth2UserInfoEPUrl;
String oidcCheckSessionEPUrl;
String oidcLogoutEPUrl;
String oIDCWebFingerEPUrl;
String oAuth2DCREPUrl;
String oAuth2JWKSPage;
String oIDCDiscoveryEPUrl;
String passiveStsUrl;
String stsUrl;
String scimUsersEndpoint;
String scimGroupsEndpoint;
String scim2UsersEndpoint;
String scim2GroupsEndpoint;
openIdUrl = IdentityUtil.getProperty(IdentityConstants.ServerConfig.OPENID_SERVER_URL);
samlECPUrl = IdentityUtil.getProperty(IdentityConstants.ServerConfig.SAML_ECP_URL);
samlArtifactUrl = IdentityUtil.getProperty(IdentityConstants.ServerConfig.SSO_ARTIFACT_URL);
oauth1RequestTokenUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH1_REQUEST_TOKEN_URL);
oauth1AuthorizeUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH1_AUTHORIZE_URL);
oauth1AccessTokenUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH1_ACCESSTOKEN_URL);
oauth2AuthzEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_AUTHZ_EP_URL);
oauth2TokenEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_TOKEN_EP_URL);
oauth2UserInfoEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_USERINFO_EP_URL);
oidcCheckSessionEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_CHECK_SESSION_EP_URL);
oidcLogoutEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_LOGOUT_EP_URL);
passiveStsUrl = IdentityUtil.getProperty(IdentityConstants.STS.PSTS_IDENTITY_PROVIDER_URL);
stsUrl = IdentityUtil.getProperty(IdentityConstants.STS.STS_IDENTITY_PROVIDER_URL);
scimUsersEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM.USER_EP_URL);
scimGroupsEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM.GROUP_EP_URL);
scim2UsersEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM2.USER_EP_URL);
scim2GroupsEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM2.GROUP_EP_URL);
oauth2RevokeEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_REVOKE_EP_URL);
oauth2IntrospectEpUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_INTROSPECT_EP_URL);
oIDCWebFingerEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_WEB_FINGER_EP_URL);
oAuth2DCREPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_DCR_EP_URL);
oAuth2JWKSPage = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_JWKS_EP_URL);
oIDCDiscoveryEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_DISCOVERY_EP_URL);
if (StringUtils.isBlank(openIdUrl)) {
openIdUrl = IdentityUtil.getServerURL(IdentityConstants.OpenId.OPENID, true, true);
}
samlSSOUrl = IdentityUtil.getServerURL(IdentityConstants.ServerConfig.SAMLSSO, true, true)
+ IdPManagementUtil.getTenantParameter();
samlLogoutUrl = IdentityUtil.getServerURL(IdentityConstants.ServerConfig.SAMLSSO, true, true)
+ IdPManagementUtil.getTenantParameter();
if (StringUtils.isBlank(samlArtifactUrl)) {
samlArtifactUrl = IdentityUtil.getServerURL(IdentityConstants.ServerConfig.SAMLSSO, true, true);
}
if (StringUtils.isBlank(samlECPUrl)) {
samlECPUrl = IdentityUtil.getServerURL(IdentityConstants.ServerConfig.SAMLSSO, true, true);
}
if (StringUtils.isBlank(oauth1RequestTokenUrl)) {
oauth1RequestTokenUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.REQUEST_TOKEN, true, true);
}
if (StringUtils.isBlank(oauth1AuthorizeUrl)) {
oauth1AuthorizeUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.AUTHORIZE_URL, true, true);
}
if (StringUtils.isBlank(oauth1AccessTokenUrl)) {
oauth1AccessTokenUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.ACCESS_TOKEN, true, true);
}
if (StringUtils.isBlank(oauth2AuthzEPUrl)) {
oauth2AuthzEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.AUTHORIZE, true, false);
}
if (StringUtils.isBlank(oauth2TokenEPUrl)) {
oauth2TokenEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.TOKEN, true, false);
}
if (StringUtils.isBlank(oauth2RevokeEPUrl)) {
oauth2RevokeEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.REVOKE, true, false);
}
if (StringUtils.isBlank(oauth2IntrospectEpUrl)) {
oauth2IntrospectEpUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.INTROSPECT, true, false);
}
if (StringUtils.isBlank(oauth2UserInfoEPUrl)) {
oauth2UserInfoEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.USERINFO, true, false);
}
if (StringUtils.isBlank(oidcCheckSessionEPUrl)) {
oidcCheckSessionEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.CHECK_SESSION, true, false);
}
if (StringUtils.isBlank(oidcLogoutEPUrl)) {
oidcLogoutEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.LOGOUT, true, false);
}
if (StringUtils.isBlank(passiveStsUrl)) {
passiveStsUrl = IdentityUtil.getServerURL(IdentityConstants.STS.PASSIVE_STS, true, true);
}
if (StringUtils.isBlank(oIDCWebFingerEPUrl)) {
oIDCWebFingerEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.WEBFINGER, true, true);
}
if (StringUtils.isBlank(oAuth2DCREPUrl)) {
oAuth2DCREPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.DCR, true, true);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
oAuth2DCREPUrl = getTenantUrl(oAuth2DCREPUrl, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("OAuth 2 DCR endpoint is malformed");
}
if (StringUtils.isBlank(oAuth2JWKSPage)) {
oAuth2JWKSPage = IdentityUtil.getServerURL(IdentityConstants.OAuth.JWKS, true, true);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
oAuth2JWKSPage = getTenantUrl(oAuth2JWKSPage, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("OAuth 2 JWKS endpoint is malformed");
}
if (StringUtils.isBlank(oIDCDiscoveryEPUrl)) {
oIDCDiscoveryEPUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.DISCOVERY, true, true);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
oIDCDiscoveryEPUrl = getTenantUrl(oIDCDiscoveryEPUrl, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("OIDC Discovery endpoint is malformed");
}
// If sts url is configured in file, change it according to tenant domain. If not configured, add a default url
if (StringUtils.isNotBlank(stsUrl)) {
stsUrl = stsUrl.replace(IdentityConstants.STS.WSO2_CARBON_STS, IdPManagementUtil.getTenantContext() +
IdentityConstants.STS.WSO2_CARBON_STS);
} else {
stsUrl = IdentityUtil.getServerURL("services/" + IdPManagementUtil.getTenantContext() +
IdentityConstants.STS.WSO2_CARBON_STS, true, true);
}
if (StringUtils.isBlank(scimUsersEndpoint)) {
scimUsersEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM.USER_EP, true, false);
}
if (StringUtils.isBlank(scimGroupsEndpoint)) {
scimGroupsEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM.GROUP_EP, true, false);
}
if (StringUtils.isBlank(scim2UsersEndpoint)) {
scim2UsersEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM2.USER_EP, true, false);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
scim2UsersEndpoint = getTenantUrl(scim2UsersEndpoint, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("SCIM 2.0 Users endpoint is malformed");
}
if (StringUtils.isBlank(scim2GroupsEndpoint)) {
scim2GroupsEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM2.GROUP_EP, true, false);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
scim2GroupsEndpoint = getTenantUrl(scim2GroupsEndpoint, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("SCIM 2.0 Groups endpoint is malformed");
}
IdentityProvider identityProvider = dao.getIdPByName(null,
IdentityApplicationConstants.RESIDENT_IDP_RESERVED_NAME,
IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
if (identityProvider == null) {
String message = "Could not find Resident Identity Provider for tenant " + tenantDomain;
throw new IdentityProviderManagementException(message);
}
int tenantId = -1;
try {
tenantId = IdPManagementServiceComponent.getRealmService().getTenantManager().getTenantId(tenantDomain);
} catch (UserStoreException e) {
throw new IdentityProviderManagementException(
"Exception occurred while retrieving Tenant ID from Tenant Domain " + tenantDomain, e);
}
X509Certificate cert = null;
try {
IdentityTenantUtil.initializeRegistry(tenantId, tenantDomain);
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext();
carbonContext.setTenantDomain(tenantDomain, true);
KeyStoreManager keyStoreManager = KeyStoreManager.getInstance(tenantId);
if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
// derive key store name
String ksName = tenantDomain.trim().replace(".", "-");
// derive JKS name
String jksName = ksName + ".jks";
KeyStore keyStore = keyStoreManager.getKeyStore(jksName);
cert = (X509Certificate) keyStore.getCertificate(tenantDomain);
} else {
cert = keyStoreManager.getDefaultPrimaryCertificate();
}
} catch (Exception e) {
String msg = "Error retrieving primary certificate for tenant : " + tenantDomain;
throw new IdentityProviderManagementException(msg, e);
} finally {
PrivilegedCarbonContext.endTenantFlow();
}
if (cert == null) {
throw new IdentityProviderManagementException(
"Cannot find the primary certificate for tenant " + tenantDomain);
}
try {
identityProvider.setCertificate(Base64.encode(cert.getEncoded()));
} catch (CertificateEncodingException e) {
String msg = "Error occurred while encoding primary certificate for tenant domain " + tenantDomain;
throw new IdentityProviderManagementException(msg, e);
}
List<FederatedAuthenticatorConfig> fedAuthnCofigs = new ArrayList<FederatedAuthenticatorConfig>();
List<Property> propertiesList = null;
FederatedAuthenticatorConfig openIdFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.OpenID.NAME);
if (openIdFedAuthn == null) {
openIdFedAuthn = new FederatedAuthenticatorConfig();
openIdFedAuthn.setName(IdentityApplicationConstants.Authenticator.OpenID.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(openIdFedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(openIdFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OpenID.OPEN_ID_URL) == null) {
Property openIdUrlProp = new Property();
openIdUrlProp.setName(IdentityApplicationConstants.Authenticator.OpenID.OPEN_ID_URL);
openIdUrlProp.setValue(openIdUrl);
propertiesList.add(openIdUrlProp);
}
openIdFedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(openIdFedAuthn);
FederatedAuthenticatorConfig saml2SSOFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
if (saml2SSOFedAuthn == null) {
saml2SSOFedAuthn = new FederatedAuthenticatorConfig();
saml2SSOFedAuthn.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
}
propertiesList = new ArrayList<>();
Property samlSSOUrlProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.SAML2SSO.SSO_URL);
if (samlSSOUrlProperty == null) {
samlSSOUrlProperty = new Property();
samlSSOUrlProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.SSO_URL);
// Set the generated saml sso endpoint value.
samlSSOUrlProperty.setValue(samlSSOUrl);
}
propertiesList.add(samlSSOUrlProperty);
Property samlLogoutUrlProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.SAML2SSO.LOGOUT_REQ_URL);
if (samlLogoutUrlProperty == null) {
samlLogoutUrlProperty = new Property();
samlLogoutUrlProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.LOGOUT_REQ_URL);
// Set the generated saml slo endpoint value.
samlLogoutUrlProperty.setValue(samlLogoutUrl);
}
propertiesList.add(samlLogoutUrlProperty);
Property samlECPUrlProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.SAML2SSO.ECP_URL);
if (samlECPUrlProperty == null) {
samlECPUrlProperty = new Property();
samlECPUrlProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.ECP_URL);
}
//set the generated saml ecp endpoint value
samlECPUrlProperty.setValue(samlECPUrl);
propertiesList.add(samlECPUrlProperty);
Property idPEntityIdProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID);
if (idPEntityIdProperty == null) {
idPEntityIdProperty = new Property();
idPEntityIdProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID);
idPEntityIdProperty.setValue(IdPManagementUtil.getResidentIdPEntityId());
}
propertiesList.add(idPEntityIdProperty);
Property samlArtifactUrlProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.SAML2SSO.ARTIFACT_RESOLVE_URL);
if (samlArtifactUrlProperty == null) {
samlArtifactUrlProperty = new Property();
samlArtifactUrlProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.ARTIFACT_RESOLVE_URL);
}
samlArtifactUrlProperty.setValue(samlArtifactUrl);
propertiesList.add(samlArtifactUrlProperty);
for (Property property : saml2SSOFedAuthn.getProperties()) {
if (property != null &&
!IdentityApplicationConstants.Authenticator.SAML2SSO.SSO_URL.equals(property.getName()) &&
!IdentityApplicationConstants.Authenticator.SAML2SSO.LOGOUT_REQ_URL.equals(property.getName()) &&
!IdentityApplicationConstants.Authenticator.SAML2SSO.ECP_URL.equals(property.getName()) &&
!IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(property.getName())) {
propertiesList.add(property);
}
}
Property samlMetadataValidityPeriodProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.
getProperties(), IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD);
if (samlMetadataValidityPeriodProperty == null) {
samlMetadataValidityPeriodProperty = new Property();
samlMetadataValidityPeriodProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD);
samlMetadataValidityPeriodProperty.setValue(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD_DEFAULT);
}
propertiesList.add(samlMetadataValidityPeriodProperty);
Property samlMetadataSigningEnabledProperty = IdentityApplicationManagementUtil.getProperty(saml2SSOFedAuthn.
getProperties(), IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED);
if (samlMetadataSigningEnabledProperty == null) {
samlMetadataSigningEnabledProperty = new Property();
samlMetadataSigningEnabledProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED);
samlMetadataSigningEnabledProperty.setValue(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED_DEFAULT);
}
propertiesList.add(samlMetadataSigningEnabledProperty);
saml2SSOFedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(saml2SSOFedAuthn);
FederatedAuthenticatorConfig oauth1FedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.OAuth10A.NAME);
if (oauth1FedAuthn == null) {
oauth1FedAuthn = new FederatedAuthenticatorConfig();
oauth1FedAuthn.setName(IdentityApplicationConstants.OAuth10A.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(oauth1FedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(oauth1FedAuthn.getProperties(),
IdentityApplicationConstants.OAuth10A.OAUTH1_REQUEST_TOKEN_URL) == null) {
Property oauth1ReqTokUrlProp = new Property();
oauth1ReqTokUrlProp.setName(IdentityApplicationConstants.OAuth10A.OAUTH1_REQUEST_TOKEN_URL);
oauth1ReqTokUrlProp.setValue(oauth1RequestTokenUrl);
propertiesList.add(oauth1ReqTokUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oauth1FedAuthn.getProperties(),
IdentityApplicationConstants.OAuth10A.OAUTH1_AUTHORIZE_URL) == null) {
Property oauth1AuthzUrlProp = new Property();
oauth1AuthzUrlProp.setName(IdentityApplicationConstants.OAuth10A.OAUTH1_AUTHORIZE_URL);
oauth1AuthzUrlProp.setValue(oauth1AuthorizeUrl);
propertiesList.add(oauth1AuthzUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oauth1FedAuthn.getProperties(),
IdentityApplicationConstants.OAuth10A.OAUTH1_ACCESS_TOKEN_URL) == null) {
Property oauth1AccessTokUrlProp = new Property();
oauth1AccessTokUrlProp.setName(IdentityApplicationConstants.OAuth10A.OAUTH1_ACCESS_TOKEN_URL);
oauth1AccessTokUrlProp.setValue(oauth1AccessTokenUrl);
propertiesList.add(oauth1AccessTokUrlProp);
}
oauth1FedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(oauth1FedAuthn);
FederatedAuthenticatorConfig oidcFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.OIDC.NAME);
if (oidcFedAuthn == null) {
oidcFedAuthn = new FederatedAuthenticatorConfig();
oidcFedAuthn.setName(IdentityApplicationConstants.Authenticator.OIDC.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(oidcFedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
OPENID_IDP_ENTITY_ID) == null) {
Property idPEntityIdProp = new Property();
idPEntityIdProp.setName(OPENID_IDP_ENTITY_ID);
idPEntityIdProp.setValue(getOIDCResidentIdPEntityId());
propertiesList.add(idPEntityIdProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_AUTHZ_URL) == null) {
Property authzUrlProp = new Property();
authzUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_AUTHZ_URL);
authzUrlProp.setValue(oauth2AuthzEPUrl);
propertiesList.add(authzUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_TOKEN_URL) == null) {
Property tokenUrlProp = new Property();
tokenUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_TOKEN_URL);
tokenUrlProp.setValue(oauth2TokenEPUrl);
propertiesList.add(tokenUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_REVOKE_URL) == null) {
Property revokeUrlProp = new Property();
revokeUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_REVOKE_URL);
revokeUrlProp.setValue(oauth2RevokeEPUrl);
propertiesList.add(revokeUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_INTROSPECT_URL) == null) {
Property instropsectUrlProp = new Property();
instropsectUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_INTROSPECT_URL);
instropsectUrlProp.setValue(oauth2IntrospectEpUrl);
propertiesList.add(instropsectUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_USER_INFO_EP_URL) == null) {
Property userInfoUrlProp = new Property();
userInfoUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_USER_INFO_EP_URL);
userInfoUrlProp.setValue(oauth2UserInfoEPUrl);
propertiesList.add(userInfoUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OIDC_CHECK_SESSION_URL) == null) {
Property checkSessionUrlProp = new Property();
checkSessionUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OIDC_CHECK_SESSION_URL);
checkSessionUrlProp.setValue(oidcCheckSessionEPUrl);
propertiesList.add(checkSessionUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OIDC_LOGOUT_URL) == null) {
Property logoutUrlProp = new Property();
logoutUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OIDC_LOGOUT_URL);
logoutUrlProp.setValue(oidcLogoutEPUrl);
propertiesList.add(logoutUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_DCR_EP_URL) == null) {
Property dcrUrlProp = new Property();
dcrUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_DCR_EP_URL);
dcrUrlProp.setValue(oAuth2DCREPUrl);
propertiesList.add(dcrUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OIDC_WEB_FINGER_EP_URL) == null) {
Property webFingerUrlProp = new Property();
webFingerUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OIDC_WEB_FINGER_EP_URL);
webFingerUrlProp.setValue(oIDCWebFingerEPUrl);
propertiesList.add(webFingerUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_JWKS_EP_URL) == null) {
Property jwksUrlProp = new Property();
jwksUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_JWKS_EP_URL);
jwksUrlProp.setValue(oAuth2JWKSPage);
propertiesList.add(jwksUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oidcFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OIDC.OIDC_DISCOVERY_EP_URL) == null) {
Property discoveryUrlProp = new Property();
discoveryUrlProp.setName(IdentityApplicationConstants.Authenticator.OIDC.OIDC_DISCOVERY_EP_URL);
discoveryUrlProp.setValue(oIDCDiscoveryEPUrl);
propertiesList.add(discoveryUrlProp);
}
oidcFedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(oidcFedAuthn);
FederatedAuthenticatorConfig passiveSTSFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.PassiveSTS.NAME);
if (passiveSTSFedAuthn == null) {
passiveSTSFedAuthn = new FederatedAuthenticatorConfig();
passiveSTSFedAuthn.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.NAME);
}
propertiesList = new ArrayList<>();
Property passiveSTSUrlProperty = IdentityApplicationManagementUtil.getProperty(passiveSTSFedAuthn
.getProperties(), IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL);
if (passiveSTSUrlProperty == null) {
passiveSTSUrlProperty = new Property();
passiveSTSUrlProperty.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL);
}
passiveSTSUrlProperty.setValue(passiveStsUrl);
propertiesList.add(passiveSTSUrlProperty);
Property stsIdPEntityIdProperty = IdentityApplicationManagementUtil.getProperty(passiveSTSFedAuthn
.getProperties(), IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_ENTITY_ID);
if (stsIdPEntityIdProperty == null) {
stsIdPEntityIdProperty = new Property();
stsIdPEntityIdProperty.setName(IdentityApplicationConstants.Authenticator.PassiveSTS
.IDENTITY_PROVIDER_ENTITY_ID);
stsIdPEntityIdProperty.setValue(IdPManagementUtil.getResidentIdPEntityId());
}
propertiesList.add(stsIdPEntityIdProperty);
for (Property property : passiveSTSFedAuthn.getProperties()) {
if (property != null && !IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL
.equals(property.getName()) && !IdentityApplicationConstants.Authenticator.PassiveSTS
.IDENTITY_PROVIDER_ENTITY_ID.equals(property.getName())) {
propertiesList.add(property);
}
}
passiveSTSFedAuthn
.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(passiveSTSFedAuthn);
FederatedAuthenticatorConfig stsFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.WSTrust.NAME);
if (stsFedAuthn == null) {
stsFedAuthn = new FederatedAuthenticatorConfig();
stsFedAuthn.setName(IdentityApplicationConstants.Authenticator.WSTrust.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(stsFedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(stsFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.WSTrust.IDENTITY_PROVIDER_URL) == null) {
Property stsUrlProp = new Property();
stsUrlProp.setName(IdentityApplicationConstants.Authenticator.WSTrust.IDENTITY_PROVIDER_URL);
stsUrlProp.setValue(stsUrl);
propertiesList.add(stsUrlProp);
}
stsFedAuthn
.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(stsFedAuthn);
List<IdentityProviderProperty> identityProviderProperties = new ArrayList<IdentityProviderProperty>();
FederatedAuthenticatorConfig sessionTimeoutConfig = new FederatedAuthenticatorConfig();
sessionTimeoutConfig.setName(IdentityApplicationConstants.NAME);
propertiesList = new ArrayList<Property>(Arrays.asList(sessionTimeoutConfig.getProperties()));
Property cleanUpPeriodProp = new Property();
cleanUpPeriodProp.setName(IdentityApplicationConstants.CLEAN_UP_PERIOD);
String cleanUpPeriod = IdentityUtil.getProperty(IdentityConstants.ServerConfig.CLEAN_UP_PERIOD);
if (StringUtils.isBlank(cleanUpPeriod)) {
cleanUpPeriod = IdentityApplicationConstants.CLEAN_UP_PERIOD_DEFAULT;
} else if (!StringUtils.isNumeric(cleanUpPeriod)) {
log.warn("PersistanceCleanUpPeriod in identity.xml should be a numeric value");
cleanUpPeriod = IdentityApplicationConstants.CLEAN_UP_PERIOD_DEFAULT;
}
cleanUpPeriodProp.setValue(cleanUpPeriod);
propertiesList.add(cleanUpPeriodProp);
sessionTimeoutConfig.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(sessionTimeoutConfig);
identityProvider.setFederatedAuthenticatorConfigs(fedAuthnCofigs
.toArray(new FederatedAuthenticatorConfig[fedAuthnCofigs.size()]));
ProvisioningConnectorConfig scimProvConn = IdentityApplicationManagementUtil
.getProvisioningConnector(identityProvider.getProvisioningConnectorConfigs(),
"scim");
if (scimProvConn == null) {
scimProvConn = new ProvisioningConnectorConfig();
scimProvConn.setName("scim");
}
propertiesList = new ArrayList<>(Arrays.asList(scimProvConn.getProvisioningProperties()));
Property scimUserEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM.USERS_EP_URL);
if (scimUserEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM.USERS_EP_URL);
property.setValue(scimUsersEndpoint);
propertiesList.add(property);
} else if (!scimUsersEndpoint.equalsIgnoreCase(scimUserEndpointProperty.getValue())) {
scimUserEndpointProperty.setValue(scimUsersEndpoint);
}
Property scimGroupEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM.GROUPS_EP_URL);
if (scimGroupEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM.GROUPS_EP_URL);
property.setValue(scimGroupsEndpoint);
propertiesList.add(property);
} else if (!scimGroupsEndpoint.equalsIgnoreCase(scimGroupEndpointProperty.getValue())) {
scimGroupEndpointProperty.setValue(scimGroupsEndpoint);
}
Property scim2UserEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM2.USERS_EP_URL);
if (scim2UserEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM2.USERS_EP_URL);
property.setValue(scim2UsersEndpoint);
propertiesList.add(property);
} else if (!scim2UsersEndpoint.equalsIgnoreCase(scim2UserEndpointProperty.getValue())) {
scim2UserEndpointProperty.setValue(scim2UsersEndpoint);
}
Property scim2GroupEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM2.GROUPS_EP_URL);
if (scim2GroupEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM2.GROUPS_EP_URL);
property.setValue(scim2GroupsEndpoint);
propertiesList.add(property);
} else if (!scim2GroupsEndpoint.equalsIgnoreCase(scim2GroupEndpointProperty.getValue())) {
scim2GroupEndpointProperty.setValue(scim2GroupsEndpoint);
}
scimProvConn.setProvisioningProperties(propertiesList.toArray(new Property[propertiesList.size()]));
identityProvider.setProvisioningConnectorConfigs(new ProvisioningConnectorConfig[]{scimProvConn});
// Override few endpoint URLs which are initially persisted in the database and can be out dated with hostname
// changes.
overrideResidentIdpEPUrls(identityProvider);
return identityProvider;
}
/**
* Add Resident Identity provider for a given tenant
*
* @param identityProvider <code>IdentityProvider</code>
* @param tenantDomain Tenant domain whose resident IdP is requested
* @throws IdentityProviderManagementException Error when adding Resident Identity Provider
*/
@Override
public void addResidentIdP(IdentityProvider identityProvider, String tenantDomain)
throws IdentityProviderManagementException {
// invoking the pre listeners
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreAddResidentIdP(identityProvider, tenantDomain)) {
return;
}
}
if (identityProvider.getFederatedAuthenticatorConfigs() == null) {
identityProvider.setFederatedAuthenticatorConfigs(new FederatedAuthenticatorConfig[0]);
}
FederatedAuthenticatorConfig saml2SSOResidentAuthenticatorConfig = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
if (saml2SSOResidentAuthenticatorConfig == null) {
saml2SSOResidentAuthenticatorConfig = new FederatedAuthenticatorConfig();
saml2SSOResidentAuthenticatorConfig.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
}
if (saml2SSOResidentAuthenticatorConfig.getProperties() == null) {
saml2SSOResidentAuthenticatorConfig.setProperties(new Property[0]);
}
boolean idPEntityIdAvailable = false;
for (Property property : saml2SSOResidentAuthenticatorConfig.getProperties()) {
if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(property.getName())) {
idPEntityIdAvailable = true;
}
}
if (!idPEntityIdAvailable) {
Property property = new Property();
property.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID);
property.setValue(IdPManagementUtil.getResidentIdPEntityId());
if (saml2SSOResidentAuthenticatorConfig.getProperties().length > 0) {
List<Property> properties = Arrays.asList(saml2SSOResidentAuthenticatorConfig.getProperties());
properties.add(property);
saml2SSOResidentAuthenticatorConfig.setProperties((Property[]) properties.toArray());
} else {
saml2SSOResidentAuthenticatorConfig.setProperties(new Property[]{property});
}
}
Property samlMetadataValidityPeriodProperty = new Property();
String samlMetadataValidityPeriod = IdentityUtil.getProperty(IdentityConstants.ServerConfig.
SAML_METADATA_VALIDITY_PERIOD);
if (StringUtils.isBlank(samlMetadataValidityPeriod)) {
samlMetadataValidityPeriod = IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD_DEFAULT;
} else if (!StringUtils.isNumeric(samlMetadataValidityPeriod) ||
Integer.parseInt(samlMetadataValidityPeriod) <= 0) {
log.warn("SAMLMetadataValidityPeriod in identity.xml should be a numeric value " +
"hence defaulting to value: " + IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD_DEFAULT + "m");
samlMetadataValidityPeriod = IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD_DEFAULT;
}
samlMetadataValidityPeriodProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD);
samlMetadataValidityPeriodProperty.setValue(samlMetadataValidityPeriod);
Property samlMetadataSigningEnabledProperty = new Property();
String samlMetadataSigningEnabled = IdentityUtil.getProperty(IdentityConstants.ServerConfig.
SAML_METADATA_SIGNING_ENABLED);
if (StringUtils.isBlank(samlMetadataSigningEnabled)) {
log.warn("SAMLMetadataSigningEnabled in identity.xml should be a boolean value");
samlMetadataSigningEnabled = IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED_DEFAULT;
}
samlMetadataSigningEnabledProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED);
samlMetadataSigningEnabledProperty.setValue(samlMetadataSigningEnabled);
List<Property> propertyList = new ArrayList<>(Arrays.asList(saml2SSOResidentAuthenticatorConfig.getProperties()));
propertyList.add(samlMetadataValidityPeriodProperty);
propertyList.add(samlMetadataSigningEnabledProperty);
Property[] properties = new Property[propertyList.size()];
properties = propertyList.toArray(properties);
saml2SSOResidentAuthenticatorConfig.setProperties(properties);
Property oidcProperty = new Property();
oidcProperty.setName(OPENID_IDP_ENTITY_ID);
oidcProperty.setValue(getOIDCResidentIdPEntityId());
FederatedAuthenticatorConfig oidcAuthenticationConfig = new FederatedAuthenticatorConfig();
oidcAuthenticationConfig.setProperties(new Property[]{oidcProperty});
oidcAuthenticationConfig.setName(IdentityApplicationConstants.Authenticator.OIDC.NAME);
Property passiveStsProperty = new Property();
passiveStsProperty.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_ENTITY_ID);
passiveStsProperty.setValue(IdPManagementUtil.getResidentIdPEntityId());
FederatedAuthenticatorConfig passiveStsAuthenticationConfig = new FederatedAuthenticatorConfig();
passiveStsAuthenticationConfig.setProperties(new Property[]{passiveStsProperty});
passiveStsAuthenticationConfig.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.NAME);
FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs = {saml2SSOResidentAuthenticatorConfig,
passiveStsAuthenticationConfig, oidcAuthenticationConfig};
identityProvider.setFederatedAuthenticatorConfigs(IdentityApplicationManagementUtil
.concatArrays(identityProvider.getFederatedAuthenticatorConfigs(), federatedAuthenticatorConfigs));
IdentityProviderProperty[] idpProperties = new IdentityProviderProperty[2];
IdentityProviderProperty rememberMeTimeoutProperty = new IdentityProviderProperty();
String rememberMeTimeout = IdentityUtil.getProperty(IdentityConstants.ServerConfig.REMEMBER_ME_TIME_OUT);
if (StringUtils.isBlank(rememberMeTimeout) || !StringUtils.isNumeric(rememberMeTimeout) ||
Integer.parseInt(rememberMeTimeout) <= 0) {
log.warn("RememberMeTimeout in identity.xml should be a numeric value");
rememberMeTimeout = IdentityApplicationConstants.REMEMBER_ME_TIME_OUT_DEFAULT;
}
rememberMeTimeoutProperty.setName(IdentityApplicationConstants.REMEMBER_ME_TIME_OUT);
rememberMeTimeoutProperty.setValue(rememberMeTimeout);
IdentityProviderProperty sessionIdletimeOutProperty = new IdentityProviderProperty();
String idleTimeout = IdentityUtil.getProperty(IdentityConstants.ServerConfig.SESSION_IDLE_TIMEOUT);
if (StringUtils.isBlank(idleTimeout) || !StringUtils.isNumeric(idleTimeout) ||
Integer.parseInt(idleTimeout) <= 0) {
log.warn("SessionIdleTimeout in identity.xml should be a numeric value");
idleTimeout = IdentityApplicationConstants.SESSION_IDLE_TIME_OUT_DEFAULT;
}
sessionIdletimeOutProperty.setName(IdentityApplicationConstants.SESSION_IDLE_TIME_OUT);
sessionIdletimeOutProperty.setValue(idleTimeout);
idpProperties[0] = rememberMeTimeoutProperty;
idpProperties[1] = sessionIdletimeOutProperty;
identityProvider.setIdpProperties(idpProperties);
dao.addIdP(identityProvider, IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
// invoking the post listeners
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPostAddResidentIdP(identityProvider, tenantDomain)) {
return;
}
}
}
    /**
     * Update Resident Identity provider for a given tenant.
     *
     * Merges the incoming IdP properties over the currently persisted resident-IdP properties
     * (incoming values win; persisted-only keys are retained; incoming-only keys are appended),
     * validates the timeout/metadata properties, then persists via the DAO. Pre/post listeners
     * may veto the update by returning false.
     *
     * @param identityProvider <code>IdentityProvider</code>
     * @param tenantDomain     Tenant domain whose resident IdP is requested
     * @throws IdentityProviderManagementException Error when updating Resident Identity Provider
     */
    @Override
    public void updateResidentIdP(IdentityProvider identityProvider, String tenantDomain)
            throws IdentityProviderManagementException {
        // Snapshot of the incoming properties, keyed by name, for the merge below.
        IdentityProvider residentIdp = dao.getIdPByName(null, IdentityApplicationConstants.RESIDENT_IDP_RESERVED_NAME,
                IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
        Map<String, String> configurationDetails = new HashMap<>();
        for (IdentityProviderProperty property : identityProvider.getIdpProperties()) {
            configurationDetails.put(property.getName(), property.getValue());
        }
        IdentityProviderProperty[] identityMgtProperties = residentIdp.getIdpProperties();
        List<IdentityProviderProperty> newProperties = new ArrayList<>();
        // For every persisted property: keep it, overriding its value when the caller supplied one.
        // Consumed keys are removed from the map so only caller-only keys remain afterwards.
        for (IdentityProviderProperty identityMgtProperty : identityMgtProperties) {
            IdentityProviderProperty prop = new IdentityProviderProperty();
            String key = identityMgtProperty.getName();
            prop.setName(key);
            if (configurationDetails.containsKey(key)) {
                prop.setValue(configurationDetails.get(key));
            } else {
                prop.setValue(identityMgtProperty.getValue());
            }
            newProperties.add(prop);
            configurationDetails.remove(key);
        }
        // Append properties the caller supplied that the persisted resident IdP did not have.
        for (Map.Entry<String, String> entry : configurationDetails.entrySet()) {
            IdentityProviderProperty prop = new IdentityProviderProperty();
            prop.setName(entry.getKey());
            prop.setValue(entry.getValue());
            newProperties.add(prop);
        }
        identityProvider.setIdpProperties(newProperties.toArray(new IdentityProviderProperty[newProperties.size()]));
        // Validate the well-known properties of the merged set before touching the database.
        for (IdentityProviderProperty idpProp : identityProvider.getIdpProperties()) {
            if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.SESSION_IDLE_TIME_OUT)) {
                if (StringUtils.isBlank(idpProp.getValue()) || !StringUtils.isNumeric(idpProp.getValue()) ||
                        Integer.parseInt(idpProp.getValue().trim()) <= 0) {
                    throw new IdentityProviderManagementException(IdentityApplicationConstants.SESSION_IDLE_TIME_OUT
                            + " of ResidentIdP should be a numeric value greater than 0 ");
                }
            } else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.REMEMBER_ME_TIME_OUT)) {
                if (StringUtils.isBlank(idpProp.getValue()) || !StringUtils.isNumeric(idpProp.getValue()) ||
                        Integer.parseInt(idpProp.getValue().trim()) <= 0) {
                    throw new IdentityProviderManagementException(IdentityApplicationConstants.REMEMBER_ME_TIME_OUT
                            + " of ResidentIdP should be a numeric value greater than 0 ");
                }
            } else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.Authenticator.SAML2SSO.
                    SAML_METADATA_VALIDITY_PERIOD)) {
                if (StringUtils.isBlank(idpProp.getValue()) || !StringUtils.isNumeric(idpProp.getValue()) ||
                        Integer.parseInt(idpProp.getValue().trim()) <= 0) {
                    throw new IdentityProviderManagementException(IdentityApplicationConstants.Authenticator.SAML2SSO.
                            SAML_METADATA_VALIDITY_PERIOD + " of ResidentIdP should be a numeric value greater than 0 ");
                }
            } else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.Authenticator.SAML2SSO.
                    SAML_METADATA_SIGNING_ENABLED)) {
                if (StringUtils.isBlank(idpProp.getValue())) {
                    throw new IdentityProviderManagementException(IdentityApplicationConstants.Authenticator.SAML2SSO.
                            SAML_METADATA_SIGNING_ENABLED + " of ResidentIdP should be a boolean value ");
                }
            }
        }
        // invoking the pre listeners
        Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
        for (IdentityProviderMgtListener listener : listeners) {
            if (listener.isEnable() && !listener.doPreUpdateResidentIdP(identityProvider, tenantDomain)) {
                return;
            }
        }
        if (identityProvider.getFederatedAuthenticatorConfigs() == null) {
            identityProvider.setFederatedAuthenticatorConfigs(new FederatedAuthenticatorConfig[0]);
        }
        IdentityProvider currentIdP = IdentityProviderManager.getInstance().getIdPByName(
                IdentityApplicationConstants.RESIDENT_IDP_RESERVED_NAME, tenantDomain, true);
        int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
        // The IdP entity id may not be changed to one that collides with an existing IdP.
        validateUpdateOfIdPEntityId(currentIdP.getFederatedAuthenticatorConfigs(),
                identityProvider.getFederatedAuthenticatorConfigs(), tenantId, tenantDomain);
        dao.updateIdP(identityProvider, currentIdP, tenantId, tenantDomain);
        // invoking the post listeners
        for (IdentityProviderMgtListener listener : listeners) {
            if (listener.isEnable() && !listener.doPostUpdateResidentIdP(identityProvider, tenantDomain)) {
                return;
            }
        }
    }
/**
* Retrieves registered Identity finally {
* break;
* }providers for a given tenant
*
* @param tenantDomain Tenant domain whose IdP names are requested
* @return Set of <code>IdentityProvider</code>. IdP names, primary IdP and home realm
* identifiers of each IdP
* @throws IdentityProviderManagementException Error when getting list of Identity Providers
*/
@Override
public List<IdentityProvider> getIdPs(String tenantDomain)
throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
return dao.getIdPs(null, tenantId, tenantDomain);
}
/**
* Get all basic identity provider information.
*
* @param limit limit per page.
* @param offset offset value.
* @param filter filter value for IdP search.
* @param sortOrder order of IdP ASC/DESC.
* @param sortBy the column value need to sort.
* @param tenantDomain tenant domain whose IdP names are requested.
* @return Identity Provider's Basic Information array {@link IdpSearchResult}.
* @throws IdentityProviderManagementServerException server related error while getting Identity Providers object.
* @throws IdentityProviderManagementClientException client related error while getting Identity Providers object.
*/
@Override
public IdpSearchResult getIdPs(Integer limit, Integer offset, String filter, String sortOrder, String sortBy,
String tenantDomain)
throws IdentityProviderManagementServerException, IdentityProviderManagementClientException {
nullCheck(limit, offset);
if (limit < 0) {
String message = "Limit should be negative value. limit:" + limit;
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
message);
}
IdpSearchResult result = new IdpSearchResult();
List<ExpressionNode> expressionNodes = getExpressionNodes(filter);
setParameters(limit, offset, sortOrder, sortBy, filter, result);
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
result.setIdpCount(dao.getTotalIdPCount(tenantId, expressionNodes));
result.setIdpList(dao.getIdPsSearch(tenantId, expressionNodes, result.getLimit(), result.getOffSet(),
result.getSortOrder(), result.getSortBy()));
return result;
}
/**
* Check null for limit and offset.
*
* @param limit limit per page.
* @param offset offset value.
* @throws IdentityProviderManagementClientException Error while limit and offset getting null.
*/
private void nullCheck(Integer limit, Integer offset) throws IdentityProviderManagementClientException {
if (limit == null) {
String message = "Limit should not null";
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
message);
}
if (offset == null) {
String message = "Offset should not null";
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
message);
}
}
/**
* Get the filter node as a list.
*
* @param filter value of the filter.
* @return node tree.
* @throws IdentityProviderManagementClientException Error when validate filters.
*/
private List<ExpressionNode> getExpressionNodes(String filter) throws IdentityProviderManagementClientException {
// Filter example : name sw "te" and name ew "st" and isEnabled eq "true".
List<ExpressionNode> expressionNodes = new ArrayList<>();
FilterTreeBuilder filterTreeBuilder;
try {
filterTreeBuilder = new FilterTreeBuilder(filter);
Node rootNode = filterTreeBuilder.buildTree();
setExpressionNodeList(rootNode, expressionNodes);
} catch (IOException | IdentityException e) {
String message = "Error occurred while validate filter, filter: " + filter;
throw IdPManagementUtil
.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP, message, e);
}
return expressionNodes;
}
/**
* Set the node values as list of expression.
*
* @param node filter node.
* @param expression list of expression.
* @throws IdentityProviderManagementClientException Error when passing invalid filter.
*/
private void setExpressionNodeList(Node node, List<ExpressionNode> expression)
throws IdentityProviderManagementClientException {
if (node instanceof ExpressionNode) {
if (((ExpressionNode) node).getAttributeValue().contains(IdPManagementConstants.IDP_IS_ENABLED)) {
if ("true".contains(((ExpressionNode) node).getValue())) {
((ExpressionNode) node).setValue(IdPManagementConstants.IS_TRUE_VALUE);
} else if ("false".contains(((ExpressionNode) node).getValue())) {
((ExpressionNode) node).setValue(IdPManagementConstants.IS_FALSE_VALUE);
} else {
String message = "Invalid \'isEnabled\' value passed in the filter. It should be \'true\' or " +
"\'false\' isEnabled = " + ((ExpressionNode) node).getValue();
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
message);
}
}
expression.add((ExpressionNode) node);
} else if (node instanceof OperationNode) {
setExpressionNodeList(node.getLeftNode(), expression);
setExpressionNodeList(node.getRightNode(), expression);
}
}
/**
* Set the passing parameters as result.
*
* @param limit page limit.
* @param offset offset value.
* @param filter filter value for IdP search.
* @param sortOrder order of IdP(ASC/DESC).
* @param sortBy the column value need to sort.
* @param result result object.
*/
private void setParameters(int limit, int offset, String filter, String sortOrder, String sortBy, IdpSearchResult
result) {
result.setLimit(validateLimit(limit));
result.setOffSet(validateOffset(offset));
result.setSortBy(validateSortBy(sortBy));
result.setSortOrder(validateSortOrder(sortOrder));
result.setFilter(filter);
}
/**
* Validate sortBy.
*
* @param sortBy sortBy attribute.
* @return Validated sortOrder and sortBy.
*/
private String validateSortBy(String sortBy) {
if (StringUtils.isBlank(sortBy)) {
if (log.isDebugEnabled()) {
log.debug("sortBy attribute is empty. Therefore we set the default sortBy attribute. sortBy" + sortBy);
}
return IdPManagementConstants.DEFAULT_SORT_BY;
} else {
switch (sortBy) {
case IdPManagementConstants.IDP_NAME:
sortBy = IdPManagementConstants.NAME;
break;
case IdPManagementConstants.IDP_HOME_REALM_ID:
sortBy = IdPManagementConstants.HOME_REALM_ID;
break;
default:
sortBy = IdPManagementConstants.DEFAULT_SORT_BY;
if (log.isDebugEnabled()) {
log.debug("sortBy attribute is incorrect. Therefore we set the default sortBy attribute. " +
"sortBy: " + IdPManagementConstants.DEFAULT_SORT_BY);
}
break;
}
return sortBy;
}
}
/**
* Validate sortOrder.
*
* @param sortOrder sortOrder ASC/DESC.
* @return Validated sortOrder and sortBy.
*/
private String validateSortOrder(String sortOrder) {
if (StringUtils.isBlank(sortOrder)) {
sortOrder = IdPManagementConstants.DEFAULT_SORT_ORDER;
if (log.isDebugEnabled()) {
log.debug("sortOrder is empty. Therefore we set the default sortOrder value as ASC. SortOrder: " +
sortOrder);
}
} else if (sortOrder.equals(IdPManagementConstants.DESC_SORT_ORDER)) {
sortOrder = IdPManagementConstants.DESC_SORT_ORDER;
} else if (sortOrder.equals(IdPManagementConstants.ASC_SORT_ORDER)) {
sortOrder = IdPManagementConstants.ASC_SORT_ORDER;
} else {
sortOrder = IdPManagementConstants.DEFAULT_SORT_ORDER;
if (log.isDebugEnabled()) {
log.debug("sortOrder is incorrect. Therefore we set the default sortOrder value as ASC. SortOrder: "
+ sortOrder);
}
}
return sortOrder;
}
/**
* Validate limit.
*
* @param limit given limit value.
* @return validated limit and offset value.
*/
private int validateLimit(int limit) {
if (limit > IdPManagementConstants.MAXIMUM_LIMIT_PER_PAGE) {
try {
String itemsPerPagePropertyValue = ServerConfiguration.getInstance()
.getFirstProperty(IdPManagementConstants.ITEMS_PER_PAGE_PROPERTY);
if (log.isDebugEnabled()) {
log.debug("Given limit exceed the maximum limit. Therefore we get the default limit from " +
"carbon.xml. limit: " + limit);
}
if (StringUtils.isNotBlank(itemsPerPagePropertyValue)) {
limit = Integer.parseInt(itemsPerPagePropertyValue);
} else {
limit = IdPManagementConstants.MAXIMUM_LIMIT_PER_PAGE;
if (log.isDebugEnabled()) {
log.debug("limit is incorrect. Therefore we set the default limit. limit:" + limit);
}
}
} catch (NumberFormatException e) {
limit = IdPManagementConstants.DEFAULT_RESULTS_PER_PAGE;
log.warn("Error occurred while parsing the 'ItemsPerPage' property value in carbon.xml.", e);
}
}
return limit;
}
/**
* Validate offset.
*
* @param offset given offset value.
* @return validated limit and offset value.
*/
private int validateOffset(int offset) {
if (offset < 0) {
offset = 0;
if (log.isDebugEnabled()) {
log.debug("Invalid offset applied. Therefore we set the default offset value as 0. offSet: " + offset);
}
}
return offset;
}
/**
* Retrieves registered Identity finally {
* break;
* }providers for a given tenant
*
* @param tenantDomain Tenant domain whose IdP names are requested
* @return Set of <code>IdentityProvider</code>. IdP names, primary IdP and home realm
* identifiers of each IdP
* @throws IdentityProviderManagementException Error when getting list of Identity Providers
*/
@Override
public List<IdentityProvider> getIdPsSearch(String tenantDomain, String filter)
throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
return dao.getIdPsSearch(null, tenantId, tenantDomain, filter);
}
/**
* Retrieves registered Enabled Identity providers for a given tenant
*
* @param tenantDomain Tenant domain whose IdP names are requested
* @return Set of <code>IdentityProvider</code>. IdP names, primary IdP and home realm
* identifiers of each IdP
* @throws IdentityProviderManagementException Error when getting list of Identity Providers
*/
@Override
public List<IdentityProvider> getEnabledIdPs(String tenantDomain)
throws IdentityProviderManagementException {
List<IdentityProvider> enabledIdentityProviders = new ArrayList<IdentityProvider>();
List<IdentityProvider> identityProviers = getIdPs(tenantDomain);
for (IdentityProvider idp : identityProviers) {
if (idp.isEnable()) {
enabledIdentityProviders.add(idp);
}
}
return enabledIdentityProviders;
}
/**
* @param idPName
* @param tenantDomain
* @param ignoreFileBasedIdps
* @return
* @throws IdentityProviderManagementException
*/
@Override
public IdentityProvider getIdPByName(String idPName, String tenantDomain,
boolean ignoreFileBasedIdps) throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (!ignoreFileBasedIdps) {
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
}
return identityProvider;
}
@Override
public IdentityProvider getIdPById(String id, String tenantDomain,
boolean ignoreFileBasedIdps) throws IdentityProviderManagementException {
if (StringUtils.isEmpty(id)) {
String msg = "Invalid argument: Identity Provider ID value is empty";
throw new IdentityProviderManagementException(msg);
}
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
Integer intId;
IdentityProvider identityProvider = null;
try {
intId = Integer.parseInt(id);
identityProvider = dao.getIdPById(null, intId, tenantId, tenantDomain);
} catch (NumberFormatException e) {
// Ignore this.
}
if (!ignoreFileBasedIdps) {
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(id, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
}
return identityProvider;
}
@Override
public IdentityProvider getIdPByResourceId(String resourceId, String tenantDomain, boolean
ignoreFileBasedIdps) throws IdentityProviderManagementException {
validateGetIdPInputValues(resourceId);
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
return dao.getIdPByResourceId(resourceId, tenantId, tenantDomain);
}
/**
* @param idPName
* @param tenantDomain
* @param ignoreFileBasedIdps
* @return
* @throws IdentityProviderManagementException
*/
@Override
public IdentityProvider getEnabledIdPByName(String idPName, String tenantDomain,
boolean ignoreFileBasedIdps) throws IdentityProviderManagementException {
IdentityProvider idp = getIdPByName(idPName, tenantDomain, ignoreFileBasedIdps);
if (idp != null && idp.isEnable()) {
return idp;
}
return null;
}
    /**
     * Retrieves Identity provider information about a given tenant by Identity Provider name
     *
     * @param idPName      Unique name of the Identity provider of whose information is requested
     * @param tenantDomain Tenant domain whose information is requested
     * @return <code>IdentityProvider</code> Identity Provider information
     * @throws IdentityProviderManagementException Error when getting Identity Provider
     *                                             information by IdP name
     */
    @Override
    public IdentityProvider getIdPByName(String idPName, String tenantDomain)
            throws IdentityProviderManagementException {
        // Delegates to the three-argument overload, including file-based IdPs in the lookup.
        return getIdPByName(idPName, tenantDomain, false);
    }
    /**
     * Retrieves Identity provider information about a given tenant by Identity Provider id.
     *
     * @param id           Id of the Identity provider whose information is requested
     * @param tenantDomain Tenant domain whose information is requested
     * @return <code>IdentityProvider</code> Identity Provider information
     * @throws IdentityProviderManagementException Error when getting Identity Provider information by id
     */
    @Override
    public IdentityProvider getIdPById(String id, String tenantDomain) throws IdentityProviderManagementException {
        // Delegates to the three-argument overload, including file-based IdPs in the lookup.
        return getIdPById(id, tenantDomain, false);
    }
/**
* @param property IDP authenticator property (E.g.: IdPEntityId)
* @param value Value associated with given Property
* @param tenantDomain
* @return <code>IdentityProvider</code> Identity Provider information
* @throws IdentityProviderManagementException Error when getting Identity Provider
* information by authenticator property value
*/
@Override
public IdentityProvider getIdPByAuthenticatorPropertyValue(String property, String value, String tenantDomain,
boolean ignoreFileBasedIdps)
throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(property) || StringUtils.isEmpty(value)) {
String msg = "Invalid argument: Authenticator property or property value is empty";
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByAuthenticatorPropertyValue(
null, property, value, tenantId, tenantDomain);
if (identityProvider == null && !ignoreFileBasedIdps) {
identityProvider = new FileBasedIdPMgtDAO()
.getIdPByAuthenticatorPropertyValue(property, value, tenantDomain);
}
return identityProvider;
}
/**
* @param property IDP authenticator property (E.g.: IdPEntityId)
* @param value Value associated with given Property
* @param tenantDomain
* @param authenticator
* @return <code>IdentityProvider</code> Identity Provider information
* @throws IdentityProviderManagementException Error when getting Identity Provider
* information by authenticator property value
*/
public IdentityProvider getIdPByAuthenticatorPropertyValue(String property, String value, String tenantDomain,
String authenticator, boolean ignoreFileBasedIdps)
throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(property) || StringUtils.isEmpty(value) || StringUtils.isEmpty(authenticator)) {
String msg = "Invalid argument: Authenticator property, property value or authenticator name is empty";
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByAuthenticatorPropertyValue(
null, property, value, authenticator, tenantId, tenantDomain);
if (identityProvider == null && !ignoreFileBasedIdps) {
identityProvider = new FileBasedIdPMgtDAO()
.getIdPByAuthenticatorPropertyValue(property, value, tenantDomain, authenticator);
}
return identityProvider;
}
/**
* Retrieves Enabled Identity provider information about a given tenant by Identity Provider name
*
* @param idPName Unique name of the Identity provider of whose information is requested
* @param tenantDomain Tenant domain whose information is requested
* @return <code>IdentityProvider</code> Identity Provider information
* @throws IdentityProviderManagementException Error when getting Identity Provider
* information by IdP name
*/
@Override
public IdentityProvider getEnabledIdPByName(String idPName, String tenantDomain)
throws IdentityProviderManagementException {
IdentityProvider idp = getIdPByName(idPName, tenantDomain);
if (idp != null && idp.isEnable()) {
return idp;
}
return null;
}
/**
* Retrieves Identity provider information about a given tenant by realm identifier
*
* @param realmId Unique realm identifier of the Identity provider of whose information is
* requested
* @param tenantDomain Tenant domain whose information is requested
* @throws IdentityProviderManagementException Error when getting Identity Provider
* information by IdP home realm identifier
*/
@Override
public IdentityProvider getIdPByRealmId(String realmId, String tenantDomain)
throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(realmId)) {
String msg = "Invalid argument: Identity Provider Home Realm Identifier value is empty";
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByRealmId(realmId, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByRealmId(realmId, tenantDomain);
}
return identityProvider;
}
/**
* Retrieves Enabled Identity provider information about a given tenant by realm identifier
*
* @param realmId Unique realm identifier of the Identity provider of whose information is
* requested
* @param tenantDomain Tenant domain whose information is requested
* @throws IdentityProviderManagementException Error when getting Identity Provider
* information by IdP home realm identifier
*/
@Override
public IdentityProvider getEnabledIdPByRealmId(String realmId, String tenantDomain)
throws IdentityProviderManagementException {
IdentityProvider idp = getIdPByRealmId(realmId, tenantDomain);
if (idp != null && idp.isEnable()) {
return idp;
}
return null;
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique Name of the IdP to which the given IdP claim URIs need to be mapped
* @param tenantDomain The tenant domain of whose local claim URIs to be mapped
* @param idPClaimURIs IdP claim URIs which need to be mapped to tenant's local claim URIs
* @throws IdentityProviderManagementException Error when getting claim mappings
*/
@Override
public Set<ClaimMapping> getMappedLocalClaims(String idPName, String tenantDomain,
List<String> idPClaimURIs) throws
IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
ClaimConfig claimConfiguration = identityProvider.getClaimConfig();
if (claimConfiguration != null) {
ClaimMapping[] claimMappings = claimConfiguration.getClaimMappings();
if (claimMappings != null && claimMappings.length > 0 && idPClaimURIs != null) {
Set<ClaimMapping> returnSet = new HashSet<ClaimMapping>();
for (String idpClaim : idPClaimURIs) {
for (ClaimMapping claimMapping : claimMappings) {
if (claimMapping.getRemoteClaim().getClaimUri().equals(idpClaim)) {
returnSet.add(claimMapping);
break;
}
}
}
return returnSet;
}
}
return new HashSet<ClaimMapping>();
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique Name of the IdP to which the given IdP claim URIs need to be mapped
* @param tenantDomain The tenant domain of whose local claim URIs to be mapped
* @param idPClaimURIs IdP claim URIs which need to be mapped to tenant's local claim URIs
* @throws IdentityProviderManagementException Error when getting claim mappings
*/
@Override
public Map<String, String> getMappedLocalClaimsMap(String idPName, String tenantDomain,
List<String> idPClaimURIs) throws
IdentityProviderManagementException {
Set<ClaimMapping> claimMappings = getMappedLocalClaims(idPName, tenantDomain, idPClaimURIs);
Map<String, String> returnMap = new HashMap<String, String>();
for (ClaimMapping claimMapping : claimMappings) {
returnMap.put(claimMapping.getRemoteClaim().getClaimUri(), claimMapping.getLocalClaim()
.getClaimUri());
}
return returnMap;
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique Name of the IdP to which the given local claim URIs need to be mapped
* @param tenantDomain The tenant domain of whose local claim URIs to be mapped
* @param localClaimURIs Local claim URIs which need to be mapped to IdP's claim URIs
* @throws IdentityProviderManagementException Error when getting claim mappings
*/
@Override
public Set<ClaimMapping> getMappedIdPClaims(String idPName, String tenantDomain,
List<String> localClaimURIs) throws
IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
ClaimConfig claimConfiguration = identityProvider.getClaimConfig();
if (claimConfiguration != null) {
ClaimMapping[] claimMappings = claimConfiguration.getClaimMappings();
if (claimMappings != null && claimMappings.length > 0 && localClaimURIs != null) {
Set<ClaimMapping> returnSet = new HashSet<ClaimMapping>();
for (String localClaimURI : localClaimURIs) {
for (ClaimMapping claimMapping : claimMappings) {
if (claimMapping.getLocalClaim().getClaimUri().equals(localClaimURI)) {
returnSet.add(claimMapping);
break;
}
}
}
return returnSet;
}
}
return new HashSet<ClaimMapping>();
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique Name of the IdP to which the given local claim URIs need to be mapped
* @param tenantDomain The tenant domain of whose local claim URIs to be mapped
* @param localClaimURIs Local claim URIs which need to be mapped to IdP's claim URIs
* @throws IdentityProviderManagementException Error when getting claim mappings
*/
@Override
public Map<String, String> getMappedIdPClaimsMap(String idPName, String tenantDomain,
List<String> localClaimURIs) throws
IdentityProviderManagementException {
Set<ClaimMapping> claimMappings = getMappedIdPClaims(idPName, tenantDomain, localClaimURIs);
Map<String, String> returnMap = new HashMap<String, String>();
for (ClaimMapping claimMapping : claimMappings) {
returnMap.put(claimMapping.getLocalClaim().getClaimUri(), claimMapping.getRemoteClaim()
.getClaimUri());
}
return returnMap;
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique name of the IdP to which the given IdP roles need to be mapped
* @param tenantDomain The tenant domain of whose local roles to be mapped
* @param idPRoles IdP roles which need to be mapped to local roles
* @throws IdentityProviderManagementException Error when getting role mappings
*/
@Override
public Set<RoleMapping> getMappedLocalRoles(String idPName, String tenantDomain,
String[] idPRoles) throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
PermissionsAndRoleConfig roleConfiguration = identityProvider.getPermissionAndRoleConfig();
if (roleConfiguration != null) {
RoleMapping[] roleMappings = roleConfiguration.getRoleMappings();
if (roleMappings != null && roleMappings.length > 0 && idPRoles != null) {
Set<RoleMapping> returnSet = new HashSet<RoleMapping>();
for (String idPRole : idPRoles) {
for (RoleMapping roleMapping : roleMappings) {
if (roleMapping.getRemoteRole().equals(idPRole)) {
returnSet.add(roleMapping);
break;
}
}
}
return returnSet;
}
}
return new HashSet<RoleMapping>();
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique name of the IdP to which the given IdP roles need to be mapped
* @param tenantDomain The tenant domain of whose local roles to be mapped
* @param idPRoles IdP roles which need to be mapped to local roles
* @throws IdentityProviderManagementException Error when getting role mappings
*/
@Override
public Map<String, LocalRole> getMappedLocalRolesMap(String idPName, String tenantDomain,
String[] idPRoles) throws IdentityProviderManagementException {
Set<RoleMapping> roleMappings = getMappedLocalRoles(idPName, tenantDomain, idPRoles);
Map<String, LocalRole> returnMap = new HashMap<String, LocalRole>();
for (RoleMapping roleMapping : roleMappings) {
returnMap.put(roleMapping.getRemoteRole(), roleMapping.getLocalRole());
}
return returnMap;
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique name of the IdP to which the given local roles need to be mapped
* @param tenantDomain The tenant domain of whose local roles need to be mapped
* @param localRoles Local roles which need to be mapped to IdP roles
* @throws IdentityProviderManagementException Error when getting role mappings
*/
@Override
public Set<RoleMapping> getMappedIdPRoles(String idPName, String tenantDomain,
LocalRole[] localRoles) throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
PermissionsAndRoleConfig roleConfiguration = identityProvider.getPermissionAndRoleConfig();
if (roleConfiguration != null) {
RoleMapping[] roleMappings = roleConfiguration.getRoleMappings();
if (roleMappings != null && roleMappings.length > 0 && localRoles != null) {
Set<RoleMapping> returnSet = new HashSet<RoleMapping>();
for (LocalRole localRole : localRoles) {
for (RoleMapping roleMapping : roleMappings) {
if (roleMapping.getLocalRole().equals(localRole)) {
returnSet.add(roleMapping);
break;
}
}
}
return returnSet;
}
}
return new HashSet<RoleMapping>();
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique name of the IdP to which the given local roles need to be mapped
* @param tenantDomain The tenant domain of whose local roles need to be mapped
* @param localRoles Local roles which need to be mapped to IdP roles
* @throws IdentityProviderManagementException Error when getting role mappings
*/
@Override
public Map<LocalRole, String> getMappedIdPRolesMap(String idPName, String tenantDomain,
LocalRole[] localRoles) throws
IdentityProviderManagementException {
Set<RoleMapping> roleMappings = getMappedIdPRoles(idPName, tenantDomain, localRoles);
Map<LocalRole, String> returnMap = new HashMap<LocalRole, String>();
for (RoleMapping roleMapping : roleMappings) {
returnMap.put(roleMapping.getLocalRole(), roleMapping.getRemoteRole());
}
return returnMap;
}
    /**
     * Scans the federated authenticator configs of the given IdP for a metadata property and, when
     * a registered {@link MetadataConverter} can handle it, rebuilds that authenticator's
     * properties (and optionally the IdP certificate) from the metadata document.
     *
     * Side effect: the matching converter is cached in the {@code SAML2SSOMetadataConverter} field
     * for the subsequent save/delete of the metadata registry entry.
     *
     * NOTE(review): the method name keeps the historical "Metadta" typo; callers within this class
     * depend on it, so it is not renamed here.
     *
     * @param identityProvider IdP whose authenticator configs may carry a metadata document
     * @return the raw metadata string that was found, or an empty string when none was present
     * @throws IdentityProviderManagementException when no converter is registered, the SP entity ID
     *                                             is missing, or metadata conversion fails
     */
    private String handleMetadta(IdentityProvider identityProvider) throws IdentityProviderManagementException {
        StringBuilder metadata = new StringBuilder();
        if (IdpMgtServiceComponentHolder.getInstance().getMetadataConverters().isEmpty()) {
            throw new IdentityProviderManagementException("Metadata Converter is not set");
        }
        FederatedAuthenticatorConfig federatedAuthenticatorConfigs[] = identityProvider.getFederatedAuthenticatorConfigs();
        for (int i = 0; i < federatedAuthenticatorConfigs.length; i++) {
            Property properties[] = federatedAuthenticatorConfigs[i].getProperties();
            if (ArrayUtils.isNotEmpty(properties)) {
                for (int j = 0; j < properties.length; j++) {
                    if (properties[j] != null) {
                        // Only properties whose name contains the META_DATA marker carry a metadata document.
                        if (properties[j].getName() != null && properties[j].getName().contains(IdPManagementConstants.META_DATA)) {
                            // Try each registered converter until one accepts this property.
                            for (int v = 0; v < IdpMgtServiceComponentHolder.getInstance().getMetadataConverters()
                                    .size(); v++) {
                                MetadataConverter metadataConverter = IdpMgtServiceComponentHolder.getInstance()
                                        .getMetadataConverters().get(v);
                                if (metadataConverter.canHandle(properties[j])) {
                                    // Remember the converter so add/update/delete flows can persist the metadata.
                                    SAML2SSOMetadataConverter = metadataConverter;
                                    try {
                                        metadata.append(properties[j].getValue());
                                        // The converter writes any embedded signing certificate into this buffer.
                                        StringBuilder certificate = new StringBuilder("");
                                        try {
                                            FederatedAuthenticatorConfig metaFederated = metadataConverter.getFederatedAuthenticatorConfig(properties, certificate);
                                            // The SP entity ID set by the admin must survive the rebuild below.
                                            String spName = "";
                                            for (int b = 0; b < properties.length; b++) {
                                                if (properties[b] != null && properties[b].getName() != null &&
                                                        properties[b].getName().toString().equals(IdentityApplicationConstants.Authenticator.SAML2SSO.SP_ENTITY_ID)) {
                                                    spName = properties[b].getValue();
                                                }
                                            }
                                            if (spName.equals("")) {
                                                throw new IdentityProviderManagementException("SP name can't be empty");
                                            }
                                            // Re-apply the original SP entity ID onto the converted properties.
                                            if (metaFederated != null && ArrayUtils.isNotEmpty(metaFederated.getProperties())) {
                                                for (int y = 0; y < metaFederated.getProperties().length; y++) {
                                                    if (metaFederated.getProperties()[y] != null && metaFederated.getProperties()[y].getName() != null
                                                            && metaFederated.getProperties()[y].getName().toString().equals(IdentityApplicationConstants.Authenticator.SAML2SSO.SP_ENTITY_ID)) {
                                                        metaFederated.getProperties()[y].setValue(spName);
                                                        break;
                                                    }
                                                }
                                            }
                                            // Replace the authenticator's properties with the converted set, or fail.
                                            if (metaFederated != null && metaFederated.getProperties() != null && metaFederated.getProperties().length > 0) {
                                                federatedAuthenticatorConfigs[i].setProperties(metaFederated.getProperties());
                                            } else {
                                                throw new IdentityProviderManagementException("Error setting metadata using file");
                                            }
                                        } catch (IdentityProviderManagementException ex) {
                                            throw new IdentityProviderManagementException("Error converting metadata", ex);
                                        }
                                        if (certificate.toString().length() > 0) {
                                            identityProvider.setCertificate(certificate.toString());
                                        }
                                    } catch (XMLStreamException e) {
                                        throw new IdentityProviderManagementException("Error while configuring metadata", e);
                                    }
                                    // First converter that can handle the property wins; stop searching.
                                    break;
                                }
                            }
                        }
                    }
                }
            }
        }
        return metadata.toString();
    }
    /**
     * Adds an Identity Provider to the given tenant.
     *
     * Thin delegation to {@link #addIdPWithResourceId(IdentityProvider, String)}; the created IdP
     * (with its resource ID) is discarded because this legacy signature returns {@code void}.
     *
     * @param identityProvider new Identity Provider information
     * @param tenantDomain     tenant domain to add the IdP to
     * @throws IdentityProviderManagementException Error when adding Identity Provider
     *                                             information
     */
    @Override
    public void addIdP(IdentityProvider identityProvider, String tenantDomain)
            throws IdentityProviderManagementException {
        addIdPWithResourceId(identityProvider, tenantDomain);
    }
    /**
     * Adds an Identity Provider to the given tenant and returns the created IdP.
     *
     * Sequence: duplicate-name validation, pre-listener veto, role-mapping cleanup, entity-ID
     * uniqueness check, metadata handling, persistence, post-listener veto. A listener returning
     * {@code false} aborts the operation and yields {@code null}.
     *
     * @param identityProvider new Identity Provider information
     * @param tenantDomain     tenant domain to add the IdP to
     * @return the persisted IdP, or {@code null} when a pre/post listener vetoed the operation
     * @throws IdentityProviderManagementException Error when adding Identity Provider
     *                                             information
     */
    @Override
    public IdentityProvider addIdPWithResourceId(IdentityProvider identityProvider, String tenantDomain)
            throws IdentityProviderManagementException {
        // Reject names that collide with persisted or (non-shared) file-based IdPs.
        validateAddIdPInputValues(identityProvider.getIdentityProviderName(), tenantDomain);
        // invoking the pre listeners; any enabled listener may veto the add
        Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
        for (IdentityProviderMgtListener listener : listeners) {
            if (listener.isEnable() && !listener.doPreAddIdP(identityProvider, tenantDomain)) {
                return null;
            }
        }
        int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
        // Drop role mappings whose local role does not exist in the tenant's user store.
        if (isPermissionAndRoleConfigExist(identityProvider)) {
            verifyAndUpdateRoleConfiguration(tenantDomain, tenantId, identityProvider.getPermissionAndRoleConfig());
        }
        // Fail if the SAML IdP entity ID is already registered for this tenant.
        validateIdPEntityId(identityProvider.getFederatedAuthenticatorConfigs(), tenantId, tenantDomain);
        String idpName = identityProvider.getIdentityProviderName();
        // May rewrite the SAML authenticator properties from an uploaded metadata document and
        // sets the SAML2SSOMetadataConverter field needed for saveMetadataString below.
        String metadata = handleMetadta(identityProvider);
        if (isMetadataFileExist(idpName, metadata)) {
            if (SAML2SSOMetadataConverter != null) {
                SAML2SSOMetadataConverter.saveMetadataString(tenantId, idpName, metadata);
            } else {
                String data = "Couldn't save metadata in registry.SAML2SSOMetadataConverter is not set.";
                throw IdPManagementUtil.handleServerException(IdPManagementConstants.ErrorMessage.ERROR_CODE_ADD_IDP,
                        data);
            }
        }
        identityProvider = dao.addIdP(identityProvider, tenantId, tenantDomain);
        // invoking the post listeners; a veto here still leaves the IdP persisted but returns null
        for (IdentityProviderMgtListener listener : listeners) {
            if (listener.isEnable() && !listener.doPostAddIdP(identityProvider, tenantDomain)) {
                return null;
            }
        }
        return identityProvider;
    }
/**
* Deletes an Identity Provider from a given tenant
*
* @param idPName Name of the IdP to be deleted
* @throws IdentityProviderManagementException Error when deleting Identity Provider
* information
*/
@Override
public void deleteIdP(String idPName, String tenantDomain) throws IdentityProviderManagementException {
IdentityProvider identityProvider = this.getIdPByName(idPName, tenantDomain, true);
if (identityProvider == null) {
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_DOES_NOT_EXIST, idPName);
}
deleteIdPByResourceId(identityProvider.getResourceId(), tenantDomain);
}
/**
* Deletes an Identity Provider from a given tenant.
*
* @param resourceId Resource ID of the IdP to be deleted
* @throws IdentityProviderManagementException Error when deleting Identity Provider
* information
*/
@Override
public void deleteIdPByResourceId(String resourceId, String tenantDomain) throws
IdentityProviderManagementException {
// invoking the pre listeners
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreDeleteIdPByResourceId(resourceId, tenantDomain)) {
return;
}
}
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
IdentityProvider idp = getIdPByResourceId(resourceId, tenantDomain, true);
if (SAML2SSOMetadataConverter != null) {
String idpName = null;
if (idp != null) {
idpName = idp.getIdentityProviderName();
}
SAML2SSOMetadataConverter.deleteMetadataString(tenantId, idpName);
}
dao.deleteIdPByResourceId(resourceId, tenantId, tenantDomain);
// invoking the post listeners
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPostDeleteIdPByResourceId(resourceId, idp, tenantDomain)) {
return;
}
}
}
/**
* Force delete an Identity Provider from a given tenant. This will remove any associations this Identity
* Provider has with any Service Providers in authentication steps or provisioning.
*
* @param idpName name of IDP to be deleted
* @param tenantDomain tenantDomain to which the IDP belongs to
*/
public void forceDeleteIdp(String idpName, String tenantDomain) throws IdentityProviderManagementException {
IdentityProvider identityProvider = this
.getIdPByName(idpName, tenantDomain, true);
if (identityProvider == null) {
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_DOES_NOT_EXIST, idpName);
}
forceDeleteIdpByResourceId(identityProvider.getResourceId(), tenantDomain);
}
/**
* Force delete an Identity Provider from a given tenant. This will remove any associations this Identity
* Provider has with any Service Providers in authentication steps or provisioning.
*
* @param resourceId resource ID of IDP to be deleted
* @param tenantDomain tenantDomain to which the IDP belongs to
*/
public void forceDeleteIdpByResourceId(String resourceId, String tenantDomain) throws
IdentityProviderManagementException {
// Invoking the pre listeners
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreDeleteIdPByResourceId(resourceId, tenantDomain)) {
return;
}
}
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
IdentityProvider idp = getIdPByResourceId(resourceId, tenantDomain, true);
if (SAML2SSOMetadataConverter != null) {
String idpName = null;
if (idp != null) {
idpName = idp.getIdentityProviderName();
}
SAML2SSOMetadataConverter.deleteMetadataString(tenantId, idpName);
}
dao.forceDeleteIdPByResourceId(resourceId, tenantId, tenantDomain);
// Invoking the post listeners
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPostDeleteIdPByResourceId(resourceId, idp, tenantDomain)) {
return;
}
}
}
/**
* Updates a given Identity Provider information
*
* @param oldIdPName existing Identity Provider name
* @param newIdentityProvider new IdP information
* @throws IdentityProviderManagementException Error when updating Identity Provider
* information
*/
@Override
public void updateIdP(String oldIdPName, IdentityProvider newIdentityProvider,
String tenantDomain) throws IdentityProviderManagementException {
IdentityProvider currentIdentityProvider = this
.getIdPByName(oldIdPName, tenantDomain, true);
if (currentIdentityProvider == null) {
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_DOES_NOT_EXIST, oldIdPName);
}
updateIdPByResourceId(currentIdentityProvider.getResourceId(), newIdentityProvider, tenantDomain);
}
    /**
     * Updates the Identity Provider of the given tenant that is addressed by resource ID.
     *
     * Sequence: existence/name validation, pre-listener veto, role-mapping cleanup, entity-ID
     * uniqueness check, metadata handling, persistence, post-listener veto. A listener returning
     * {@code false} aborts the operation and yields {@code null}.
     *
     * @param resourceId          existing Identity Provider resourceId
     * @param newIdentityProvider new IdP information
     * @param tenantDomain        tenant domain of IDP.
     * @return the updated IdP, or {@code null} when a pre/post listener vetoed the operation
     * @throws IdentityProviderManagementException Error when updating Identity Provider
     *                                             information
     */
    @Override
    public IdentityProvider updateIdPByResourceId(String resourceId, IdentityProvider
            newIdentityProvider, String tenantDomain) throws IdentityProviderManagementException {

        IdentityProvider currentIdentityProvider = this
                .getIdPByResourceId(resourceId, tenantDomain, true);
        // Fails when the IdP does not exist or the new name collides with a file-based IdP.
        validateUpdateIdPInputValues(currentIdentityProvider, resourceId, newIdentityProvider.getIdentityProviderName());
        // Invoking the pre listeners; any enabled listener may veto the update.
        Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
        for (IdentityProviderMgtListener listener : listeners) {
            if (listener.isEnable() && !listener.doPreUpdateIdPByResourceId(resourceId, newIdentityProvider,
                    tenantDomain)) {
                return null;
            }
        }
        int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
        // Drop role mappings whose local role does not exist in the tenant's user store.
        if (isPermissionAndRoleConfigExist(newIdentityProvider)) {
            verifyAndUpdateRoleConfiguration(tenantDomain, tenantId, newIdentityProvider.getPermissionAndRoleConfig());
        }
        // Entity ID may stay the same as the current IdP's; otherwise it must be unused in the tenant.
        validateUpdateOfIdPEntityId(currentIdentityProvider.getFederatedAuthenticatorConfigs(),
                newIdentityProvider.getFederatedAuthenticatorConfigs(),
                tenantId, tenantDomain);
        String idpName = newIdentityProvider.getIdentityProviderName();
        // May rewrite the SAML authenticator properties from an uploaded metadata document and
        // sets the SAML2SSOMetadataConverter field needed for saveMetadataString below.
        String metadata = handleMetadta(newIdentityProvider);
        if (isMetadataFileExist(idpName, metadata)) {
            if (SAML2SSOMetadataConverter != null) {
                SAML2SSOMetadataConverter.saveMetadataString(tenantId, idpName, metadata);
            } else {
                String data = "Couldn't save metadata in registry.SAML2SSOMetadataConverter is not set.";
                throw IdPManagementUtil.handleServerException(IdPManagementConstants.ErrorMessage.ERROR_CODE_ADD_IDP,
                        data);
            }
        }
        IdentityProvider updateIdP = dao.updateIdP(newIdentityProvider, currentIdentityProvider, tenantId, tenantDomain);
        // invoking the post listeners; a veto here still leaves the update persisted but returns null
        for (IdentityProviderMgtListener listener : listeners) {
            if (listener.isEnable() && !listener.doPostUpdateIdPByResourceId(resourceId, currentIdentityProvider,
                    newIdentityProvider, tenantDomain)) {
                return null;
            }
        }
        return updateIdP;
    }
/**
* Get the authenticators registered in the system.
*
* @return <code>FederatedAuthenticatorConfig</code> array.
* @throws IdentityProviderManagementException Error when getting authenticators registered
* in the system
*/
@Override
public FederatedAuthenticatorConfig[] getAllFederatedAuthenticators()
throws IdentityProviderManagementException {
List<FederatedAuthenticatorConfig> appConfig = ApplicationAuthenticatorService
.getInstance().getFederatedAuthenticators();
if (CollectionUtils.isNotEmpty(appConfig)) {
return appConfig.toArray(new FederatedAuthenticatorConfig[appConfig.size()]);
}
return new FederatedAuthenticatorConfig[0];
}
/**
* Get the Provisioning Connectors registered in the system.
*
* @return <code>ProvisioningConnectorConfig</code> array.
* @throws IdentityProviderManagementException
*/
@Override
public ProvisioningConnectorConfig[] getAllProvisioningConnectors()
throws IdentityProviderManagementException {
List<ProvisioningConnectorConfig> connectorConfigs = ProvisioningConnectorService
.getInstance().getProvisioningConnectorConfigs();
if (connectorConfigs != null && connectorConfigs.size() > 0) {
return connectorConfigs.toArray(new ProvisioningConnectorConfig[connectorConfigs.size()]);
}
return null;
}
    /**
     * Validates that the SAML IdP entity ID carried by the given authenticator configs is not
     * already registered by another IdP in the tenant.
     *
     * Only the first SAML2 SSO authenticator's {@code IdPEntityId} property is checked; once it is
     * found the method returns immediately. The return value is effectively always {@code true} —
     * a duplicate is reported by throwing, not by returning {@code false}.
     *
     * @param federatedAuthenticatorConfigs authenticator configs of the IdP being added
     * @param tenantId                      numeric tenant ID
     * @param tenantDomain                  tenant domain (used in the error message only)
     * @return always {@code true}
     * @throws IdentityProviderManagementException when the entity ID is already registered in the
     *                                             tenant
     */
    private boolean validateIdPEntityId(FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs,
                                        int tenantId, String tenantDomain) throws IdentityProviderManagementException {
        if (federatedAuthenticatorConfigs != null) {
            for (FederatedAuthenticatorConfig authConfig : federatedAuthenticatorConfigs) {
                // Both the legacy and the current SAML2 SSO authenticator names are recognised.
                if (IdentityApplicationConstants.Authenticator.SAML2SSO.FED_AUTH_NAME.equals(authConfig.getName()) ||
                        IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(authConfig.getName())) {
                    Property[] properties = authConfig.getProperties();
                    if (properties != null) {
                        for (Property property : properties) {
                            if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(
                                    property.getName())) {
                                if (dao.isIdPAvailableForAuthenticatorProperty(authConfig.getName(),
                                        IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID,
                                        property.getValue(), tenantId)) {
                                    String msg = "An Identity Provider Entity ID has already been registered with the " +
                                            "name '" + property.getValue() + "' for tenant '" + tenantDomain + "'";
                                    throw new IdentityProviderManagementException(msg);
                                }
                                // First IdPEntityId property found decides the outcome; stop scanning.
                                return true;
                            }
                        }
                    }
                }
            }
        }
        return true;
    }
    /**
     * Validates the SAML IdP entity ID of an IdP update: keeping the current entity ID is always
     * allowed; changing it requires the new value to be unused by any other IdP in the tenant.
     *
     * Phase 1 extracts the current entity ID from the existing configs; phase 2 compares it with
     * the value in the new configs. The return value is effectively always {@code true} — a
     * duplicate is reported by throwing, not by returning {@code false}.
     *
     * @param currentFederatedAuthConfigs authenticator configs currently persisted for the IdP
     * @param newFederatedAuthConfigs     authenticator configs of the incoming update
     * @param tenantId                    numeric tenant ID
     * @param tenantDomain                tenant domain (used in the error message only)
     * @return always {@code true}
     * @throws IdentityProviderManagementException when the new entity ID is already registered by
     *                                             another IdP in the tenant
     */
    private boolean validateUpdateOfIdPEntityId(FederatedAuthenticatorConfig[] currentFederatedAuthConfigs,
                                                FederatedAuthenticatorConfig[] newFederatedAuthConfigs,
                                                int tenantId, String tenantDomain)
            throws IdentityProviderManagementException {
        // Phase 1: find the entity ID currently stored for the IdP's SAML2 SSO authenticator.
        String currentIdentityProviderEntityId = null;
        if (currentFederatedAuthConfigs != null) {
            for (FederatedAuthenticatorConfig fedAuthnConfig : currentFederatedAuthConfigs) {
                if (IdentityApplicationConstants.Authenticator.SAML2SSO.FED_AUTH_NAME.equals(fedAuthnConfig.getName()) ||
                        IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(fedAuthnConfig.getName())) {
                    Property[] properties = fedAuthnConfig.getProperties();
                    if (properties != null) {
                        for (Property property : properties) {
                            if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals
                                    (property.getName())) {
                                currentIdentityProviderEntityId = property.getValue();
                                break;
                            }
                        }
                    }
                    break;
                }
            }
        }
        // Phase 2: compare against the entity ID in the incoming update.
        if (newFederatedAuthConfigs != null) {
            for (FederatedAuthenticatorConfig fedAuthnConfig : newFederatedAuthConfigs) {
                if (IdentityApplicationConstants.Authenticator.SAML2SSO.FED_AUTH_NAME.equals(fedAuthnConfig.getName()) ||
                        IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(fedAuthnConfig.getName())) {
                    Property[] properties = fedAuthnConfig.getProperties();
                    if (properties != null) {
                        for (Property property : properties) {
                            if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(property.
                                    getName())) {
                                // Unchanged entity ID is always acceptable.
                                if (currentIdentityProviderEntityId != null && currentIdentityProviderEntityId.equals
                                        (property.getValue())) {
                                    return true;
                                } else {
                                    // Changed entity ID: it must not belong to any other IdP in the tenant.
                                    if (dao.isIdPAvailableForAuthenticatorProperty(fedAuthnConfig.getName(),
                                            IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID,
                                            property.getValue(), tenantId)) {
                                        String msg = "An Identity Provider Entity ID has already been registered " +
                                                "with the name '" +
                                                property.getValue() + "' for tenant '" + tenantDomain + "'";
                                        throw new IdentityProviderManagementException(msg);
                                    }
                                    return true;
                                }
                            }
                        }
                    }
                    break;
                }
            }
        }
        return true;
    }
private String getOIDCResidentIdPEntityId() {
String OIDCEntityId = IdentityUtil.getProperty("OAuth.OpenIDConnect.IDTokenIssuerID");
if (StringUtils.isBlank(OIDCEntityId)) {
OIDCEntityId = "localhost";
}
return OIDCEntityId;
}
public String getResidentIDPMetadata(String tenantDomain) throws IdentityProviderManagementException {
if (IdpMgtServiceComponentHolder.getInstance().getMetadataConverters().isEmpty()) {
throw new IdentityProviderManagementException("Error receiving Metadata object");
}
IdentityProvider residentIdentityProvider = this.getResidentIdP(tenantDomain);
FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs = residentIdentityProvider.getFederatedAuthenticatorConfigs();
FederatedAuthenticatorConfig samlFederatedAuthenticatorConfig = null;
for (int i = 0; i < federatedAuthenticatorConfigs.length; i++) {
if (federatedAuthenticatorConfigs[i].getName().equals(IdentityApplicationConstants.Authenticator.SAML2SSO.NAME)) {
samlFederatedAuthenticatorConfig = federatedAuthenticatorConfigs[i];
break;
}
}
if (samlFederatedAuthenticatorConfig != null) {
try {
for (int t = 0; t < IdpMgtServiceComponentHolder.getInstance().getMetadataConverters().size(); t++) {
MetadataConverter converter = IdpMgtServiceComponentHolder.getInstance().getMetadataConverters()
.get(t);
if (converter.canHandle(samlFederatedAuthenticatorConfig)) {
return converter.getMetadataString(samlFederatedAuthenticatorConfig);
}
}
} catch (IdentityProviderSAMLException e) {
throw new IdentityProviderManagementException(e.getMessage());
}
}
return null;
}
/**
* Overrides the persisted endpoint URLs (e.g. SAML endpoint) if the hostname/port has been changed.
* @param residentIDP
* @throws IdentityProviderManagementException
*/
private void overrideResidentIdpEPUrls(IdentityProvider residentIDP)
throws IdentityProviderManagementException {
// Not all endpoints are persisted. So we need to update only a few properties.
String passiveStsUrl = IdentityUtil.getServerURL(IdentityConstants.STS.PASSIVE_STS, true, true);
updateFederationAuthenticationConfigProperty(residentIDP,
IdentityApplicationConstants.Authenticator.PassiveSTS.NAME, IdentityApplicationConstants
.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL, passiveStsUrl);
}
/**
* Updates the property values of the given property name of the given authenticator.
*
* @param residentIdentityProvider
* @param authenticatorName
* @param propertyName
* @param newValue
* @return true if the value was updated, false if the value is up to date.
*/
private boolean updateFederationAuthenticationConfigProperty(IdentityProvider residentIdentityProvider, String
authenticatorName, String propertyName, String newValue) {
FederatedAuthenticatorConfig federatedAuthenticatorConfig = IdentityApplicationManagementUtil
.getFederatedAuthenticator(residentIdentityProvider.getFederatedAuthenticatorConfigs(),
authenticatorName);
if (federatedAuthenticatorConfig != null) {
Property existingProperty = IdentityApplicationManagementUtil.getProperty(federatedAuthenticatorConfig
.getProperties(), propertyName);
if (existingProperty != null) {
String existingPropertyValue = existingProperty.getValue();
if (!StringUtils.equalsIgnoreCase(existingPropertyValue, newValue)) {
existingProperty.setValue(newValue);
return true;
}
}
}
return false;
}
private String getTenantUrl(String url, String tenantDomain) throws URISyntaxException {
URI uri = new URI(url);
URI uriModified = new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), ("/t/" +
tenantDomain + uri.getPath()), uri.getQuery(), uri.getFragment());
return uriModified.toString();
}
    /**
     * Filters the given role configuration in place, keeping only mappings whose local role
     * actually exists in the tenant's user store. Mappings with a null remote/local role or a
     * blank local role name are silently dropped; non-existent local roles are logged at debug.
     *
     * @param tenantDomain      tenant domain (used for error reporting)
     * @param tenantId          numeric tenant ID used to resolve the user realm
     * @param roleConfiguration role configuration whose mappings and IdP roles are rewritten
     * @throws IdentityProviderManagementException when the tenant's UserStoreManager cannot be
     *                                             retrieved or queried
     */
    private void verifyAndUpdateRoleConfiguration(String tenantDomain, int tenantId,
                                                  PermissionsAndRoleConfig roleConfiguration) throws IdentityProviderManagementException {
        List<RoleMapping> validRoleMappings = new ArrayList<>();
        List<String> validIdPRoles = new ArrayList<>();
        for (RoleMapping mapping : roleConfiguration.getRoleMappings()) {
            try {
                // Skip malformed mappings: both sides must be present and the local role named.
                if (mapping.getRemoteRole() == null || mapping.getLocalRole() == null || StringUtils
                        .isBlank(mapping.getLocalRole().getLocalRoleName())) {
                    continue;
                }
                UserStoreManager usm = IdPManagementServiceComponent.getRealmService().getTenantUserRealm(tenantId)
                        .getUserStoreManager();
                String role = mapping.getLocalRole().getLocalRoleName();
                // Qualify the role with its user store domain when one is specified.
                if (StringUtils.isNotBlank(mapping.getLocalRole().getUserStoreId())) {
                    role = IdentityUtil.addDomainToName(role, mapping.getLocalRole().getUserStoreId());
                }
                // Remove invalid mappings if local role does not exists.
                if (usm.isExistingRole(role)) {
                    validRoleMappings.add(mapping);
                    validIdPRoles.add(mapping.getRemoteRole());
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug("Invalid local role name: " + role + " for the federated role: " + mapping
                                .getRemoteRole());
                    }
                }
            } catch (UserStoreException e) {
                throw new IdentityProviderManagementException(
                        "Error occurred while retrieving UserStoreManager for tenant " + tenantDomain, e);
            }
        }
        // Replace the configuration content with only the verified mappings.
        roleConfiguration.setRoleMappings(validRoleMappings.toArray(new RoleMapping[0]));
        roleConfiguration.setIdpRoles(validIdPRoles.toArray(new String[0]));
    }
/**
* Validate input parameters for the getIdPByResourceId function.
*
* @param resourceId Identity Provider resource ID.
* @throws IdentityProviderManagementException IdentityProviderManagementException
*/
private void validateGetIdPInputValues(String resourceId) throws IdentityProviderManagementException {
if (StringUtils.isEmpty(resourceId)) {
String data = "Invalid argument: Identity Provider resource ID value is empty";
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_GET_REQUEST_INVALID, data);
}
}
/**
* Validate input parameters for the addIdPWithResourceId function.
*
* @param idpName Identity Provider name.
* @param tenantDomain Tenant domain of IDP.
* @throws IdentityProviderManagementException IdentityProviderManagementException
*/
private void validateAddIdPInputValues(String idpName, String tenantDomain) throws
IdentityProviderManagementException {
if (IdentityProviderManager.getInstance().getIdPByName(idpName, tenantDomain, true) != null) {
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_ALREADY_EXISTS, idpName);
}
if (IdPManagementServiceComponent.getFileBasedIdPs().containsKey(idpName)
&& !idpName.startsWith(IdPManagementConstants.SHARED_IDP_PREFIX)) {
//If an IDP with name starting with "SHARED_" is added from UI, It's blocked at the service class
// before calling this method
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_ALREADY_EXISTS, idpName);
}
}
/**
* Validate input parameters for the updateIdPByResourceId function.
*
* @param currentIdentityProvider Old Identity Provider Information.
* @param resourceId Identity Provider's resource ID.
* @param newIdPName New Identity Provider name.
* @throws IdentityProviderManagementException IdentityProviderManagementException
*/
private void validateUpdateIdPInputValues(IdentityProvider currentIdentityProvider, String resourceId, String
newIdPName) throws IdentityProviderManagementException {
if (IdPManagementServiceComponent.getFileBasedIdPs().containsKey(newIdPName)) {
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_ALREADY_EXISTS, newIdPName);
}
if (currentIdentityProvider == null) {
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_DOES_NOT_EXIST, resourceId);
}
}
/**
* Check whether PermissionAndRoleConfig is configured for the IDP.
*
* @param identityProvider Identity Provider information.
* @return whether config exists.
*/
private boolean isPermissionAndRoleConfigExist(IdentityProvider identityProvider) {
return identityProvider.getPermissionAndRoleConfig() != null
&& identityProvider.getPermissionAndRoleConfig().getRoleMappings() != null;
}
/**
* Check whether metadata file is configured for the IDP.
*
* @param idpName Identity Provider name.
* @param metadata Metadata string.
* @return whether metadata exists.
*/
private boolean isMetadataFileExist(String idpName, String metadata) {
return StringUtils.isNotEmpty(idpName) && StringUtils.isNotEmpty(metadata);
}
}
|
Idp Pagination validate offset
|
components/idp-mgt/org.wso2.carbon.idp.mgt/src/main/java/org/wso2/carbon/idp/mgt/IdentityProviderManager.java
|
Idp Pagination validate offset
|
|
Java
|
apache-2.0
|
acd1fbc36bf19233d0abe2e993c42af9408683d3
| 0
|
metaborg/nabl,metaborg/nabl,metaborg/nabl
|
package mb.nabl2.terms.stratego;
import java.util.ArrayList;
import java.util.List;
import org.spoofax.interpreter.terms.IStrategoTerm;
import org.spoofax.interpreter.terms.ITermFactory;
import org.spoofax.terms.io.TermFactoryVisitor;
import org.spoofax.terms.io.TermVisitor;
import mb.nabl2.terms.ITerm;
import mb.nabl2.terms.build.Attachments;
import mb.nabl2.terms.build.ITermBuild;
/**
 * Streams a Stratego term (via the {@link TermVisitor} callbacks) into a NaBL2
 * {@link ITerm} built with the given {@link ITermBuild}.
 *
 * <p>The visitor is stateful: each {@code visitX} call resets the per-node state
 * (see {@link #visit()}), sub-terms and annotations are accumulated by nested
 * visitors into the parent's {@code subTerms}/{@code annos} lists, and the
 * corresponding {@code endX} call assembles the finished term and hands it to
 * the abstract {@link #setTerm(ITerm)} hook.
 *
 * <p>Not thread-safe. Reals and placeholders are rejected.
 */
public abstract class TermBuildVisitor implements TermVisitor {
    private final ITermBuild build;
    private final ITermFactory factory;
    public TermBuildVisitor(ITermBuild build, ITermFactory factory) {
        this.build = build;
        this.factory = factory;
    }
    /** Receives the fully-built term once the current node is complete. */
    public abstract void setTerm(ITerm term);
    // Pending int value between visitInt and endInt.
    private Integer i;
    @Override public void visitInt(int value) {
        visit();
        this.i = value;
    }
    @Override public void endInt() {
        setTerm(withAnnos(build.newInt(i)));
    }
    // Pending real value; only stored so visitReal resets state — endReal always throws.
    @SuppressWarnings("unused") private Double d;
    @Override public void visitReal(double value) {
        visit();
        this.d = value;
    }
    @Override public void endReal() {
        throw new IllegalArgumentException("Reals are not supported.");
    }
    // Pending string value between visitString and endString.
    private String s;
    @Override public void visitString(String value) {
        visit();
        this.s = value;
    }
    @Override public void endString() {
        setTerm(withAnnos(build.newString(s)));
    }
    // Pending constructor name between visitAppl and endAppl.
    private String c;
    @Override public void visitAppl(String name) {
        visit();
        this.c = name;
    }
    @Override public void endAppl() {
        setTerm(withAnnos(build.newAppl(c, subTerms)));
    }
    @Override public void visitTuple() {
        visit();
    }
    @Override public void endTuple() {
        setTerm(withAnnos(build.newTuple(subTerms)));
    }
    @Override public void visitList() {
        visit();
    }
    @Override public void endList() {
        setTerm(withAnnos(build.newList(subTerms)));
    }
    @Override public TermVisitor visitPlaceholder() {
        throw new IllegalArgumentException("Placeholders are not supported.");
    }
    // Sub-terms collected for the node currently being built.
    List<ITerm> subTerms = new ArrayList<>();
    @Override public TermVisitor visitSubTerm() {
        // Capture THIS visitor explicitly: the returned anonymous class is itself a
        // TermBuildVisitor with its own subTerms field, so an unqualified reference
        // would bind to the child's list instead of the parent's.
        TermBuildVisitor outer = this;
        return new TermBuildVisitor(build, factory) {
            @Override public void setTerm(ITerm subTerm) {
                outer.subTerms.add(subTerm);
            }
        };
    }
    // Raw Stratego annotations collected for the node currently being built.
    List<IStrategoTerm> annos = new ArrayList<>();
    @Override public TermVisitor visitAnnotation() {
        // Same explicit capture as visitSubTerm, for the same shadowing reason.
        TermBuildVisitor outer = this;
        return new TermFactoryVisitor(factory) {
            @Override public void setTerm(IStrategoTerm anno) {
                outer.annos.add(anno);
            }
        };
    }
    // Attaches collected Stratego annotations (if any) to the built term.
    private ITerm withAnnos(ITerm term) {
        if(annos.isEmpty()) {
            return term;
        } else {
            return term.withAttachments(Attachments.of(StrategoAnnotations.class, StrategoAnnotations.of(annos)));
        }
    }
    // Resets all per-node state before a new node is visited.
    private void visit() {
        i = null;
        d = null;
        s = null;
        c = null;
        subTerms.clear();
        annos.clear();
    }
}
|
nabl2.terms/src/main/java/mb/nabl2/terms/stratego/TermBuildVisitor.java
|
package mb.nabl2.terms.stratego;
import java.util.ArrayList;
import java.util.List;
import org.spoofax.interpreter.terms.IStrategoTerm;
import org.spoofax.interpreter.terms.ITermFactory;
import org.spoofax.terms.io.TermFactoryVisitor;
import org.spoofax.terms.io.TermVisitor;
import mb.nabl2.terms.ITerm;
import mb.nabl2.terms.build.Attachments;
import mb.nabl2.terms.build.ITermBuild;
/**
 * Streams a Stratego term (via the {@link TermVisitor} callbacks) into a NaBL2
 * {@link ITerm} built with the given {@link ITermBuild}.
 *
 * <p>Stateful: each {@code visitX} call resets per-node state, nested visitors
 * accumulate sub-terms/annotations into the parent's lists, and {@code endX}
 * assembles the term and passes it to the abstract {@link #setTerm(ITerm)} hook.
 *
 * <p>Not thread-safe. Reals and placeholders are rejected.
 */
public abstract class TermBuildVisitor implements TermVisitor {
    private final ITermBuild build;
    private final ITermFactory factory;
    public TermBuildVisitor(ITermBuild build, ITermFactory factory) {
        this.build = build;
        this.factory = factory;
    }
    /** Receives the fully-built term once the current node is complete. */
    public abstract void setTerm(ITerm term);
    // Pending int value between visitInt and endInt.
    private Integer i;
    @Override public void visitInt(int value) {
        visit();
        this.i = value;
    }
    @Override public void endInt() {
        setTerm(withAnnos(build.newInt(i)));
    }
    // Pending real value; only stored so visitReal resets state — endReal always throws.
    @SuppressWarnings("unused") private Double d;
    @Override public void visitReal(double value) {
        visit();
        this.d = value;
    }
    @Override public void endReal() {
        throw new IllegalArgumentException("Reals are not supported.");
    }
    // Pending string value between visitString and endString.
    private String s;
    @Override public void visitString(String value) {
        visit();
        this.s = value;
    }
    @Override public void endString() {
        setTerm(withAnnos(build.newString(s)));
    }
    // Pending constructor name between visitAppl and endAppl.
    private String c;
    @Override public void visitAppl(String name) {
        visit();
        this.c = name;
    }
    @Override public void endAppl() {
        setTerm(withAnnos(build.newAppl(c, subTerms)));
    }
    @Override public void visitTuple() {
        visit();
    }
    @Override public void endTuple() {
        setTerm(withAnnos(build.newTuple(subTerms)));
    }
    @Override public void visitList() {
        visit();
    }
    @Override public void endList() {
        setTerm(withAnnos(build.newList(subTerms)));
    }
    @Override public TermVisitor visitPlaceholder() {
        throw new IllegalArgumentException("Placeholders are not supported.");
    }
    // Sub-terms collected for the node currently being built.
    List<ITerm> subTerms = new ArrayList<>();
    @Override public TermVisitor visitSubTerm() {
        // FIX (field scoping): the returned anonymous class is itself a
        // TermBuildVisitor with its own subTerms field, so the previous
        // unqualified `subTerms.add(...)` added to the CHILD's list and the
        // collected sub-terms were silently lost. Capture the parent explicitly.
        TermBuildVisitor outer = this;
        return new TermBuildVisitor(build, factory) {
            @Override public void setTerm(ITerm subTerm) {
                outer.subTerms.add(subTerm);
            }
        };
    }
    // Raw Stratego annotations collected for the node currently being built.
    List<IStrategoTerm> annos = new ArrayList<>();
    @Override public TermVisitor visitAnnotation() {
        // Same field-scoping fix as visitSubTerm: add to the parent's annos list,
        // not the anonymous child's inherited one.
        TermBuildVisitor outer = this;
        return new TermFactoryVisitor(factory) {
            @Override public void setTerm(IStrategoTerm anno) {
                outer.annos.add(anno);
            }
        };
    }
    // Attaches collected Stratego annotations (if any) to the built term.
    private ITerm withAnnos(ITerm term) {
        if(annos.isEmpty()) {
            return term;
        } else {
            return term.withAttachments(Attachments.of(StrategoAnnotations.class, StrategoAnnotations.of(annos)));
        }
    }
    // Resets all per-node state before a new node is visited.
    private void visit() {
        i = null;
        d = null;
        s = null;
        c = null;
        subTerms.clear();
        annos.clear();
    }
}
|
Fix field scoping.
|
nabl2.terms/src/main/java/mb/nabl2/terms/stratego/TermBuildVisitor.java
|
Fix field scoping.
|
|
Java
|
apache-2.0
|
bb2c02d14478ec3c1256f8739dd3e9a09deacb10
| 0
|
wido/cloudstack,wido/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,jcshen007/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,jcshen007/cloudstack,wido/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,jcshen007/cloudstack
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.storage.resource;
import static com.cloud.utils.S3Utils.mputFile;
import static com.cloud.utils.S3Utils.putFile;
import static com.cloud.utils.StringUtils.join;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static org.apache.commons.lang.StringUtils.substringAfterLast;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.net.InetAddress;
import java.net.URI;
import java.net.UnknownHostException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.naming.ConfigurationException;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.storage.Storage;
import com.cloud.storage.template.TemplateConstants;
import com.cloud.utils.EncryptionUtil;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.http.HttpContentCompressor;
import io.netty.handler.codec.http.HttpRequestDecoder;
import io.netty.handler.codec.http.HttpResponseEncoder;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import org.apache.cloudstack.storage.command.TemplateOrVolumePostUploadCommand;
import org.apache.cloudstack.storage.template.UploadEntity;
import org.apache.cloudstack.utils.imagestore.ImageStoreUtil;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.log4j.Logger;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import org.apache.cloudstack.framework.security.keystore.KeystoreManager;
import org.apache.cloudstack.storage.command.CopyCmdAnswer;
import org.apache.cloudstack.storage.command.CopyCommand;
import org.apache.cloudstack.storage.command.DeleteCommand;
import org.apache.cloudstack.storage.command.DownloadCommand;
import org.apache.cloudstack.storage.command.DownloadProgressCommand;
import org.apache.cloudstack.storage.command.UploadStatusAnswer;
import org.apache.cloudstack.storage.command.UploadStatusAnswer.UploadStatus;
import org.apache.cloudstack.storage.command.UploadStatusCommand;
import org.apache.cloudstack.storage.template.DownloadManager;
import org.apache.cloudstack.storage.template.DownloadManagerImpl;
import org.apache.cloudstack.storage.template.DownloadManagerImpl.ZfsPathParser;
import org.apache.cloudstack.storage.template.UploadManager;
import org.apache.cloudstack.storage.template.UploadManagerImpl;
import org.apache.cloudstack.storage.to.SnapshotObjectTO;
import org.apache.cloudstack.storage.to.TemplateObjectTO;
import org.apache.cloudstack.storage.to.VolumeObjectTO;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.CheckHealthAnswer;
import com.cloud.agent.api.CheckHealthCommand;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.ComputeChecksumCommand;
import com.cloud.agent.api.DeleteSnapshotsDirCommand;
import com.cloud.agent.api.GetStorageStatsAnswer;
import com.cloud.agent.api.GetStorageStatsCommand;
import com.cloud.agent.api.PingCommand;
import com.cloud.agent.api.PingStorageCommand;
import com.cloud.agent.api.ReadyAnswer;
import com.cloud.agent.api.ReadyCommand;
import com.cloud.agent.api.SecStorageFirewallCfgCommand;
import com.cloud.agent.api.SecStorageFirewallCfgCommand.PortConfig;
import com.cloud.agent.api.SecStorageSetupAnswer;
import com.cloud.agent.api.SecStorageSetupCommand;
import com.cloud.agent.api.SecStorageVMSetupCommand;
import com.cloud.agent.api.StartupCommand;
import com.cloud.agent.api.StartupSecondaryStorageCommand;
import com.cloud.agent.api.storage.CreateEntityDownloadURLCommand;
import com.cloud.agent.api.storage.DeleteEntityDownloadURLCommand;
import com.cloud.agent.api.storage.DownloadAnswer;
import com.cloud.agent.api.storage.ListTemplateAnswer;
import com.cloud.agent.api.storage.ListTemplateCommand;
import com.cloud.agent.api.storage.ListVolumeAnswer;
import com.cloud.agent.api.storage.ListVolumeCommand;
import com.cloud.agent.api.storage.UploadCommand;
import com.cloud.agent.api.to.DataObjectType;
import com.cloud.agent.api.to.DataStoreTO;
import com.cloud.agent.api.to.DataTO;
import com.cloud.agent.api.to.NfsTO;
import com.cloud.agent.api.to.S3TO;
import com.cloud.agent.api.to.SwiftTO;
import com.cloud.exception.InternalErrorException;
import com.cloud.host.Host;
import com.cloud.host.Host.Type;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.resource.ServerResourceBase;
import com.cloud.storage.DataStoreRole;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.StorageLayer;
import com.cloud.storage.VMTemplateStorageResourceAssoc;
import com.cloud.storage.template.OVAProcessor;
import com.cloud.storage.template.Processor;
import com.cloud.storage.template.Processor.FormatInfo;
import com.cloud.storage.template.QCOW2Processor;
import com.cloud.storage.template.RawImageProcessor;
import com.cloud.storage.template.TARProcessor;
import com.cloud.storage.template.TemplateLocation;
import com.cloud.storage.template.TemplateProp;
import com.cloud.storage.template.VhdProcessor;
import com.cloud.storage.template.VmdkProcessor;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.S3Utils;
import com.cloud.utils.S3Utils.FileNamingStrategy;
import com.cloud.utils.SwiftUtil;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.net.NetUtils;
import com.cloud.utils.script.OutputInterpreter;
import com.cloud.utils.script.Script;
import com.cloud.vm.SecondaryStorageVm;
import org.joda.time.DateTime;
import org.joda.time.format.ISODateTimeFormat;
public class NfsSecondaryStorageResource extends ServerResourceBase implements SecondaryStorageResource {
private static final Logger s_logger = Logger.getLogger(NfsSecondaryStorageResource.class);
private static final String TEMPLATE_ROOT_DIR = "template/tmpl";
private static final String VOLUME_ROOT_DIR = "volumes";
private static final String POST_UPLOAD_KEY_LOCATION = "/etc/cloudstack/agent/ms-psk";
int _timeout;
    // Accessors for the command timeout used by this resource.
    // NOTE(review): units are not established by this file — presumably milliseconds
    // or seconds depending on the caller; confirm against the configuration source.
    public int getTimeout() {
        return _timeout;
    }
    public void setTimeout(int timeout) {
        _timeout = timeout;
    }
String _instance;
String _dc;
String _pod;
String _guid;
String _role;
Map<String, Object> _params;
protected StorageLayer _storage;
protected boolean _inSystemVM = false;
boolean _sslCopy = false;
protected DownloadManager _dlMgr;
protected UploadManager _upldMgr;
private String _configSslScr;
private String _configAuthScr;
private String _configIpFirewallScr;
private String _publicIp;
private String _hostname;
private String _localgw;
private String _eth1mask;
private String _eth1ip;
private String _storageIp;
private String _storageNetmask;
private String _storageGateway;
private final List<String> nfsIps = new ArrayList<String>();
protected String _parent = "/mnt/SecStorage";
final private String _tmpltpp = "template.properties";
protected String createTemplateFromSnapshotXenScript;
private HashMap<String,UploadEntity> uploadEntityStateMap = new HashMap<String,UploadEntity>();
private String _ssvmPSK = null;
    // Overrides the root under which secondary storage is mounted (default "/mnt/SecStorage").
    public void setParentPath(String path) {
        _parent = path;
    }
    // Returns the root directory under which NFS stores are mounted.
    public String getMountingRoot() {
        return _parent;
    }
    @Override
    public void disconnected() {
        // Intentionally a no-op.
    }
    // Marks whether this resource runs inside the secondary-storage system VM.
    public void setInSystemVM(boolean inSystemVM) {
        _inSystemVM = inSystemVM;
    }
    /**
     * Dispatches an agent {@link Command} to the matching handler.
     *
     * <p>Dispatch is an ordered chain of {@code instanceof} checks, so the order is
     * significant: {@code DownloadProgressCommand} is tested before
     * {@code DownloadCommand} — presumably a subclass (TODO confirm hierarchy).
     * Unknown command types yield an unsupported-command answer.
     *
     * @param cmd the command to execute.
     * @return the handler's answer, or an unsupported-command answer.
     */
    @Override
    public Answer executeRequest(Command cmd) {
        if (cmd instanceof DownloadProgressCommand) {
            return _dlMgr.handleDownloadCommand(this, (DownloadProgressCommand)cmd);
        } else if (cmd instanceof DownloadCommand) {
            return execute((DownloadCommand)cmd);
        } else if (cmd instanceof UploadCommand) {
            return _upldMgr.handleUploadCommand(this, (UploadCommand)cmd);
        } else if (cmd instanceof CreateEntityDownloadURLCommand) {
            return _upldMgr.handleCreateEntityURLCommand((CreateEntityDownloadURLCommand)cmd);
        } else if (cmd instanceof DeleteEntityDownloadURLCommand) {
            return _upldMgr.handleDeleteEntityDownloadURLCommand((DeleteEntityDownloadURLCommand)cmd);
        } else if (cmd instanceof GetStorageStatsCommand) {
            return execute((GetStorageStatsCommand)cmd);
        } else if (cmd instanceof CheckHealthCommand) {
            // Health checks always report healthy here.
            return new CheckHealthAnswer((CheckHealthCommand)cmd, true);
        } else if (cmd instanceof ReadyCommand) {
            return new ReadyAnswer((ReadyCommand)cmd);
        } else if (cmd instanceof SecStorageFirewallCfgCommand) {
            return execute((SecStorageFirewallCfgCommand)cmd);
        } else if (cmd instanceof SecStorageVMSetupCommand) {
            return execute((SecStorageVMSetupCommand)cmd);
        } else if (cmd instanceof SecStorageSetupCommand) {
            return execute((SecStorageSetupCommand)cmd);
        } else if (cmd instanceof ComputeChecksumCommand) {
            return execute((ComputeChecksumCommand)cmd);
        } else if (cmd instanceof ListTemplateCommand) {
            return execute((ListTemplateCommand)cmd);
        } else if (cmd instanceof ListVolumeCommand) {
            return execute((ListVolumeCommand)cmd);
        } else if (cmd instanceof DeleteSnapshotsDirCommand) {
            return execute((DeleteSnapshotsDirCommand)cmd);
        } else if (cmd instanceof CopyCommand) {
            return execute((CopyCommand)cmd);
        } else if (cmd instanceof DeleteCommand) {
            return execute((DeleteCommand)cmd);
        } else if (cmd instanceof UploadStatusCommand) {
            return execute((UploadStatusCommand)cmd);
        } else {
            return Answer.createUnsupportedCommandAnswer(cmd);
        }
    }
    /**
     * Post-processes a file just downloaded to secondary storage: for snapshots it
     * only records the path; for templates/volumes it runs the appropriate install
     * script (createtmplt.sh / createvolume.sh) to unpack the file, then builds the
     * answer describing the installed object.
     *
     * @param destFile     the downloaded file on local storage.
     * @param downloadPath absolute local directory containing destFile.
     * @param destPath     store-relative destination path reported back in the answer.
     * @param srcData      source object descriptor (determines format/script).
     * @param destData     destination object descriptor (determines answer type).
     * @return a {@link CopyCmdAnswer} describing the resulting object.
     * @throws ConfigurationException if a required install script cannot be located.
     */
    protected CopyCmdAnswer postProcessing(File destFile, String downloadPath, String destPath, DataTO srcData, DataTO destData) throws ConfigurationException {
        if (destData.getObjectType() == DataObjectType.SNAPSHOT) {
            // Snapshots need no unpacking — just report where the file landed.
            SnapshotObjectTO snapshot = new SnapshotObjectTO();
            snapshot.setPath(destPath + File.separator + destFile.getName());
            CopyCmdAnswer answer = new CopyCmdAnswer(snapshot);
            return answer;
        }
        // do post processing to unzip the file if it is compressed
        String scriptsDir = "scripts/storage/secondary";
        String createTmpltScr = Script.findScript(scriptsDir, "createtmplt.sh");
        if (createTmpltScr == null) {
            throw new ConfigurationException("Unable to find createtmplt.sh");
        }
        s_logger.info("createtmplt.sh found in " + createTmpltScr);
        String createVolScr = Script.findScript(scriptsDir, "createvolume.sh");
        if (createVolScr == null) {
            throw new ConfigurationException("Unable to find createvolume.sh");
        }
        s_logger.info("createvolume.sh found in " + createVolScr);
        // Templates and volumes use different install scripts.
        String script = srcData.getObjectType() == DataObjectType.TEMPLATE ? createTmpltScr : createVolScr;
        // Script timeout scales with the file size: 3 hours per GiB.
        int installTimeoutPerGig = 180 * 60 * 1000;
        long imgSizeGigs = (long)Math.ceil(destFile.length() * 1.0d / (1024 * 1024 * 1024));
        imgSizeGigs++; // add one just in case
        long timeout = imgSizeGigs * installTimeoutPerGig;
        String origPath = destFile.getAbsolutePath();
        String extension = null;
        if (srcData.getObjectType() == DataObjectType.TEMPLATE) {
            extension = ((TemplateObjectTO)srcData).getFormat().getFileExtension();
        } else if (srcData.getObjectType() == DataObjectType.VOLUME) {
            extension = ((VolumeObjectTO)srcData).getFormat().getFileExtension();
        }
        // Install under a fresh UUID-based file name.
        String templateName = UUID.randomUUID().toString();
        String templateFilename = templateName + "." + extension;
        Script scr = new Script(script, timeout, s_logger);
        scr.add("-s", Long.toString(imgSizeGigs)); // not used for now
        scr.add("-n", templateFilename);
        scr.add("-t", downloadPath);
        scr.add("-f", origPath); // this is the temporary
        // template file downloaded
        String result;
        result = scr.execute();
        if (result != null) {
            // script execution failure
            throw new CloudRuntimeException("Failed to run script " + script);
        }
        String finalFileName = templateFilename;
        String finalDownloadPath = destPath + File.separator + templateFilename;
        // compute the size of
        long size = _storage.getSize(downloadPath + File.separator + templateFilename);
        DataTO newDestTO = null;
        if (destData.getObjectType() == DataObjectType.TEMPLATE) {
            TemplateObjectTO newTemplTO = new TemplateObjectTO();
            newTemplTO.setPath(finalDownloadPath);
            newTemplTO.setName(finalFileName);
            newTemplTO.setSize(size);
            newTemplTO.setPhysicalSize(size);
            newDestTO = newTemplTO;
        } else {
            VolumeObjectTO newVolTO = new VolumeObjectTO();
            newVolTO.setPath(finalDownloadPath);
            newVolTO.setName(finalFileName);
            newVolTO.setSize(size);
            newDestTO = newVolTO;
        }
        return new CopyCmdAnswer(newDestTO);
    }
protected Answer copyFromSwiftToNfs(CopyCommand cmd, DataTO srcData, SwiftTO swiftTO, DataTO destData, NfsTO destImageStore) {
final String storagePath = destImageStore.getUrl();
final String destPath = destData.getPath();
try {
String downloadPath = determineStorageTemplatePath(storagePath, destPath);
final File downloadDirectory = _storage.getFile(downloadPath);
if (!downloadDirectory.mkdirs()) {
return new CopyCmdAnswer("Failed to create download directory " + downloadPath);
}
File destFile = SwiftUtil.getObject(swiftTO, downloadDirectory, srcData.getPath());
return postProcessing(destFile, downloadPath, destPath, srcData, destData);
} catch (Exception e) {
s_logger.debug("Failed to copy swift to nfs", e);
return new CopyCmdAnswer(e.toString());
}
}
protected Answer copyFromS3ToNfs(CopyCommand cmd, DataTO srcData, S3TO s3, DataTO destData, NfsTO destImageStore) {
final String storagePath = destImageStore.getUrl();
final String destPath = destData.getPath();
try {
String downloadPath = determineStorageTemplatePath(storagePath, destPath);
final File downloadDirectory = _storage.getFile(downloadPath);
if (downloadDirectory.exists()) {
s_logger.debug("Directory " + downloadPath + " already exists");
} else {
if (!downloadDirectory.mkdirs()) {
final String errMsg = "Unable to create directory " + downloadPath + " to copy from S3 to cache.";
s_logger.error(errMsg);
return new CopyCmdAnswer(errMsg);
}
}
File destFile = S3Utils.getFile(s3, s3.getBucketName(), srcData.getPath(), downloadDirectory, new FileNamingStrategy() {
@Override
public String determineFileName(final String key) {
return substringAfterLast(key, S3Utils.SEPARATOR);
}
});
if (destFile == null) {
return new CopyCmdAnswer("Can't find template");
}
return postProcessing(destFile, downloadPath, destPath, srcData, destData);
} catch (Exception e) {
final String errMsg = format("Failed to download" + "due to $2%s", e.getMessage());
s_logger.error(errMsg, e);
return new CopyCmdAnswer(errMsg);
}
}
protected Answer copySnapshotToTemplateFromNfsToNfsXenserver(CopyCommand cmd, SnapshotObjectTO srcData, NfsTO srcDataStore, TemplateObjectTO destData,
NfsTO destDataStore) {
String srcMountPoint = getRootDir(srcDataStore.getUrl());
String snapshotPath = srcData.getPath();
int index = snapshotPath.lastIndexOf("/");
String snapshotName = snapshotPath.substring(index + 1);
if (!snapshotName.startsWith("VHD-") && !snapshotName.endsWith(".vhd")) {
snapshotName = snapshotName + ".vhd";
}
snapshotPath = snapshotPath.substring(0, index);
snapshotPath = srcMountPoint + File.separator + snapshotPath;
String destMountPoint = getRootDir(destDataStore.getUrl());
String destPath = destMountPoint + File.separator + destData.getPath();
String errMsg = null;
try {
_storage.mkdir(destPath);
String templateUuid = UUID.randomUUID().toString();
String templateName = templateUuid + ".vhd";
Script command = new Script(createTemplateFromSnapshotXenScript, cmd.getWait() * 1000, s_logger);
command.add("-p", snapshotPath);
command.add("-s", snapshotName);
command.add("-n", templateName);
command.add("-t", destPath);
String result = command.execute();
if (result != null && !result.equalsIgnoreCase("")) {
return new CopyCmdAnswer(result);
}
Map<String, Object> params = new HashMap<String, Object>();
params.put(StorageLayer.InstanceConfigKey, _storage);
Processor processor = new VhdProcessor();
processor.configure("Vhd Processor", params);
FormatInfo info = processor.process(destPath, null, templateUuid);
TemplateLocation loc = new TemplateLocation(_storage, destPath);
loc.create(1, true, templateUuid);
loc.addFormat(info);
loc.save();
TemplateProp prop = loc.getTemplateInfo();
TemplateObjectTO newTemplate = new TemplateObjectTO();
newTemplate.setPath(destData.getPath() + File.separator + templateName);
newTemplate.setFormat(ImageFormat.VHD);
newTemplate.setSize(prop.getSize());
newTemplate.setPhysicalSize(prop.getPhysicalSize());
newTemplate.setName(templateUuid);
return new CopyCmdAnswer(newTemplate);
} catch (ConfigurationException e) {
s_logger.debug("Failed to create template from snapshot: " + e.toString());
errMsg = e.toString();
} catch (InternalErrorException e) {
s_logger.debug("Failed to create template from snapshot: " + e.toString());
errMsg = e.toString();
} catch (IOException e) {
s_logger.debug("Failed to create template from snapshot: " + e.toString());
errMsg = e.toString();
}
return new CopyCmdAnswer(errMsg);
}
    /**
     * Creates a template from an NFS-resident snapshot on another NFS store.
     * XenServer snapshots are delegated to the script-based path; KVM snapshots are
     * copied directly, a template.properties metadata file is generated, and the
     * template is registered via {@link TemplateLocation}. Other hypervisors fall
     * through to an empty (failure) answer.
     *
     * @param cmd           copy command being served.
     * @param srcData       snapshot descriptor on the source store.
     * @param srcDataStore  source NFS store.
     * @param destData      template descriptor on the destination store.
     * @param destDataStore destination NFS store.
     * @return a {@link CopyCmdAnswer} with the new template, or an error answer.
     */
    protected Answer copySnapshotToTemplateFromNfsToNfs(CopyCommand cmd, SnapshotObjectTO srcData, NfsTO srcDataStore, TemplateObjectTO destData, NfsTO destDataStore) {
        if (srcData.getHypervisorType() == HypervisorType.XenServer) {
            return copySnapshotToTemplateFromNfsToNfsXenserver(cmd, srcData, srcDataStore, destData, destDataStore);
        } else if (srcData.getHypervisorType() == HypervisorType.KVM) {
            File srcFile = getFile(srcData.getPath(), srcDataStore.getUrl());
            File destFile = getFile(destData.getPath(), destDataStore.getUrl());
            VolumeObjectTO volumeObjectTO = srcData.getVolume();
            ImageFormat srcFormat = null;
            //TODO: the image format should be stored in snapshot table, instead of getting from volume
            if (volumeObjectTO != null) {
                srcFormat = volumeObjectTO.getFormat();
            } else {
                srcFormat = ImageFormat.QCOW2;
            }
            // get snapshot file name
            String templateName = srcFile.getName();
            // add kvm file extension for copied template name
            String fileName = templateName + "." + srcFormat.getFileExtension();
            String destFileFullPath = destFile.getAbsolutePath() + File.separator + fileName;
            s_logger.debug("copy snapshot " + srcFile.getAbsolutePath() + " to template " + destFileFullPath);
            Script.runSimpleBashScript("cp " + srcFile.getAbsolutePath() + " " + destFileFullPath);
            String metaFileName = destFile.getAbsolutePath() + File.separator + "template.properties";
            File metaFile = new File(metaFileName);
            try {
                _storage.create(destFile.getAbsolutePath(), "template.properties");
                try ( // generate template.properties file
                    FileWriter writer = new FileWriter(metaFile);
                    BufferedWriter bufferWriter = new BufferedWriter(writer);
                ) {
                    // KVM didn't change template unique name, just used the template name passed from orchestration layer, so no need
                    // to send template name back.
                    bufferWriter.write("uniquename=" + destData.getName());
                    bufferWriter.write("\n");
                    bufferWriter.write("filename=" + fileName);
                    bufferWriter.write("\n");
                    long size = _storage.getSize(destFileFullPath);
                    bufferWriter.write("size=" + size);
                    /**
                     * Snapshots might be in either QCOW2 or RAW image format
                     *
                     * For example RBD snapshots are in RAW format
                     */
                    Processor processor = null;
                    if (srcFormat == ImageFormat.QCOW2) {
                        processor = new QCOW2Processor();
                    } else if (srcFormat == ImageFormat.RAW) {
                        processor = new RawImageProcessor();
                    } else {
                        throw new ConfigurationException("Unknown image format " + srcFormat.toString());
                    }
                    Map<String, Object> params = new HashMap<String, Object>();
                    params.put(StorageLayer.InstanceConfigKey, _storage);
                    processor.configure("template processor", params);
                    String destPath = destFile.getAbsolutePath();
                    FormatInfo info = processor.process(destPath, null, templateName);
                    TemplateLocation loc = new TemplateLocation(_storage, destPath);
                    loc.create(1, true, destData.getName());
                    loc.addFormat(info);
                    loc.save();
                    TemplateProp prop = loc.getTemplateInfo();
                    TemplateObjectTO newTemplate = new TemplateObjectTO();
                    newTemplate.setPath(destData.getPath() + File.separator + fileName);
                    newTemplate.setFormat(srcFormat);
                    newTemplate.setSize(prop.getSize());
                    newTemplate.setPhysicalSize(prop.getPhysicalSize());
                    return new CopyCmdAnswer(newTemplate);
                } catch (ConfigurationException e) {
                    s_logger.debug("Failed to create template:" + e.toString());
                    return new CopyCmdAnswer(e.toString());
                } catch (InternalErrorException e) {
                    s_logger.debug("Failed to create template:" + e.toString());
                    return new CopyCmdAnswer(e.toString());
                }
            } catch (IOException e) {
                s_logger.debug("Failed to create template:" + e.toString());
                return new CopyCmdAnswer(e.toString());
            }
        }
        // Unsupported hypervisor type: an empty answer string marks the copy as failed.
        return new CopyCmdAnswer("");
    }
protected File getFile(String path, String nfsPath) {
String filePath = getRootDir(nfsPath) + File.separator + path;
File f = new File(filePath);
if (!f.exists()) {
_storage.mkdirs(filePath);
f = new File(filePath);
}
return f;
}
protected Answer createTemplateFromSnapshot(CopyCommand cmd) {
DataTO srcData = cmd.getSrcTO();
DataTO destData = cmd.getDestTO();
DataStoreTO srcDataStore = srcData.getDataStore();
DataStoreTO destDataStore = destData.getDataStore();
if (srcDataStore.getRole() == DataStoreRole.Image || srcDataStore.getRole() == DataStoreRole.ImageCache || srcDataStore.getRole() == DataStoreRole.Primary) {
if (!(srcDataStore instanceof NfsTO)) {
s_logger.debug("only support nfs storage as src, when create template from snapshot");
return Answer.createUnsupportedCommandAnswer(cmd);
}
if (destDataStore instanceof NfsTO) {
return copySnapshotToTemplateFromNfsToNfs(cmd, (SnapshotObjectTO)srcData, (NfsTO)srcDataStore, (TemplateObjectTO)destData, (NfsTO)destDataStore);
} else if (destDataStore instanceof SwiftTO) {
//create template on the same data store
CopyCmdAnswer answer =
(CopyCmdAnswer)copySnapshotToTemplateFromNfsToNfs(cmd, (SnapshotObjectTO)srcData, (NfsTO)srcDataStore, (TemplateObjectTO)destData,
(NfsTO)srcDataStore);
if (!answer.getResult()) {
return answer;
}
s_logger.debug("starting copy template to swift");
DataTO newTemplate = answer.getNewData();
File templateFile = getFile(newTemplate.getPath(), ((NfsTO)srcDataStore).getUrl());
SwiftTO swift = (SwiftTO)destDataStore;
String containterName = SwiftUtil.getContainerName(destData.getObjectType().toString(), destData.getId());
String swiftPath = SwiftUtil.putObject(swift, templateFile, containterName, templateFile.getName());
//upload template.properties
File properties = new File(templateFile.getParent() + File.separator + _tmpltpp);
if (properties.exists()) {
SwiftUtil.putObject(swift, properties, containterName, _tmpltpp);
}
//clean up template data on staging area
try {
DeleteCommand deleteCommand = new DeleteCommand(newTemplate);
execute(deleteCommand);
} catch (Exception e) {
s_logger.debug("Failed to clean up staging area:", e);
}
TemplateObjectTO template = new TemplateObjectTO();
template.setPath(swiftPath);
template.setSize(templateFile.length());
template.setPhysicalSize(template.getSize());
SnapshotObjectTO snapshot = (SnapshotObjectTO)srcData;
template.setFormat(snapshot.getVolume().getFormat());
return new CopyCmdAnswer(template);
} else if (destDataStore instanceof S3TO) {
//create template on the same data store
CopyCmdAnswer answer =
(CopyCmdAnswer)copySnapshotToTemplateFromNfsToNfs(cmd, (SnapshotObjectTO)srcData, (NfsTO)srcDataStore, (TemplateObjectTO)destData,
(NfsTO)srcDataStore);
if (!answer.getResult()) {
return answer;
}
TemplateObjectTO newTemplate = (TemplateObjectTO)answer.getNewData();
newTemplate.setDataStore(srcDataStore);
CopyCommand newCpyCmd = new CopyCommand(newTemplate, destData, cmd.getWait(), cmd.executeInSequence());
Answer result = copyFromNfsToS3(newCpyCmd);
//clean up template data on staging area
try {
DeleteCommand deleteCommand = new DeleteCommand(newTemplate);
execute(deleteCommand);
} catch (Exception e) {
s_logger.debug("Failed to clean up staging area:", e);
}
return result;
}
}
s_logger.debug("Failed to create templat from snapshot");
return new CopyCmdAnswer("Unsupported prototcol");
}
protected Answer copyFromNfsToImage(CopyCommand cmd) {
DataTO destData = cmd.getDestTO();
DataStoreTO destDataStore = destData.getDataStore();
if (destDataStore instanceof S3TO) {
return copyFromNfsToS3(cmd);
} else {
return new CopyCmdAnswer("unsupported ");
}
}
protected Answer execute(CopyCommand cmd) {
DataTO srcData = cmd.getSrcTO();
DataTO destData = cmd.getDestTO();
DataStoreTO srcDataStore = srcData.getDataStore();
DataStoreTO destDataStore = destData.getDataStore();
if (srcData.getObjectType() == DataObjectType.SNAPSHOT && destData.getObjectType() == DataObjectType.TEMPLATE) {
return createTemplateFromSnapshot(cmd);
}
if (destDataStore instanceof NfsTO && destDataStore.getRole() == DataStoreRole.ImageCache) {
NfsTO destImageStore = (NfsTO)destDataStore;
if (srcDataStore instanceof S3TO) {
S3TO s3 = (S3TO)srcDataStore;
return copyFromS3ToNfs(cmd, srcData, s3, destData, destImageStore);
} else if (srcDataStore instanceof SwiftTO) {
return copyFromSwiftToNfs(cmd, srcData, (SwiftTO)srcDataStore, destData, destImageStore);
}
}
if (srcDataStore.getRole() == DataStoreRole.ImageCache && destDataStore.getRole() == DataStoreRole.Image) {
return copyFromNfsToImage(cmd);
}
return Answer.createUnsupportedCommandAnswer(cmd);
}
@SuppressWarnings("unchecked")
protected String determineS3TemplateDirectory(final Long accountId, final Long templateId, final String templateUniqueName) {
return join(asList(TEMPLATE_ROOT_DIR, accountId, templateId, templateUniqueName), S3Utils.SEPARATOR);
}
private String determineS3TemplateNameFromKey(String key) {
return StringUtils.substringAfterLast(StringUtils.substringBeforeLast(key, S3Utils.SEPARATOR), S3Utils.SEPARATOR);
}
@SuppressWarnings("unchecked")
protected String determineS3VolumeDirectory(final Long accountId, final Long volId) {
return join(asList(VOLUME_ROOT_DIR, accountId, volId), S3Utils.SEPARATOR);
}
protected Long determineS3VolumeIdFromKey(String key) {
return Long.parseLong(StringUtils.substringAfterLast(StringUtils.substringBeforeLast(key, S3Utils.SEPARATOR), S3Utils.SEPARATOR));
}
private String determineStorageTemplatePath(final String storagePath, String dataPath) {
return join(asList(getRootDir(storagePath), dataPath), File.separator);
}
protected File downloadFromUrlToNfs(String url, NfsTO nfs, String path, String name) {
HttpClient client = new DefaultHttpClient();
HttpGet get = new HttpGet(url);
try {
HttpResponse response = client.execute(get);
HttpEntity entity = response.getEntity();
if (entity == null) {
s_logger.debug("Faled to get entity");
throw new CloudRuntimeException("Failed to get url: " + url);
}
String nfsMountPath = getRootDir(nfs.getUrl());
String filePath = nfsMountPath + File.separator + path;
File directory = new File(filePath);
if (!directory.exists()) {
_storage.mkdirs(filePath);
}
File destFile = new File(filePath + File.separator + name);
if (!destFile.createNewFile()) {
s_logger.warn("Reusing existing file " + destFile.getPath());
}
try(FileOutputStream outputStream = new FileOutputStream(destFile);) {
entity.writeTo(outputStream);
}catch (IOException e) {
s_logger.debug("downloadFromUrlToNfs:Exception:"+e.getMessage(),e);
}
return new File(destFile.getAbsolutePath());
} catch (IOException e) {
s_logger.debug("Faild to get url:" + url + ", due to " + e.toString());
throw new CloudRuntimeException(e);
}
}
protected Answer registerTemplateOnSwift(DownloadCommand cmd) {
SwiftTO swiftTO = (SwiftTO)cmd.getDataStore();
String path = cmd.getInstallPath();
DataStoreTO cacheStore = cmd.getCacheStore();
if (cacheStore == null || !(cacheStore instanceof NfsTO)) {
return new DownloadAnswer("cache store can't be null", VMTemplateStorageResourceAssoc.Status.DOWNLOAD_ERROR);
}
File file = null;
try {
NfsTO nfsCacheStore = (NfsTO)cacheStore;
String fileName = cmd.getName() + "." + cmd.getFormat().getFileExtension();
file = downloadFromUrlToNfs(cmd.getUrl(), nfsCacheStore, path, fileName);
String container = "T-" + cmd.getId();
String swiftPath = SwiftUtil.putObject(swiftTO, file, container, null);
//put metda file
File uniqDir = _storage.createUniqDir();
String metaFileName = uniqDir.getAbsolutePath() + File.separator + "template.properties";
_storage.create(uniqDir.getAbsolutePath(), "template.properties");
File metaFile = new File(metaFileName);
FileWriter writer = new FileWriter(metaFile);
BufferedWriter bufferWriter = new BufferedWriter(writer);
bufferWriter.write("uniquename=" + cmd.getName());
bufferWriter.write("\n");
bufferWriter.write("filename=" + fileName);
bufferWriter.write("\n");
bufferWriter.write("size=" + file.length());
bufferWriter.close();
writer.close();
SwiftUtil.putObject(swiftTO, metaFile, container, "template.properties");
metaFile.delete();
uniqDir.delete();
String md5sum = null;
try (FileInputStream fs = new FileInputStream(file)){
md5sum = DigestUtils.md5Hex(fs);
} catch (IOException e) {
s_logger.debug("Failed to get md5sum: " + file.getAbsoluteFile());
}
DownloadAnswer answer =
new DownloadAnswer(null, 100, null, VMTemplateStorageResourceAssoc.Status.DOWNLOADED, swiftPath, swiftPath, file.length(), file.length(), md5sum);
return answer;
} catch (IOException e) {
s_logger.debug("Failed to register template into swift", e);
return new DownloadAnswer(e.toString(), VMTemplateStorageResourceAssoc.Status.DOWNLOAD_ERROR);
} finally {
if (file != null) {
file.delete();
}
}
}
private Answer execute(DownloadCommand cmd) {
DataStoreTO dstore = cmd.getDataStore();
if (dstore instanceof NfsTO || dstore instanceof S3TO) {
return _dlMgr.handleDownloadCommand(this, cmd);
} else if (dstore instanceof SwiftTO) {
return registerTemplateOnSwift(cmd);
} else {
return new Answer(cmd, false, "Unsupported image data store: " + dstore);
}
}
private ImageFormat getTemplateFormat(String filePath) {
String ext = null;
int extensionPos = filePath.lastIndexOf('.');
int lastSeparator = Math.max(filePath.lastIndexOf('/'), filePath.lastIndexOf('\\'));
int i = lastSeparator > extensionPos ? -1 : extensionPos;
if (i > 0) {
ext = filePath.substring(i + 1);
}
if (ext != null) {
if (ext.equalsIgnoreCase("vhd")) {
return ImageFormat.VHD;
} else if (ext.equalsIgnoreCase("vhdx")) {
return ImageFormat.VHDX;
} else if (ext.equalsIgnoreCase("qcow2")) {
return ImageFormat.QCOW2;
} else if (ext.equalsIgnoreCase("ova")) {
return ImageFormat.OVA;
} else if (ext.equalsIgnoreCase("tar")) {
return ImageFormat.TAR;
} else if (ext.equalsIgnoreCase("img") || ext.equalsIgnoreCase("raw")) {
return ImageFormat.RAW;
} else if (ext.equalsIgnoreCase("vmdk")) {
return ImageFormat.VMDK;
} else if (ext.equalsIgnoreCase("vdi")) {
return ImageFormat.VDI;
}
}
return null;
}
protected long getVirtualSize(File file, ImageFormat format) {
Processor processor = null;
try {
if (format == null) {
return file.length();
} else if (format == ImageFormat.QCOW2) {
processor = new QCOW2Processor();
} else if (format == ImageFormat.OVA) {
processor = new OVAProcessor();
} else if (format == ImageFormat.VHD) {
processor = new VhdProcessor();
} else if (format == ImageFormat.RAW) {
processor = new RawImageProcessor();
} else if (format == ImageFormat.VMDK) {
processor = new VmdkProcessor();
} if (format == ImageFormat.TAR) {
processor = new TARProcessor();
}
if (processor == null) {
return file.length();
}
processor.configure("template processor", new HashMap<String, Object>());
return processor.getVirtualSize(file);
} catch (Exception e) {
s_logger.warn("Failed to get virtual size, returning file size instead:", e);
return file.length();
}
}
protected Answer copyFromNfsToS3(CopyCommand cmd) {
final DataTO srcData = cmd.getSrcTO();
final DataTO destData = cmd.getDestTO();
DataStoreTO srcDataStore = srcData.getDataStore();
NfsTO srcStore = (NfsTO)srcDataStore;
DataStoreTO destDataStore = destData.getDataStore();
final S3TO s3 = (S3TO)destDataStore;
try {
final String templatePath = determineStorageTemplatePath(srcStore.getUrl(), srcData.getPath());
if (s_logger.isDebugEnabled()) {
s_logger.debug("Found " + srcData.getObjectType() + " from directory " + templatePath + " to upload to S3.");
}
final String bucket = s3.getBucketName();
File srcFile = _storage.getFile(templatePath);
// guard the case where templatePath does not have file extension, since we are not completely sure
// about hypervisor, so we check each extension
if (!srcFile.exists()) {
srcFile = _storage.getFile(templatePath + ".qcow2");
if (!srcFile.exists()) {
srcFile = _storage.getFile(templatePath + ".vhd");
if (!srcFile.exists()) {
srcFile = _storage.getFile(templatePath + ".ova");
if (!srcFile.exists()) {
srcFile = _storage.getFile(templatePath + ".vmdk");
if (!srcFile.exists()) {
return new CopyCmdAnswer("Can't find src file:" + templatePath);
}
}
}
}
}
long srcSize = srcFile.length();
ImageFormat format = getTemplateFormat(srcFile.getName());
String key = destData.getPath() + S3Utils.SEPARATOR + srcFile.getName();
if (!s3.getSingleUpload(srcSize)) {
mputFile(s3, srcFile, bucket, key);
} else {
putFile(s3, srcFile, bucket, key);
}
DataTO retObj = null;
if (destData.getObjectType() == DataObjectType.TEMPLATE) {
TemplateObjectTO newTemplate = new TemplateObjectTO();
newTemplate.setPath(key);
newTemplate.setSize(getVirtualSize(srcFile, format));
newTemplate.setPhysicalSize(srcFile.length());
newTemplate.setFormat(format);
retObj = newTemplate;
} else if (destData.getObjectType() == DataObjectType.VOLUME) {
VolumeObjectTO newVol = new VolumeObjectTO();
newVol.setPath(key);
newVol.setSize(srcFile.length());
retObj = newVol;
} else if (destData.getObjectType() == DataObjectType.SNAPSHOT) {
SnapshotObjectTO newSnapshot = new SnapshotObjectTO();
newSnapshot.setPath(key);
retObj = newSnapshot;
}
return new CopyCmdAnswer(retObj);
} catch (Exception e) {
s_logger.error("failed to upload" + srcData.getPath(), e);
return new CopyCmdAnswer("failed to upload" + srcData.getPath() + e.toString());
}
}
String swiftDownload(SwiftTO swift, String container, String rfilename, String lFullPath) {
Script command = new Script("/bin/bash", s_logger);
command.add("-c");
command.add("/usr/bin/python /usr/local/cloud/systemvm/scripts/storage/secondary/swift -A " + swift.getUrl() + " -U " + swift.getAccount() + ":" +
swift.getUserName() + " -K " + swift.getKey() + " download " + container + " " + rfilename + " -o " + lFullPath);
OutputInterpreter.AllLinesParser parser = new OutputInterpreter.AllLinesParser();
String result = command.execute(parser);
if (result != null) {
String errMsg = "swiftDownload failed err=" + result;
s_logger.warn(errMsg);
return errMsg;
}
if (parser.getLines() != null) {
String[] lines = parser.getLines().split("\\n");
for (String line : lines) {
if (line.contains("Errno") || line.contains("failed")) {
String errMsg = "swiftDownload failed , err=" + parser.getLines();
s_logger.warn(errMsg);
return errMsg;
}
}
}
return null;
}
String swiftDownloadContainer(SwiftTO swift, String container, String ldir) {
Script command = new Script("/bin/bash", s_logger);
command.add("-c");
command.add("cd " + ldir + ";/usr/bin/python /usr/local/cloud/systemvm/scripts/storage/secondary/swift -A " + swift.getUrl() + " -U " + swift.getAccount() + ":" +
swift.getUserName() + " -K " + swift.getKey() + " download " + container);
OutputInterpreter.AllLinesParser parser = new OutputInterpreter.AllLinesParser();
String result = command.execute(parser);
if (result != null) {
String errMsg = "swiftDownloadContainer failed err=" + result;
s_logger.warn(errMsg);
return errMsg;
}
if (parser.getLines() != null) {
String[] lines = parser.getLines().split("\\n");
for (String line : lines) {
if (line.contains("Errno") || line.contains("failed")) {
String errMsg = "swiftDownloadContainer failed , err=" + parser.getLines();
s_logger.warn(errMsg);
return errMsg;
}
}
}
return null;
}
String swiftUpload(SwiftTO swift, String container, String lDir, String lFilename) {
long SWIFT_MAX_SIZE = 5L * 1024L * 1024L * 1024L;
List<String> files = new ArrayList<String>();
if (lFilename.equals("*")) {
File dir = new File(lDir);
String [] dir_lst = dir.list();
if(dir_lst != null) {
for (String file : dir_lst) {
if (file.startsWith(".")) {
continue;
}
files.add(file);
}
}
} else {
files.add(lFilename);
}
for (String file : files) {
File f = new File(lDir + "/" + file);
long size = f.length();
Script command = new Script("/bin/bash", s_logger);
command.add("-c");
if (size <= SWIFT_MAX_SIZE) {
command.add("cd " + lDir + ";/usr/bin/python /usr/local/cloud/systemvm/scripts/storage/secondary/swift -A " + swift.getUrl() + " -U " +
swift.getAccount() + ":" + swift.getUserName() + " -K " + swift.getKey() + " upload " + container + " " + file);
} else {
command.add("cd " + lDir + ";/usr/bin/python /usr/local/cloud/systemvm/scripts/storage/secondary/swift -A " + swift.getUrl() + " -U " +
swift.getAccount() + ":" + swift.getUserName() + " -K " + swift.getKey() + " upload -S " + SWIFT_MAX_SIZE + " " + container + " " + file);
}
OutputInterpreter.AllLinesParser parser = new OutputInterpreter.AllLinesParser();
String result = command.execute(parser);
if (result != null) {
String errMsg = "swiftUpload failed , err=" + result;
s_logger.warn(errMsg);
return errMsg;
}
if (parser.getLines() != null) {
String[] lines = parser.getLines().split("\\n");
for (String line : lines) {
if (line.contains("Errno") || line.contains("failed")) {
String errMsg = "swiftUpload failed , err=" + parser.getLines();
s_logger.warn(errMsg);
return errMsg;
}
}
}
}
return null;
}
String[] swiftList(SwiftTO swift, String container, String rFilename) {
Script command = new Script("/bin/bash", s_logger);
command.add("-c");
command.add("/usr/bin/python /usr/local/cloud/systemvm/scripts/storage/secondary/swift -A " + swift.getUrl() + " -U " + swift.getAccount() + ":" +
swift.getUserName() + " -K " + swift.getKey() + " list " + container + " " + rFilename);
OutputInterpreter.AllLinesParser parser = new OutputInterpreter.AllLinesParser();
String result = command.execute(parser);
if (result == null && parser.getLines() != null) {
String[] lines = parser.getLines().split("\\n");
return lines;
} else {
if (result != null) {
String errMsg = "swiftList failed , err=" + result;
s_logger.warn(errMsg);
} else {
String errMsg = "swiftList failed, no lines returns";
s_logger.warn(errMsg);
}
}
return null;
}
String swiftDelete(SwiftTO swift, String container, String object) {
Script command = new Script("/bin/bash", s_logger);
command.add("-c");
command.add("/usr/bin/python /usr/local/cloud/systemvm/scripts/storage/secondary/swift -A " + swift.getUrl() + " -U " + swift.getAccount() + ":" +
swift.getUserName() + " -K " + swift.getKey() + " delete " + container + " " + object);
OutputInterpreter.AllLinesParser parser = new OutputInterpreter.AllLinesParser();
String result = command.execute(parser);
if (result != null) {
String errMsg = "swiftDelete failed , err=" + result;
s_logger.warn(errMsg);
return errMsg;
}
if (parser.getLines() != null) {
String[] lines = parser.getLines().split("\\n");
for (String line : lines) {
if (line.contains("Errno") || line.contains("failed")) {
String errMsg = "swiftDelete failed , err=" + parser.getLines();
s_logger.warn(errMsg);
return errMsg;
}
}
}
return null;
}
public Answer execute(DeleteSnapshotsDirCommand cmd) {
DataStoreTO dstore = cmd.getDataStore();
if (dstore instanceof NfsTO) {
NfsTO nfs = (NfsTO)dstore;
String relativeSnapshotPath = cmd.getDirectory();
String parent = getRootDir(nfs.getUrl());
if (relativeSnapshotPath.startsWith(File.separator)) {
relativeSnapshotPath = relativeSnapshotPath.substring(1);
}
if (!parent.endsWith(File.separator)) {
parent += File.separator;
}
String absoluteSnapshotPath = parent + relativeSnapshotPath;
File snapshotDir = new File(absoluteSnapshotPath);
String details = null;
if (!snapshotDir.exists()) {
details = "snapshot directory " + snapshotDir.getName() + " doesn't exist";
s_logger.debug(details);
return new Answer(cmd, true, details);
}
// delete all files in the directory
String lPath = absoluteSnapshotPath + "/*";
String result = deleteLocalFile(lPath);
if (result != null) {
String errMsg = "failed to delete all snapshots " + lPath + " , err=" + result;
s_logger.warn(errMsg);
return new Answer(cmd, false, errMsg);
}
// delete the directory
if (!snapshotDir.delete()) {
details = "Unable to delete directory " + snapshotDir.getName() + " under snapshot path " + relativeSnapshotPath;
s_logger.debug(details);
return new Answer(cmd, false, details);
}
return new Answer(cmd, true, null);
} else if (dstore instanceof S3TO) {
final S3TO s3 = (S3TO)dstore;
final String path = cmd.getDirectory();
final String bucket = s3.getBucketName();
try {
S3Utils.deleteDirectory(s3, bucket, path);
return new Answer(cmd, true, String.format("Deleted snapshot %1%s from bucket %2$s.", path, bucket));
} catch (Exception e) {
final String errorMessage =
String.format("Failed to delete snapshot %1$s from bucket %2$s due to the following error: %3$s", path, bucket, e.getMessage());
s_logger.error(errorMessage, e);
return new Answer(cmd, false, errorMessage);
}
} else if (dstore instanceof SwiftTO) {
String path = cmd.getDirectory();
String volumeId = StringUtils.substringAfterLast(path, "/"); // assuming
// that
// the
// filename
// is
// the
// last
// section
// in
// the
// path
String result = swiftDelete((SwiftTO)dstore, "V-" + volumeId.toString(), "");
if (result != null) {
String errMsg = "failed to delete snapshot for volume " + volumeId + " , err=" + result;
s_logger.warn(errMsg);
return new Answer(cmd, false, errMsg);
}
return new Answer(cmd, true, "Deleted snapshot " + path + " from swift");
} else {
return new Answer(cmd, false, "Unsupported image data store: " + dstore);
}
}
private Answer execute(ComputeChecksumCommand cmd) {
String relativeTemplatePath = cmd.getTemplatePath();
DataStoreTO store = cmd.getStore();
if (!(store instanceof NfsTO)) {
return new Answer(cmd, false, "can't handle non nfs data store");
}
NfsTO nfsStore = (NfsTO)store;
String parent = getRootDir(nfsStore.getUrl());
if (relativeTemplatePath.startsWith(File.separator)) {
relativeTemplatePath = relativeTemplatePath.substring(1);
}
if (!parent.endsWith(File.separator)) {
parent += File.separator;
}
String absoluteTemplatePath = parent + relativeTemplatePath;
MessageDigest digest;
String checksum = null;
File f = new File(absoluteTemplatePath);
InputStream is = null;
byte[] buffer = new byte[8192];
int read = 0;
if (s_logger.isDebugEnabled()) {
s_logger.debug("parent path " + parent + " relative template path " + relativeTemplatePath);
}
try {
digest = MessageDigest.getInstance("MD5");
is = new FileInputStream(f);
while ((read = is.read(buffer)) > 0) {
digest.update(buffer, 0, read);
}
byte[] md5sum = digest.digest();
BigInteger bigInt = new BigInteger(1, md5sum);
checksum = bigInt.toString(16);
if (s_logger.isDebugEnabled()) {
s_logger.debug("Successfully calculated checksum for file " + absoluteTemplatePath + " - " + checksum);
}
} catch (IOException e) {
String logMsg = "Unable to process file for MD5 - " + absoluteTemplatePath;
s_logger.error(logMsg);
return new Answer(cmd, false, checksum);
} catch (NoSuchAlgorithmException e) {
return new Answer(cmd, false, checksum);
} finally {
try {
if (is != null) {
is.close();
}
} catch (IOException e) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Could not close the file " + absoluteTemplatePath);
}
return new Answer(cmd, false, checksum);
}
}
return new Answer(cmd, true, checksum);
}
    /**
     * Installs SSL certificates for the secondary storage VM. With no
     * certificates supplied, configureSSL() is called with defaults; otherwise
     * each PEM blob is written to a temp file, the paths are handed to
     * configureSSL(...), and the temp files are deleted afterwards. Each write
     * failure is logged but deliberately not fatal.
     *
     * NOTE(review): pubCert is populated from certs.getPrivCert() — the naming
     * suggests the public certificate was intended; confirm the accessor.
     */
    private void configCerts(KeystoreManager.Certificates certs) {
        if (certs == null) {
            configureSSL();
        } else {
            String prvKey = certs.getPrivKey();
            String pubCert = certs.getPrivCert();
            String certChain = certs.getCertChain();
            String rootCACert = certs.getRootCACert();
            try {
                // Private key -> temp file.
                File prvKeyFile = File.createTempFile("prvkey", null);
                String prvkeyPath = prvKeyFile.getAbsolutePath();
                try(BufferedWriter prvt_key_file = new BufferedWriter(new FileWriter(prvKeyFile));) {
                    prvt_key_file.write(prvKey);
                }catch (IOException e) {
                    s_logger.debug("Failed to config ssl: " + e.toString());
                }
                // Public certificate -> temp file.
                File pubCertFile = File.createTempFile("pubcert", null);
                String pubCertFilePath = pubCertFile.getAbsolutePath();
                try(BufferedWriter pub_cert_file = new BufferedWriter(new FileWriter(pubCertFile));) {
                    pub_cert_file.write(pubCert);
                }catch (IOException e) {
                    s_logger.debug("Failed to config ssl: " + e.toString());
                }
                // Optional chain and root CA certificates; paths stay null when absent.
                String certChainFilePath = null, rootCACertFilePath = null;
                File certChainFile = null, rootCACertFile = null;
                if(certChain != null){
                    certChainFile = File.createTempFile("certchain", null);
                    certChainFilePath = certChainFile.getAbsolutePath();
                    try(BufferedWriter cert_chain_out = new BufferedWriter(new FileWriter(certChainFile));) {
                        cert_chain_out.write(certChain);
                    }catch (IOException e) {
                        s_logger.debug("Failed to config ssl: " + e.toString());
                    }
                }
                if(rootCACert != null){
                    rootCACertFile = File.createTempFile("rootcert", null);
                    rootCACertFilePath = rootCACertFile.getAbsolutePath();
                    try(BufferedWriter root_ca_cert_file = new BufferedWriter(new FileWriter(rootCACertFile));) {
                        root_ca_cert_file.write(rootCACert);
                    }catch (IOException e) {
                        s_logger.debug("Failed to config ssl: " + e.toString());
                    }
                }
                configureSSL(prvkeyPath, pubCertFilePath, certChainFilePath, rootCACertFilePath);
                // Best-effort cleanup of the temp files holding key material.
                prvKeyFile.delete();
                pubCertFile.delete();
                if(certChainFile != null){
                    certChainFile.delete();
                }
                if(rootCACertFile != null){
                    rootCACertFile.delete();
                }
            } catch (IOException e) {
                s_logger.debug("Failed to config ssl: " + e.toString());
            }
        }
    }
private Answer execute(SecStorageSetupCommand cmd) {
if (!_inSystemVM) {
return new Answer(cmd, true, null);
}
Answer answer = null;
DataStoreTO dStore = cmd.getDataStore();
if (dStore instanceof NfsTO) {
String secUrl = cmd.getSecUrl();
try {
URI uri = new URI(secUrl);
String nfsHostIp = getUriHostIp(uri);
addRouteToInternalIpOrCidr(_storageGateway, _storageIp, _storageNetmask, nfsHostIp);
String dir = mountUri(uri);
configCerts(cmd.getCerts());
nfsIps.add(nfsHostIp);
answer = new SecStorageSetupAnswer(dir);
} catch (Exception e) {
String msg = "GetRootDir for " + secUrl + " failed due to " + e.toString();
s_logger.error(msg);
answer = new Answer(cmd, false, msg);
}
} else {
// TODO: what do we need to setup for S3/Swift, maybe need to mount
// to some cache storage
answer = new Answer(cmd, true, null);
}
savePostUploadPSK(cmd.getPostUploadKey());
startPostUploadServer();
return answer;
}
    /**
     * Starts a netty-based HTTP server on port 8210 that accepts direct
     * template/volume uploads and hands them to HttpUploadServerHandler. The
     * blocking bind/close-wait runs in a dedicated thread so this method
     * returns immediately; the event-loop groups are shut down when that
     * thread finishes.
     */
    private void startPostUploadServer() {
        final int PORT = 8210;
        final int NO_OF_WORKERS = 15;
        final EventLoopGroup bossGroup = new NioEventLoopGroup(1);
        final EventLoopGroup workerGroup = new NioEventLoopGroup(NO_OF_WORKERS);
        final ServerBootstrap b = new ServerBootstrap();
        final NfsSecondaryStorageResource storageResource = this;
        b.group(bossGroup, workerGroup);
        b.channel(NioServerSocketChannel.class);
        b.handler(new LoggingHandler(LogLevel.INFO));
        b.childHandler(new ChannelInitializer<SocketChannel>() {
            @Override
            protected void initChannel(SocketChannel ch) throws Exception {
                // Pipeline order matters: decode HTTP, encode responses,
                // compress, then the upload handler.
                ChannelPipeline pipeline = ch.pipeline();
                pipeline.addLast(new HttpRequestDecoder());
                pipeline.addLast(new HttpResponseEncoder());
                pipeline.addLast(new HttpContentCompressor());
                pipeline.addLast(new HttpUploadServerHandler(storageResource));
            }
        });
        new Thread() {
            @Override
            public void run() {
                try {
                    // bind() and closeFuture().sync() both block, hence the thread.
                    Channel ch = b.bind(PORT).sync().channel();
                    s_logger.info(String.format("Started post upload server on port %d with %d workers",PORT,NO_OF_WORKERS));
                    ch.closeFuture().sync();
                } catch (InterruptedException e) {
                    s_logger.info("Failed to start post upload server");
                    s_logger.debug("Exception while starting post upload server", e);
                } finally {
                    bossGroup.shutdownGracefully();
                    workerGroup.shutdownGracefully();
                    s_logger.info("shutting down post upload server");
                }
            }
        }.start();
        s_logger.info("created a thread to start post upload server");
    }
private void savePostUploadPSK(String psk) {
try {
FileUtils.writeStringToFile(new File(POST_UPLOAD_KEY_LOCATION),psk, "utf-8");
} catch (IOException ex) {
s_logger.debug("Failed to copy PSK to the file.", ex);
}
}
protected Answer deleteSnapshot(final DeleteCommand cmd) {
DataTO obj = cmd.getData();
DataStoreTO dstore = obj.getDataStore();
if (dstore instanceof NfsTO) {
NfsTO nfs = (NfsTO)dstore;
String parent = getRootDir(nfs.getUrl());
if (!parent.endsWith(File.separator)) {
parent += File.separator;
}
String snapshotPath = obj.getPath();
if (snapshotPath.startsWith(File.separator)) {
snapshotPath = snapshotPath.substring(1);
}
// check if the passed snapshot path is a directory or not. For ImageCache, path is stored as a directory instead of
// snapshot file name. If so, since backupSnapshot process has already deleted snapshot in cache, so we just do nothing
// and return true.
String fullSnapPath = parent + snapshotPath;
File snapDir = new File(fullSnapPath);
if (snapDir.exists() && snapDir.isDirectory()) {
s_logger.debug("snapshot path " + snapshotPath + " is a directory, already deleted during backup snapshot, so no need to delete");
return new Answer(cmd, true, null);
}
// passed snapshot path is a snapshot file path, then get snapshot directory first
int index = snapshotPath.lastIndexOf("/");
String snapshotName = snapshotPath.substring(index + 1);
snapshotPath = snapshotPath.substring(0, index);
String absoluteSnapshotPath = parent + snapshotPath;
// check if snapshot directory exists
File snapshotDir = new File(absoluteSnapshotPath);
String details = null;
if (!snapshotDir.exists()) {
details = "snapshot directory " + snapshotDir.getName() + " doesn't exist";
s_logger.debug(details);
return new Answer(cmd, true, details);
}
// delete snapshot in the directory if exists
String lPath = absoluteSnapshotPath + "/*" + snapshotName + "*";
String result = deleteLocalFile(lPath);
if (result != null) {
details = "failed to delete snapshot " + lPath + " , err=" + result;
s_logger.warn(details);
return new Answer(cmd, false, details);
}
return new Answer(cmd, true, null);
} else if (dstore instanceof S3TO) {
final S3TO s3 = (S3TO)dstore;
final String path = obj.getPath();
final String bucket = s3.getBucketName();
try {
S3Utils.deleteObject(s3, bucket, path);
return new Answer(cmd, true, String.format("Deleted snapshot %1%s from bucket %2$s.", path, bucket));
} catch (Exception e) {
final String errorMessage =
String.format("Failed to delete snapshot %1$s from bucket %2$s due to the following error: %3$s", path, bucket, e.getMessage());
s_logger.error(errorMessage, e);
return new Answer(cmd, false, errorMessage);
}
} else if (dstore instanceof SwiftTO) {
SwiftTO swiftTO = (SwiftTO)dstore;
String path = obj.getPath();
SwiftUtil.deleteObject(swiftTO, path);
return new Answer(cmd, true, "Deleted snapshot " + path + " from swift");
} else {
return new Answer(cmd, false, "Unsupported image data store: " + dstore);
}
}
Map<String, TemplateProp> swiftListTemplate(SwiftTO swift) {
String[] containers = SwiftUtil.list(swift, "", null);
if (containers == null) {
return null;
}
Map<String, TemplateProp> tmpltInfos = new HashMap<String, TemplateProp>();
for (String container : containers) {
if (container.startsWith("T-")) {
String[] files = SwiftUtil.list(swift, container, "template.properties");
if (files.length != 1) {
continue;
}
try {
File tempFile = File.createTempFile("template", ".tmp");
File tmpFile = SwiftUtil.getObject(swift, tempFile, container + File.separator + "template.properties");
if (tmpFile == null) {
continue;
}
try (FileReader fr = new FileReader(tmpFile);
BufferedReader brf = new BufferedReader(fr);) {
String line = null;
String uniqName = null;
Long size = null;
String name = null;
while ((line = brf.readLine()) != null) {
if (line.startsWith("uniquename=")) {
uniqName = line.split("=")[1];
} else if (line.startsWith("size=")) {
size = Long.parseLong(line.split("=")[1]);
} else if (line.startsWith("filename=")) {
name = line.split("=")[1];
}
}
tempFile.delete();
if (uniqName != null) {
TemplateProp prop = new TemplateProp(uniqName, container + File.separator + name, size, size, true, false);
tmpltInfos.put(uniqName, prop);
}
} catch (IOException ex)
{
s_logger.debug("swiftListTemplate:Exception:" + ex.getMessage());
continue;
}
} catch (IOException e) {
s_logger.debug("Failed to create templ file:" + e.toString());
continue;
} catch (Exception e) {
s_logger.debug("Failed to get properties: " + e.toString());
continue;
}
}
}
return tmpltInfos;
}
Map<String, TemplateProp> s3ListTemplate(S3TO s3) {
String bucket = s3.getBucketName();
// List the objects in the source directory on S3
final List<S3ObjectSummary> objectSummaries = S3Utils.getDirectory(s3, bucket, TEMPLATE_ROOT_DIR);
if (objectSummaries == null) {
return null;
}
Map<String, TemplateProp> tmpltInfos = new HashMap<String, TemplateProp>();
for (S3ObjectSummary objectSummary : objectSummaries) {
String key = objectSummary.getKey();
// String installPath = StringUtils.substringBeforeLast(key,
// S3Utils.SEPARATOR);
String uniqueName = determineS3TemplateNameFromKey(key);
// TODO: isPublic value, where to get?
TemplateProp tInfo = new TemplateProp(uniqueName, key, objectSummary.getSize(), objectSummary.getSize(), true, false);
tmpltInfos.put(uniqueName, tInfo);
}
return tmpltInfos;
}
Map<Long, TemplateProp> s3ListVolume(S3TO s3) {
String bucket = s3.getBucketName();
// List the objects in the source directory on S3
final List<S3ObjectSummary> objectSummaries = S3Utils.getDirectory(s3, bucket, VOLUME_ROOT_DIR);
if (objectSummaries == null) {
return null;
}
Map<Long, TemplateProp> tmpltInfos = new HashMap<Long, TemplateProp>();
for (S3ObjectSummary objectSummary : objectSummaries) {
String key = objectSummary.getKey();
// String installPath = StringUtils.substringBeforeLast(key,
// S3Utils.SEPARATOR);
Long id = determineS3VolumeIdFromKey(key);
// TODO: how to get volume template name
TemplateProp tInfo = new TemplateProp(id.toString(), key, objectSummary.getSize(), objectSummary.getSize(), true, false);
tmpltInfos.put(id, tInfo);
}
return tmpltInfos;
}
private Answer execute(ListTemplateCommand cmd) {
if (!_inSystemVM) {
return new ListTemplateAnswer(null, null);
}
DataStoreTO store = cmd.getDataStore();
if (store instanceof NfsTO) {
NfsTO nfs = (NfsTO)store;
String secUrl = nfs.getUrl();
String root = getRootDir(secUrl);
Map<String, TemplateProp> templateInfos = _dlMgr.gatherTemplateInfo(root);
return new ListTemplateAnswer(secUrl, templateInfos);
} else if (store instanceof SwiftTO) {
SwiftTO swift = (SwiftTO)store;
Map<String, TemplateProp> templateInfos = swiftListTemplate(swift);
return new ListTemplateAnswer(swift.toString(), templateInfos);
} else if (store instanceof S3TO) {
S3TO s3 = (S3TO)store;
Map<String, TemplateProp> templateInfos = s3ListTemplate(s3);
return new ListTemplateAnswer(s3.getBucketName(), templateInfos);
} else {
return new Answer(cmd, false, "Unsupported image data store: " + store);
}
}
/**
 * Lists volumes on the backing image store (NFS or S3).
 * Outside the system VM no listing is performed.
 */
private Answer execute(ListVolumeCommand cmd) {
    if (!_inSystemVM) {
        return new ListVolumeAnswer(cmd.getSecUrl(), null);
    }
    final DataStoreTO dataStore = cmd.getDataStore();
    if (dataStore instanceof NfsTO) {
        final String mountRoot = getRootDir(cmd.getSecUrl());
        return new ListVolumeAnswer(cmd.getSecUrl(), _dlMgr.gatherVolumeInfo(mountRoot));
    }
    if (dataStore instanceof S3TO) {
        final S3TO s3Store = (S3TO)dataStore;
        return new ListVolumeAnswer(s3Store.getBucketName(), s3ListVolume(s3Store));
    }
    return new Answer(cmd, false, "Unsupported image data store: " + dataStore);
}
/**
 * Handles SSVM setup: opens outgoing access to each allowed internal site and,
 * when copy credentials are supplied, configures auth for the copy service.
 * No-op (reports success) outside the system VM.
 */
private Answer execute(SecStorageVMSetupCommand cmd) {
    if (!_inSystemVM) {
        return new Answer(cmd, true, null);
    }
    boolean success = true;
    StringBuilder result = new StringBuilder();
    for (String cidr : cmd.getAllowedInternalSites()) {
        if (nfsIps.contains(cidr)) {
            /*
             * if the internal download ip is the same with secondary
             * storage ip, adding internal sites will flush ip route to nfs
             * through storage ip.
             */
            continue;
        }
        // allowOutgoingOnPrivate() returns null on success, an error message otherwise
        String tmpresult = allowOutgoingOnPrivate(cidr);
        if (tmpresult != null) {
            result.append(", ").append(tmpresult);
            success = false;
        }
    }
    if (success) {
        if (cmd.getCopyPassword() != null && cmd.getCopyUserName() != null) {
            String tmpresult = configureAuth(cmd.getCopyUserName(), cmd.getCopyPassword());
            if (tmpresult != null) {
                result.append("Failed to configure auth for copy ").append(tmpresult);
                success = false;
            }
        }
    }
    return new Answer(cmd, success, result.toString());
}
/**
 * Removes a file or directory tree on the local filesystem via "rm -rf".
 *
 * NOTE(review): fullPath is interpolated unquoted into a bash -c command line;
 * this assumes callers only pass trusted, internally generated paths — TODO
 * confirm no user-controlled path can reach here.
 *
 * @param fullPath absolute path to delete
 * @return null on success, otherwise an error message (also logged)
 */
private String deleteLocalFile(String fullPath) {
    Script command = new Script("/bin/bash", s_logger);
    command.add("-c");
    command.add("rm -rf " + fullPath);
    String result = command.execute();
    if (result != null) {
        String errMsg = "Failed to delete file " + fullPath + ", err=" + result;
        s_logger.warn(errMsg);
        return errMsg;
    }
    return null;
}
/**
 * Inserts an iptables rule allowing new outgoing TCP connections from the
 * private interface (eth1) to the given destination CIDR, then ensures a
 * route to that destination via the local gateway.
 *
 * @param destCidr destination network to allow
 * @return null on success (or outside the system VM), otherwise an error message
 */
public String allowOutgoingOnPrivate(String destCidr) {
    if (!_inSystemVM) {
        return null;
    }
    Script command = new Script("/bin/bash", s_logger);
    String intf = "eth1";
    command.add("-c");
    command.add("iptables -I OUTPUT -o " + intf + " -d " + destCidr + " -p tcp -m state --state NEW -m tcp -j ACCEPT");
    String result = command.execute();
    if (result != null) {
        s_logger.warn("Error in allowing outgoing to " + destCidr + ", err=" + result);
        return "Error in allowing outgoing to " + destCidr + ", err=" + result;
    }
    // addRouteToInternalIpOrCidr() itself skips destinations already in eth1's subnet
    addRouteToInternalIpOrCidr(_localgw, _eth1ip, _eth1mask, destCidr);
    return null;
}
/**
 * Applies firewall configuration: collects the source IPs flagged for addition
 * and passes them to the ipfirewall.sh helper. No-op outside the system VM.
 */
private Answer execute(SecStorageFirewallCfgCommand cmd) {
    if (!_inSystemVM) {
        return new Answer(cmd, true, null);
    }
    // Collect the source IPs that should be allowed through the firewall.
    final List<String> allowedIps = new ArrayList<String>();
    for (final PortConfig portConfig : cmd.getPortConfigs()) {
        if (portConfig.isAdd()) {
            allowedIps.add(portConfig.getSourceIp());
        }
    }
    // configureIpFirewall() returns null on success, an error message otherwise.
    final String failure = configureIpFirewall(allowedIps, cmd.getIsAppendAIp());
    return new Answer(cmd, failure == null, failure);
}
/**
 * Reports the status of an in-flight upload identified by its entity UUID.
 * Terminal states (ERROR, COMPLETED) are removed from the tracking map when
 * reported; unknown UUIDs yield UNKNOWN.
 */
private UploadStatusAnswer execute(UploadStatusCommand cmd) {
    String entityUuid = cmd.getEntityUuid();
    if (uploadEntityStateMap.containsKey(entityUuid)) {
        UploadEntity uploadEntity = uploadEntityStateMap.get(entityUuid);
        if (uploadEntity.getUploadState() == UploadEntity.Status.ERROR) {
            uploadEntityStateMap.remove(entityUuid);
            return new UploadStatusAnswer(cmd, UploadStatus.ERROR, uploadEntity.getErrorMessage());
        } else if (uploadEntity.getUploadState() == UploadEntity.Status.COMPLETED) {
            UploadStatusAnswer answer = new UploadStatusAnswer(cmd, UploadStatus.COMPLETED);
            answer.setVirtualSize(uploadEntity.getVirtualSize());
            answer.setInstallPath(uploadEntity.getTmpltPath());
            answer.setPhysicalSize(uploadEntity.getPhysicalSize());
            answer.setDownloadPercent(100);
            uploadEntityStateMap.remove(entityUuid);
            return answer;
        } else if (uploadEntity.getUploadState() == UploadEntity.Status.IN_PROGRESS) {
            UploadStatusAnswer answer = new UploadStatusAnswer(cmd, UploadStatus.IN_PROGRESS);
            long downloadedSize = FileUtils.sizeOfDirectory(new File(uploadEntity.getInstallPathPrefix()));
            long contentLength = uploadEntity.getContentLength();
            // BUGFIX: guard against a zero content length, which previously
            // caused an ArithmeticException (divide by zero).
            int downloadPercent = contentLength > 0 ? (int)(100 * downloadedSize / contentLength) : 0;
            answer.setDownloadPercent(Math.min(downloadPercent, 100));
            return answer;
        }
    }
    return new UploadStatusAnswer(cmd, UploadStatus.UNKNOWN);
}
/**
 * Reports capacity statistics for the backing store. Object stores (S3/Swift)
 * have no queryable capacity and are reported as effectively unlimited; NFS
 * stats come from the local mount's filesystem.
 */
protected GetStorageStatsAnswer execute(final GetStorageStatsCommand cmd) {
    final DataStoreTO store = cmd.getStore();
    if (store instanceof S3TO || store instanceof SwiftTO) {
        final long infinity = Integer.MAX_VALUE;
        return new GetStorageStatsAnswer(cmd, infinity, 0L);
    }
    final String rootDir = getRootDir(((NfsTO)store).getUrl());
    final long used = getUsedSize(rootDir);
    final long total = getTotalSize(rootDir);
    // -1 from either probe means the filesystem could not be queried
    if (used == -1 || total == -1) {
        return new GetStorageStatsAnswer(cmd, "Unable to get storage stats");
    }
    return new GetStorageStatsAnswer(cmd, total, used);
}
/**
 * Dispatches a delete request to the handler for the object's type.
 * Entries with a null install path (placeholders created during NFS-to-object
 * store migration) are treated as already deleted.
 */
protected Answer execute(final DeleteCommand cmd) {
    final DataTO data = cmd.getData();
    // account for those fake entries for NFS migration to object store
    if (data.getPath() == null) {
        return new Answer(cmd, true, "Object with null install path does not exist on image store , no need to delete");
    }
    switch (data.getObjectType()) {
    case TEMPLATE:
        return deleteTemplate(cmd);
    case VOLUME:
        return deleteVolume(cmd);
    case SNAPSHOT:
        return deleteSnapshot(cmd);
    default:
        return Answer.createUnsupportedCommandAnswer(cmd);
    }
}
/**
 * Deletes a template from the image store. For NFS the template's directory
 * under the secondary storage mount is removed (the template.properties marker
 * is used as a sanity check); for S3 the object directory is deleted; for
 * Swift the contents of the "T-&lt;id&gt;" container are deleted.
 *
 * @return success/failure Answer with a detail message
 */
protected Answer deleteTemplate(DeleteCommand cmd) {
    DataTO obj = cmd.getData();
    DataStoreTO dstore = obj.getDataStore();
    if (dstore instanceof NfsTO) {
        NfsTO nfs = (NfsTO)dstore;
        String relativeTemplatePath = obj.getPath();
        String parent = getRootDir(nfs.getUrl());
        if (relativeTemplatePath.startsWith(File.separator)) {
            relativeTemplatePath = relativeTemplatePath.substring(1);
        }
        if (!parent.endsWith(File.separator)) {
            parent += File.separator;
        }
        String absoluteTemplatePath = parent + relativeTemplatePath;
        File tmpltPath = new File(absoluteTemplatePath);
        File tmpltParent = null;
        // the path may point either at the template directory itself or at a file inside it
        if(tmpltPath.exists() && tmpltPath.isDirectory()) {
            tmpltParent = tmpltPath;
        } else {
            tmpltParent = tmpltPath.getParentFile();
        }
        String details = null;
        // an absent directory is treated as already deleted (success)
        if (!tmpltParent.exists()) {
            details = "template parent directory " + tmpltParent.getName() + " doesn't exist";
            s_logger.debug(details);
            return new Answer(cmd, true, details);
        }
        File[] tmpltFiles = tmpltParent.listFiles();
        if (tmpltFiles == null || tmpltFiles.length == 0) {
            details = "No files under template parent directory " + tmpltParent.getName();
            s_logger.debug(details);
        } else {
            boolean found = false;
            for (File f : tmpltFiles) {
                if (!found && f.getName().equals("template.properties")) {
                    found = true;
                }
                // KVM HA monitor makes a mess in the templates with its
                // heartbeat tests
                // Don't let this stop us from cleaning up the template
                if (f.isDirectory() && f.getName().equals("KVMHA")) {
                    s_logger.debug("Deleting KVMHA directory contents from template location");
                    File[] haFiles = f.listFiles();
                    for (File haFile : haFiles) {
                        haFile.delete();
                    }
                }
                if (!f.delete()) {
                    return new Answer(cmd, false, "Unable to delete file " + f.getName() + " under Template path " + relativeTemplatePath);
                }
            }
            if (!found) {
                details = "Can not find template.properties under " + tmpltParent.getName();
                s_logger.debug(details);
            }
        }
        // finally remove the now-empty template directory itself
        if (!tmpltParent.delete()) {
            details = "Unable to delete directory " + tmpltParent.getName() + " under Template path " + relativeTemplatePath;
            s_logger.debug(details);
            return new Answer(cmd, false, details);
        }
        return new Answer(cmd, true, null);
    } else if (dstore instanceof S3TO) {
        final S3TO s3 = (S3TO)dstore;
        final String path = obj.getPath();
        final String bucket = s3.getBucketName();
        try {
            S3Utils.deleteDirectory(s3, bucket, path);
            return new Answer(cmd, true, String.format("Deleted template %1$s from bucket %2$s.", path, bucket));
        } catch (Exception e) {
            final String errorMessage =
                String.format("Failed to delete template %1$s from bucket %2$s due to the following error: %3$s", path, bucket, e.getMessage());
            s_logger.error(errorMessage, e);
            return new Answer(cmd, false, errorMessage);
        }
    } else if (dstore instanceof SwiftTO) {
        SwiftTO swift = (SwiftTO)dstore;
        String container = "T-" + obj.getId();
        String object = "";
        try {
            String result = swiftDelete(swift, container, object);
            if (result != null) {
                String errMsg = "failed to delete object " + container + "/" + object + " , err=" + result;
                s_logger.warn(errMsg);
                return new Answer(cmd, false, errMsg);
            }
            return new Answer(cmd, true, "success");
        } catch (Exception e) {
            String errMsg = cmd + " Command failed due to " + e.toString();
            s_logger.warn(errMsg, e);
            return new Answer(cmd, false, errMsg);
        }
    } else {
        return new Answer(cmd, false, "Unsupported image data store: " + dstore);
    }
}
/**
 * Deletes a volume from the image store. Mirrors {@code deleteTemplate} but
 * keys off volume.properties and volume-specific paths/containers.
 *
 * @return success/failure Answer with a detail message
 */
protected Answer deleteVolume(final DeleteCommand cmd) {
    DataTO obj = cmd.getData();
    DataStoreTO dstore = obj.getDataStore();
    if (dstore instanceof NfsTO) {
        NfsTO nfs = (NfsTO)dstore;
        String relativeVolumePath = obj.getPath();
        String parent = getRootDir(nfs.getUrl());
        if (relativeVolumePath.startsWith(File.separator)) {
            relativeVolumePath = relativeVolumePath.substring(1);
        }
        if (!parent.endsWith(File.separator)) {
            parent += File.separator;
        }
        String absoluteVolumePath = parent + relativeVolumePath;
        File volPath = new File(absoluteVolumePath);
        File tmpltParent = null;
        if (volPath.exists() && volPath.isDirectory()) {
            // for vmware, absoluteVolumePath represents a directory where volume files are located.
            tmpltParent = volPath;
        } else {
            // for other hypervisors, the volume .vhd or .qcow2 file path is passed
            tmpltParent = new File(absoluteVolumePath).getParentFile();
        }
        String details = null;
        // an absent directory is treated as already deleted (success)
        if (!tmpltParent.exists()) {
            details = "volume parent directory " + tmpltParent.getName() + " doesn't exist";
            s_logger.debug(details);
            return new Answer(cmd, true, details);
        }
        File[] tmpltFiles = tmpltParent.listFiles();
        if (tmpltFiles == null || tmpltFiles.length == 0) {
            details = "No files under volume parent directory " + tmpltParent.getName();
            s_logger.debug(details);
        } else {
            boolean found = false;
            for (File f : tmpltFiles) {
                if (!found && f.getName().equals("volume.properties")) {
                    found = true;
                }
                // KVM HA monitor makes a mess in the templates with its
                // heartbeat tests
                // Don't let this stop us from cleaning up the template
                if (f.isDirectory() && f.getName().equals("KVMHA")) {
                    s_logger.debug("Deleting KVMHA directory contents from template location");
                    File[] haFiles = f.listFiles();
                    for (File haFile : haFiles) {
                        haFile.delete();
                    }
                }
                if (!f.delete()) {
                    return new Answer(cmd, false, "Unable to delete file " + f.getName() + " under Volume path " + tmpltParent.getPath());
                }
            }
            if (!found) {
                details = "Can not find volume.properties under " + tmpltParent.getName();
                s_logger.debug(details);
            }
        }
        // finally remove the now-empty volume directory itself
        if (!tmpltParent.delete()) {
            details = "Unable to delete directory " + tmpltParent.getName() + " under Volume path " + tmpltParent.getPath();
            s_logger.debug(details);
            return new Answer(cmd, false, details);
        }
        return new Answer(cmd, true, null);
    } else if (dstore instanceof S3TO) {
        final S3TO s3 = (S3TO)dstore;
        final String path = obj.getPath();
        final String bucket = s3.getBucketName();
        try {
            S3Utils.deleteDirectory(s3, bucket, path);
            // BUGFIX: format specifier was "%1%s", which mangled the success message
            return new Answer(cmd, true, String.format("Deleted volume %1$s from bucket %2$s.", path, bucket));
        } catch (Exception e) {
            final String errorMessage = String.format("Failed to delete volume %1$s from bucket %2$s due to the following error: %3$s", path, bucket, e.getMessage());
            s_logger.error(errorMessage, e);
            return new Answer(cmd, false, errorMessage);
        }
    } else if (dstore instanceof SwiftTO) {
        Long volumeId = obj.getId();
        String path = obj.getPath();
        // assuming the filename is the last section of the path
        String filename = StringUtils.substringAfterLast(path, "/");
        String result = swiftDelete((SwiftTO)dstore, "V-" + volumeId.toString(), filename);
        if (result != null) {
            String errMsg = "failed to delete volume " + filename + " , err=" + result;
            s_logger.warn(errMsg);
            return new Answer(cmd, false, errMsg);
        }
        return new Answer(cmd, true, "Deleted volume " + path + " from swift");
    } else {
        return new Answer(cmd, false, "Unsupported image data store: " + dstore);
    }
}
/**
 * Resolves (and, inside the system VM, mounts) the local root directory for a
 * secondary storage URL. Outside the system VM the configured mount path is
 * returned as-is.
 *
 * @throws CloudRuntimeException when the URL cannot be parsed or mounted
 */
@Override
synchronized public String getRootDir(String secUrl) {
    if (!_inSystemVM) {
        return _parent;
    }
    try {
        final URI uri = new URI(secUrl);
        // mountUri() mounts the share if needed and yields its directory name under _parent
        return _parent + "/" + mountUri(uri);
    } catch (Exception e) {
        final String msg = "GetRootDir for " + secUrl + " failed due to " + e.toString();
        s_logger.error(msg, e);
        throw new CloudRuntimeException(msg);
    }
}
/** @return bytes used on the filesystem backing {@code rootDir}, per the StorageLayer. */
protected long getUsedSize(String rootDir) {
    return _storage.getUsedSpace(rootDir);
}
/** @return total bytes of the filesystem backing {@code rootDir}, per the StorageLayer. */
protected long getTotalSize(String rootDir) {
    return _storage.getTotalSpace(rootDir);
}
/**
 * Converts a filesystem-tool size string such as "1.5G", "512M" or "2T" into a
 * byte count using binary (1024-based) multipliers.
 *
 * Generalized to also accept a "K" suffix and bare numeric strings (treated as
 * bytes); previously such inputs silently dropped their last character.
 *
 * @param size size with optional T/G/M/K suffix, or a bare number of bytes
 * @return size in bytes, or -1 when {@code size} is null or empty
 */
protected long convertFilesystemSize(final String size) {
    if (size == null || size.isEmpty()) {
        return -1;
    }
    long multiplier = 1;
    int suffixLen = 1;
    if (size.endsWith("T")) {
        multiplier = 1024l * 1024l * 1024l * 1024l;
    } else if (size.endsWith("G")) {
        multiplier = 1024l * 1024l * 1024l;
    } else if (size.endsWith("M")) {
        multiplier = 1024l * 1024l;
    } else if (size.endsWith("K")) {
        multiplier = 1024l;
    } else if (Character.isDigit(size.charAt(size.length() - 1))) {
        // No unit suffix: interpret the whole string as a byte count.
        suffixLen = 0;
    } else {
        assert (false) : "Well, I have no idea what this is: " + size;
    }
    return (long)(Double.parseDouble(size.substring(0, size.length() - suffixLen)) * multiplier);
}
/**
 * Host type depends on the configured role: the template processor acts as a
 * full secondary storage host, any other role only executes commands.
 */
@Override
public Type getType() {
    final boolean isTemplateProcessor = SecondaryStorageVm.Role.templateProcessor.toString().equals(_role);
    return isTemplateProcessor ? Host.Type.SecondaryStorage : Host.Type.SecondaryStorageCmdExecutor;
}
/** Heartbeat: reports this host as a storage host with an empty pool-state map. */
@Override
public PingCommand getCurrentStatus(final long id) {
    return new PingStorageCommand(Host.Type.Storage, id, new HashMap<String, Boolean>());
}
/**
 * Configures this resource from the agent parameter map. Inside the system VM
 * this also wires routes to the management server and internal DNS, starts
 * sshd/iptables helpers and enables SSVM-specific upload settings; outside it
 * only the configured mount path is used. Finishes by configuring the
 * download and upload managers.
 *
 * @throws ConfigurationException when mandatory parameters (guid, zone) or the
 *         create_privatetemplate_from_snapshot_xen.sh script are missing
 */
@Override
public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
    _eth1ip = (String)params.get("eth1ip");
    _eth1mask = (String)params.get("eth1mask");
    if (_eth1ip != null) { // can only happen inside service vm
        params.put("private.network.device", "eth1");
    } else {
        s_logger.warn("eth1ip parameter has not been configured, assuming that we are not inside a system vm");
    }
    String eth2ip = (String)params.get("eth2ip");
    if (eth2ip != null) {
        params.put("public.network.device", "eth2");
    }
    _publicIp = (String)params.get("eth2ip");
    _hostname = (String)params.get("name");
    // default to system-VM behaviour unless explicitly disabled
    String inSystemVM = (String)params.get("secondary.storage.vm");
    if (inSystemVM == null || "true".equalsIgnoreCase(inSystemVM)) {
        s_logger.debug("conf secondary.storage.vm is true, act as if executing in SSVM");
        _inSystemVM = true;
    }
    _storageIp = (String)params.get("storageip");
    if (_storageIp == null && _inSystemVM) {
        s_logger.warn("There is no storageip in /proc/cmdline, something wrong!");
    }
    _storageNetmask = (String)params.get("storagenetmask");
    _storageGateway = (String)params.get("storagegateway");
    super.configure(name, params);
    _params = params;
    // script timeout: parameter is in minutes (default 1440), stored in ms
    String value = (String)params.get("scripts.timeout");
    _timeout = NumbersUtil.parseInt(value, 1440) * 1000;
    _storage = (StorageLayer)params.get(StorageLayer.InstanceConfigKey);
    configureStorageLayerClass(params);
    if (_inSystemVM) {
        _storage.mkdirs(_parent);
    }
    // locate helper scripts; only the snapshot-to-template script is mandatory
    _configSslScr = Script.findScript(getDefaultScriptsDir(), "config_ssl.sh");
    if (_configSslScr != null) {
        s_logger.info("config_ssl.sh found in " + _configSslScr);
    }
    _configAuthScr = Script.findScript(getDefaultScriptsDir(), "config_auth.sh");
    if (_configAuthScr != null) {
        s_logger.info("config_auth.sh found in " + _configAuthScr);
    }
    _configIpFirewallScr = Script.findScript(getDefaultScriptsDir(), "ipfirewall.sh");
    if (_configIpFirewallScr != null) {
        s_logger.info("_configIpFirewallScr found in " + _configIpFirewallScr);
    }
    createTemplateFromSnapshotXenScript = Script.findScript(getDefaultScriptsDir(), "create_privatetemplate_from_snapshot_xen.sh");
    if (createTemplateFromSnapshotXenScript == null) {
        throw new ConfigurationException("create_privatetemplate_from_snapshot_xen.sh not found in " + getDefaultScriptsDir());
    }
    _role = (String)params.get("role");
    if (_role == null) {
        _role = SecondaryStorageVm.Role.templateProcessor.toString();
    }
    s_logger.info("Secondary storage runs in role " + _role);
    _guid = (String)params.get("guid");
    if (_guid == null) {
        throw new ConfigurationException("Unable to find the guid");
    }
    _dc = (String)params.get("zone");
    if (_dc == null) {
        throw new ConfigurationException("Unable to find the zone");
    }
    _pod = (String)params.get("pod");
    _instance = (String)params.get("instance");
    if (!_inSystemVM) {
        _parent = (String)params.get("mount.path");
    }
    if (_inSystemVM) {
        _localgw = (String)params.get("localgw");
        if (_localgw != null) { // can only happen inside service vm
            String mgmtHost = (String)params.get("host");
            addRouteToInternalIpOrCidr(_localgw, _eth1ip, _eth1mask, mgmtHost);
            String internalDns1 = (String)params.get("internaldns1");
            if (internalDns1 == null) {
                s_logger.warn("No DNS entry found during configuration of NfsSecondaryStorage");
            } else {
                addRouteToInternalIpOrCidr(_localgw, _eth1ip, _eth1mask, internalDns1);
            }
            String internalDns2 = (String)params.get("internaldns2");
            if (internalDns2 != null) {
                addRouteToInternalIpOrCidr(_localgw, _eth1ip, _eth1mask, internalDns2);
            }
        }
        startAdditionalServices();
        _params.put("install.numthreads", "50");
        _params.put("secondary.storage.vm", "true");
    }
    try {
        _params.put(StorageLayer.InstanceConfigKey, _storage);
        _dlMgr = new DownloadManagerImpl();
        _dlMgr.configure("DownloadManager", _params);
        _upldMgr = new UploadManagerImpl();
        _upldMgr.configure("UploadManager", params);
    } catch (ConfigurationException e) {
        s_logger.warn("Caught problem while configuring DownloadManager", e);
        return false;
    }
    return true;
}
/**
 * Lazily instantiates the {@code StorageLayer} implementation. The class name
 * comes from {@code params} (key {@code StorageLayer.ClassConfigKey}) and
 * defaults to {@code com.cloud.storage.JavaStorageLayer}.
 *
 * @throws ConfigurationException when the class cannot be found, instantiated
 *         or accessed
 */
protected void configureStorageLayerClass(Map<String, Object> params) throws ConfigurationException {
    String value;
    if (_storage == null) {
        value = (String)params.get(StorageLayer.ClassConfigKey);
        if (value == null) {
            value = "com.cloud.storage.JavaStorageLayer";
        }
        try {
            Class<?> clazz = Class.forName(value);
            _storage = (StorageLayer)clazz.newInstance();
            _storage.configure("StorageLayer", params);
        } catch (ClassNotFoundException e) {
            throw new ConfigurationException("Unable to find class " + value);
        } catch (InstantiationException e) {
            // previously misreported as "unable to find", hiding the real cause
            throw new ConfigurationException("Unable to instantiate class " + value);
        } catch (IllegalAccessException e) {
            throw new ConfigurationException("Unable to access class " + value);
        }
    }
}
/**
 * Restarts the SSH daemon (handling both the Debian "ssh" and RedHat "sshd"
 * service names) and opens TCP port 3922 on eth1 for management-server SSH
 * access. No-op outside the system VM; failures are logged, not thrown.
 */
private void startAdditionalServices() {
    if (!_inSystemVM) {
        return;
    }
    Script command = new Script("/bin/bash", s_logger);
    command.add("-c");
    command.add("if [ -f /etc/init.d/ssh ]; then service ssh restart; else service sshd restart; fi ");
    String result = command.execute();
    if (result != null) {
        s_logger.warn("Error in starting sshd service err=" + result);
    }
    command = new Script("/bin/bash", s_logger);
    command.add("-c");
    command.add("iptables -I INPUT -i eth1 -p tcp -m state --state NEW -m tcp --dport 3922 -j ACCEPT");
    result = command.execute();
    if (result != null) {
        s_logger.warn("Error in opening up ssh port err=" + result);
    }
}
/**
 * Adds an explicit route via the local gateway to the given IP or CIDR,
 * replacing any pre-existing route to the same destination. Skipped outside
 * the system VM, for null/invalid destinations, and when the destination
 * already lies within eth1's subnet (the direct route suffices).
 */
private void addRouteToInternalIpOrCidr(String localgw, String eth1ip, String eth1mask, String destIpOrCidr) {
    if (!_inSystemVM) {
        return;
    }
    s_logger.debug("addRouteToInternalIp: localgw=" + localgw + ", eth1ip=" + eth1ip + ", eth1mask=" + eth1mask + ",destIp=" + destIpOrCidr);
    if (destIpOrCidr == null) {
        s_logger.debug("addRouteToInternalIp: destIp is null");
        return;
    }
    if (!NetUtils.isValidIp(destIpOrCidr) && !NetUtils.isValidCIDR(destIpOrCidr)) {
        s_logger.warn(" destIp is not a valid ip address or cidr destIp=" + destIpOrCidr);
        return;
    }
    boolean inSameSubnet = false;
    if (NetUtils.isValidIp(destIpOrCidr)) {
        if (eth1ip != null && eth1mask != null) {
            inSameSubnet = NetUtils.sameSubnet(eth1ip, destIpOrCidr, eth1mask);
        } else {
            s_logger.warn("addRouteToInternalIp: unable to determine same subnet: _eth1ip=" + eth1ip + ", dest ip=" + destIpOrCidr + ", _eth1mask=" + eth1mask);
        }
    } else {
        // destination is a CIDR: check containment within eth1's network
        inSameSubnet = NetUtils.isNetworkAWithinNetworkB(destIpOrCidr, NetUtils.ipAndNetMaskToCidr(eth1ip, eth1mask));
    }
    if (inSameSubnet) {
        s_logger.debug("addRouteToInternalIp: dest ip " + destIpOrCidr + " is in the same subnet as eth1 ip " + eth1ip);
        return;
    }
    // remove a stale route first so the add below does not conflict
    Script command = new Script("/bin/bash", s_logger);
    command.add("-c");
    command.add("ip route delete " + destIpOrCidr);
    command.execute();
    command = new Script("/bin/bash", s_logger);
    command.add("-c");
    command.add("ip route add " + destIpOrCidr + " via " + localgw);
    String result = command.execute();
    if (result != null) {
        s_logger.warn("Error in configuring route to internal ip err=" + result);
    } else {
        s_logger.debug("addRouteToInternalIp: added route to internal ip=" + destIpOrCidr + " via " + localgw);
    }
}
/**
 * Runs the config_ssl.sh helper to configure httpd for SSL using the VM's
 * public IP and hostname. No-op outside the system VM; failures are logged.
 */
private void configureSSL() {
    if (!_inSystemVM) {
        return;
    }
    final Script script = new Script(_configSslScr);
    script.add("-i", _publicIp);
    script.add("-h", _hostname);
    if (script.execute() != null) {
        s_logger.warn("Unable to configure httpd to use ssl");
    }
}
/**
 * Configures httpd for SSL using caller-supplied key/certificate material.
 * No-op outside the system VM; failures are logged.
 *
 * @param prvkeyPath    path to the private key
 * @param prvCertPath   path to the server certificate
 * @param certChainPath optional intermediate certificate chain (may be null)
 * @param rootCACert    optional root CA certificate (may be null)
 */
private void configureSSL(String prvkeyPath, String prvCertPath, String certChainPath, String rootCACert) {
    if (!_inSystemVM) {
        return;
    }
    Script command = new Script(_configSslScr);
    command.add("-i", _publicIp);
    command.add("-h", _hostname);
    command.add("-k", prvkeyPath);
    command.add("-p", prvCertPath);
    if (certChainPath != null) {
        command.add("-t", certChainPath);
    }
    if (rootCACert != null) {
        command.add("-u", rootCACert);
    }
    String result = command.execute();
    if (result != null) {
        s_logger.warn("Unable to configure httpd to use ssl");
    }
}
/**
 * Configures HTTP auth for the copy service via the config_auth.sh helper.
 *
 * @return null on success, otherwise the script's error output
 */
private String configureAuth(String user, String passwd) {
    final Script script = new Script(_configAuthScr);
    script.add(user);
    script.add(passwd);
    final String failure = script.execute();
    if (failure != null) {
        s_logger.warn("Unable to configure httpd to use auth");
    }
    return failure;
}
/**
 * Applies the ipfirewall.sh helper for the given source IPs.
 *
 * @param ipList   source IPs passed to the script
 * @param isAppend forwarded to the script as its first argument (append vs.
 *                 reset mode — see ipfirewall.sh)
 * @return null on success, otherwise the script's error output
 */
private String configureIpFirewall(List<String> ipList, boolean isAppend) {
    final Script script = new Script(_configIpFirewallScr);
    script.add(String.valueOf(isAppend));
    for (final String ip : ipList) {
        script.add(ip);
    }
    final String failure = script.execute();
    if (failure != null) {
        s_logger.warn("Unable to configure firewall for command : " + script);
    }
    return failure;
}
/**
* Mount remote device named on local file system on subfolder of _parent
* field.
* <p>
*
* Supported schemes are "nfs" and "cifs".
* <p>
*
* CIFS parameters are documented with mount.cifs at
* http://linux.die.net/man/8/mount.cifs
* For simplicity, when a URI is used to specify a CIFS share,
* options such as domain,user,password are passed as query parameters.
*
* @param uri
* corresponding to the remote device. Will throw for unsupported
* scheme.
* @return name of folder in _parent that device was mounted.
* @throws UnknownHostException
*/
protected String mountUri(URI uri) throws UnknownHostException {
    String uriHostIp = getUriHostIp(uri);
    String nfsPath = uriHostIp + ":" + uri.getPath();
    // Single means of calculating mount directory regardless of scheme:
    // the UUID is derived from host-ip:path, so the same share always maps
    // to the same local directory name.
    String dir = UUID.nameUUIDFromBytes(nfsPath.getBytes()).toString();
    String localRootPath = _parent + "/" + dir;
    // remote device syntax varies by scheme.
    String remoteDevice;
    if (uri.getScheme().equals("cifs")) {
        remoteDevice = "//" + uriHostIp + uri.getPath();
        s_logger.debug("Mounting device with cifs-style path of " + remoteDevice);
    } else {
        remoteDevice = nfsPath;
        s_logger.debug("Mounting device with nfs-style path of " + remoteDevice);
    }
    mount(localRootPath, remoteDevice, uri);
    return dir;
}
/**
 * Unmounts the share at {@code localRootPath} if one is currently mounted
 * there. On failure the mount point directory is removed and a
 * CloudRuntimeException is thrown.
 */
protected void umount(String localRootPath, URI uri) {
    ensureLocalRootPathExists(localRootPath, uri);
    if (!mountExists(localRootPath, uri)) {
        return;
    }
    // BUGFIX: this previously invoked "mount", which cannot detach a share;
    // the log messages below already said "umount".
    Script command = new Script(!_inSystemVM, "umount", _timeout, s_logger);
    command.add(localRootPath);
    String result = command.execute();
    if (result != null) {
        // Fedora Core 12 errors out with any -o option executed from java
        String errMsg = "Unable to umount " + localRootPath + " due to " + result;
        s_logger.error(errMsg);
        File file = new File(localRootPath);
        if (file.exists()) {
            file.delete();
        }
        throw new CloudRuntimeException(errMsg);
    }
    s_logger.debug("Successfully umounted " + localRootPath);
}
/**
 * Mounts the remote device at {@code localRootPath}. Idempotent: returns early
 * when something is already mounted there. After mounting, ensures the
 * standard snapshots/volumes subdirectories exist.
 */
protected void mount(String localRootPath, String remoteDevice, URI uri) {
    s_logger.debug("mount " + uri.toString() + " on " + localRootPath);
    ensureLocalRootPathExists(localRootPath, uri);
    if (mountExists(localRootPath, uri)) {
        return;
    }
    attemptMount(localRootPath, remoteDevice, uri);
    // XXX: Adding the check for creation of snapshots dir here. Might have
    // to move it somewhere more logical later.
    checkForSnapshotsDir(localRootPath);
    checkForVolumesDir(localRootPath);
}
/**
 * Performs the actual mount command for an nfs:// or cifs:// device. On
 * failure the mount point directory is removed and a CloudRuntimeException is
 * thrown; unsupported schemes also throw.
 *
 * @param localRootPath local mount point (must already exist)
 * @param remoteDevice  scheme-specific remote device string
 * @param uri           original URI, used for the scheme and CIFS options
 */
protected void attemptMount(String localRootPath, String remoteDevice, URI uri) {
    String result;
    s_logger.debug("Make cmdline call to mount " + remoteDevice + " at " + localRootPath + " based on uri " + uri);
    Script command = new Script(!_inSystemVM, "mount", _timeout, s_logger);
    String scheme = uri.getScheme().toLowerCase();
    command.add("-t", scheme);
    if (scheme.equals("nfs")) {
        if ("Mac OS X".equalsIgnoreCase(System.getProperty("os.name"))) {
            // See http://wiki.qnap.com/wiki/Mounting_an_NFS_share_from_OS_X
            command.add("-o", "resvport");
        }
        if (_inSystemVM) {
            command.add("-o", "soft,timeo=133,retrans=2147483647,tcp,acdirmax=0,acdirmin=0");
        }
    } else if (scheme.equals("cifs")) {
        String extraOpts = parseCifsMountOptions(uri);
        // nfs acdirmax / acdirmin corresponds to CIFS actimeo (see
        // http://linux.die.net/man/8/mount.cifs)
        // no equivalent to nfs timeo, retrans or tcp in CIFS
        // todo: allow security mode to be set.
        command.add("-o", extraOpts + "soft,actimeo=0");
    } else {
        String errMsg = "Unsupported storage device scheme " + scheme + " in uri " + uri.toString();
        s_logger.error(errMsg);
        throw new CloudRuntimeException(errMsg);
    }
    command.add(remoteDevice);
    command.add(localRootPath);
    result = command.execute();
    if (result != null) {
        // Fedora Core 12 errors out with any -o option executed from java
        String errMsg = "Unable to mount " + remoteDevice + " at " + localRootPath + " due to " + result;
        s_logger.error(errMsg);
        File file = new File(localRootPath);
        if (file.exists()) {
            file.delete();
        }
        throw new CloudRuntimeException(errMsg);
    }
    s_logger.debug("Successfully mounted " + remoteDevice + " at " + localRootPath);
}
/**
 * Extracts CIFS mount options (user, password, domain, ...) from the query
 * string of a cifs:// URI and renders them as a comma-terminated "mount -o"
 * option string.
 *
 * @param uri cifs URI whose query string carries the mount options
 * @return option string such as "user=foo,password=bar," (always comma-terminated)
 * @throws CloudRuntimeException when user or password is missing from the URI
 */
protected String parseCifsMountOptions(URI uri) {
    List<NameValuePair> args = URLEncodedUtils.parse(uri, "UTF-8");
    boolean foundUser = false;
    boolean foundPswd = false;
    StringBuilder extraOpts = new StringBuilder();
    for (NameValuePair nvp : args) {
        String name = nvp.getName();
        if (name.equals("user")) {
            foundUser = true;
            s_logger.debug("foundUser is" + foundUser);
        } else if (name.equals("password")) {
            foundPswd = true;
            s_logger.debug("password is present in uri");
        }
        extraOpts.append(name + "=" + nvp.getValue() + ",");
    }
    if (s_logger.isDebugEnabled()) {
        // BUGFIX: log at debug (was error despite the debug guard) and log only
        // option names — extraOpts contains the CIFS password and must not be
        // written to the log.
        StringBuilder optNames = new StringBuilder();
        for (NameValuePair nvp : args) {
            optNames.append(nvp.getName()).append(",");
        }
        s_logger.debug("extraOpts now contains options: " + optNames);
    }
    if (!foundUser || !foundPswd) {
        // BUGFIX: the two literals previously concatenated to "theyare"
        String errMsg =
            "Missing user and password from URI. Make sure they " + "are in the query string and separated by '&'. E.g. "
                + "cifs://example.com/some_share?user=foo&password=bar";
        s_logger.error(errMsg);
        throw new CloudRuntimeException(errMsg);
    }
    return extraOpts.toString();
}
/**
 * Checks whether some device is already mounted at {@code localRootPath} by
 * scanning the mounted paths reported by the "mount" command.
 */
protected boolean mountExists(String localRootPath, URI uri) {
    final Script mountCmd = new Script(!_inSystemVM, "mount", _timeout, s_logger);
    final ZfsPathParser parser = new ZfsPathParser(localRootPath);
    mountCmd.execute(parser);
    for (final String mountedPath : parser.getPaths()) {
        if (mountedPath.contains(localRootPath)) {
            s_logger.debug("Some device already mounted at " + localRootPath + ", no need to mount " + uri.toString());
            return true;
        }
    }
    return false;
}
/**
 * Creates the local mount point directory when it is missing; throws when it
 * cannot be created.
 *
 * @throws CloudRuntimeException when the directory does not exist after mkdir
 */
protected void ensureLocalRootPathExists(String localRootPath, URI uri) {
    s_logger.debug("making available " + localRootPath + " on " + uri.toString());
    final File mountPoint = new File(localRootPath);
    s_logger.debug("local folder for mount will be " + mountPoint.getPath());
    if (mountPoint.exists()) {
        return;
    }
    s_logger.debug("create mount point: " + mountPoint.getPath());
    _storage.mkdir(mountPoint.getPath());
    // Need to check after mkdir to allow O/S to complete operation
    if (!mountPoint.exists()) {
        final String errMsg = "Unable to create local folder for: " + localRootPath + " in order to mount " + uri.toString();
        s_logger.error(errMsg);
        throw new CloudRuntimeException(errMsg);
    }
}
/**
 * Resolves the URI's host name to its IP address.
 *
 * @throws UnknownHostException when DNS resolution fails
 */
protected String getUriHostIp(URI uri) throws UnknownHostException {
    final String host = uri.getHost();
    final String hostIp = InetAddress.getByName(host).getHostAddress();
    s_logger.info("Determined host " + host + " corresponds to IP " + hostIp);
    return hostIp;
}
/** Lifecycle hook; no startup work is needed for this resource. */
@Override
public boolean start() {
    return true;
}
/** Lifecycle hook; no shutdown work is needed for this resource. */
@Override
public boolean stop() {
    return true;
}
/**
 * Builds the startup command for this secondary storage host. Inside the
 * system VM the storage mount is symlinked under /var/www/html/copy so httpd
 * can serve it; failure to create the link aborts startup (returns null).
 */
@Override
public StartupCommand[] initialize() {
    final StartupSecondaryStorageCommand cmd = new StartupSecondaryStorageCommand();
    fillNetworkInformation(cmd);
    if (_publicIp != null) {
        cmd.setPublicIpAddress(_publicIp);
    }
    if (_inSystemVM) {
        Script command = new Script("/bin/bash", s_logger);
        command.add("-c");
        command.add("ln -sf " + _parent + " /var/www/html/copy");
        String result = command.execute();
        if (result != null) {
            s_logger.warn("Error in linking err=" + result);
            return null;
        }
    }
    return new StartupCommand[] {cmd};
}
/** Ensures the "snapshots" directory exists under the given mount point. */
protected boolean checkForSnapshotsDir(String mountPoint) {
    String snapshotsDirLocation = mountPoint + File.separator + "snapshots";
    return createDir("snapshots", snapshotsDirLocation, mountPoint);
}
/**
 * Ensures the "volumes" directory exists under the given mount point.
 * Uses File.separator for consistency with checkForSnapshotsDir().
 */
protected boolean checkForVolumesDir(String mountPoint) {
    String volumesDirLocation = mountPoint + File.separator + "volumes";
    return createDir("volumes", volumesDirLocation, mountPoint);
}
/**
 * Ensures {@code dirLocation} exists as a directory. A plain file occupying
 * the location is deleted and replaced with a directory.
 *
 * @param dirName     human-readable name used in log messages
 * @param dirLocation full path of the directory to ensure
 * @param mountPoint  mount point, used in log messages only
 * @return true when the directory exists (or was created) afterwards
 */
protected boolean createDir(String dirName, String dirLocation, String mountPoint) {
    boolean dirExists = false;
    final File dir = new File(dirLocation);
    if (!dir.exists()) {
        dirExists = _storage.mkdir(dirLocation);
    } else if (dir.isDirectory()) {
        s_logger.debug(dirName + " already exists on secondary storage, and is mounted at " + mountPoint);
        dirExists = true;
    } else {
        // a non-directory file is in the way: remove it, then create the directory
        dirExists = dir.delete() && _storage.mkdir(dirLocation);
    }
    if (dirExists) {
        s_logger.info(dirName + " directory created/exists on Secondary Storage.");
    } else {
        s_logger.info(dirName + " directory does not exist on Secondary Storage.");
    }
    return dirExists;
}
/** @return default location of the secondary storage helper scripts. */
@Override
protected String getDefaultScriptsDir() {
    return "./scripts/storage/secondary";
}
@Override
public void setName(String name) {
    // No-op: this resource does not use an externally assigned name.
}
@Override
public void setConfigParams(Map<String, Object> params) {
    // No-op: configuration happens exclusively through configure().
}
@Override
public Map<String, Object> getConfigParams() {
    // Config params are not exposed by this resource.
    return null;
}
@Override
public int getRunLevel() {
    // Run levels are not used by this resource.
    return 0;
}
@Override
public void setRunLevel(int level) {
    // No-op: run levels are not used by this resource.
}
/**
 * Fills network information on the startup command. Outside the system VM the
 * configured IPs are reported together with dummy MAC/netmask values; inside
 * the system VM the superclass performs the real NIC discovery.
 */
@Override
public void fillNetworkInformation(final StartupCommand cmd) {
    final String dummyMac = "00:06:0A:0B:0C:0D";
    final String dummyNetmask = "255.255.255.0";
    if (!_inSystemVM) {
        cmd.setPrivateIpAddress(_eth1ip);
        cmd.setPrivateMacAddress(dummyMac);
        cmd.setPrivateNetmask(dummyNetmask);
        cmd.setPublicIpAddress(_publicIp);
        cmd.setPublicMacAddress(dummyMac);
        cmd.setPublicNetmask(dummyNetmask);
        cmd.setName(_hostname);
    } else {
        super.fillNetworkInformation(cmd);
    }
}
/**
 * Locates the create script for the given upload resource type.
 *
 * @throws InvalidParameterValueException for unsupported resource types
 */
private String getScriptLocation(UploadEntity.ResourceType resourceType) {
    String scriptsDir = (String)_params.get("template.scripts.dir");
    if (scriptsDir == null) {
        scriptsDir = "scripts/storage/secondary";
    }
    final String scriptName;
    if (resourceType == UploadEntity.ResourceType.VOLUME) {
        scriptName = "createvolume.sh";
    } else if (resourceType == UploadEntity.ResourceType.TEMPLATE) {
        scriptName = "createtmplt.sh";
    } else {
        throw new InvalidParameterValueException("cannot find script for resource type: " + resourceType);
    }
    return Script.findScript(scriptsDir, scriptName);
}
/**
 * Registers a new HTTP POST upload: decrypts and validates the metadata command,
 * rejects reuse of the one-time URL and oversized content, enforces the account's
 * secondary-storage limit, then builds and caches an UploadEntity for the upload.
 *
 * @param uuid          upload UUID from the URL (replaced by the entity UUID decoded from metadata)
 * @param metadata      encrypted, serialized TemplateOrVolumePostUploadCommand
 * @param contentLength Content-Length of the incoming request, in bytes
 * @return the registered UploadEntity; may be null (or partially initialized) when
 *         creation failed — the error is then recorded in the upload state map
 * @throws InvalidParameterValueException on bad metadata, reused URL, or size violations
 */
public UploadEntity createUploadEntity(String uuid, String metadata, long contentLength) {
TemplateOrVolumePostUploadCommand cmd = getTemplateOrVolumePostUploadCmd(metadata);
UploadEntity uploadEntity = null;
if(cmd == null ){
String errorMessage = "unable decode and deserialize metadata.";
updateStateMapWithError(uuid, errorMessage);
throw new InvalidParameterValueException(errorMessage);
} else {
// Trust the UUID embedded in the signed metadata, not the one from the URL.
uuid = cmd.getEntityUUID();
if (isOneTimePostUrlUsed(cmd)) {
uploadEntity = uploadEntityStateMap.get(uuid);
StringBuilder errorMessage = new StringBuilder("The one time post url is already used");
if (uploadEntity != null) {
errorMessage.append(" and the upload is in ").append(uploadEntity.getUploadState()).append(" state.");
}
throw new InvalidParameterValueException(errorMessage.toString());
}
// Reject uploads whose declared Content-Length exceeds the per-upload cap.
int maxSizeInGB = Integer.parseInt(cmd.getMaxUploadSize());
int contentLengthInGB = getSizeInGB(contentLength);
if (contentLengthInGB > maxSizeInGB) {
String errorMessage = "Maximum file upload size exceeded. Content Length received: " + contentLengthInGB + "GB. Maximum allowed size: " + maxSizeInGB + "GB.";
updateStateMapWithError(uuid, errorMessage);
throw new InvalidParameterValueException(errorMessage);
}
// Throws (and records the error) when the account's storage quota would be exceeded.
checkSecondaryStorageResourceLimit(cmd, contentLengthInGB);
try {
String absolutePath = cmd.getAbsolutePath();
uploadEntity = new UploadEntity(uuid, cmd.getEntityId(), UploadEntity.Status.IN_PROGRESS, cmd.getName(), absolutePath);
uploadEntity.setMetaDataPopulated(true);
uploadEntity.setResourceType(UploadEntity.ResourceType.valueOf(cmd.getType()));
uploadEntity.setFormat(Storage.ImageFormat.valueOf(cmd.getImageFormat()));
//relative path with out ssvm mount info.
uploadEntity.setTemplatePath(absolutePath);
String dataStoreUrl = cmd.getDataTo();
String installPathPrefix = this.getRootDir(dataStoreUrl) + File.separator + absolutePath;
uploadEntity.setInstallPathPrefix(installPathPrefix);
uploadEntity.setHvm(cmd.getRequiresHvm());
uploadEntity.setChksum(cmd.getChecksum());
uploadEntity.setMaxSizeInGB(maxSizeInGB);
uploadEntity.setDescription(cmd.getDescription());
uploadEntity.setContentLength(contentLength);
// create a install dir
if (!_storage.exists(installPathPrefix)) {
_storage.mkdir(installPathPrefix);
}
uploadEntityStateMap.put(uuid, uploadEntity);
} catch (Exception e) {
//upload entity will be null incase an exception occurs and the handler will not proceed.
s_logger.error("exception occurred while creating upload entity ", e);
updateStateMapWithError(uuid, e.getMessage());
}
}
return uploadEntity;
}
/**
 * Verifies that accepting an upload of {@code contentLengthInGB} would not push the
 * account's secondary-storage usage (templates + volumes + snapshots) past the
 * configured per-account limit.
 *
 * @param cmd               decoded post-upload command carrying account id and limits
 * @param contentLengthInGB declared size of the incoming upload, in whole GB
 * @throws InvalidParameterValueException when the limit would be exceeded (the error
 *         is also recorded in the upload state map under the entity UUID)
 */
private synchronized void checkSecondaryStorageResourceLimit(TemplateOrVolumePostUploadCommand cmd, int contentLengthInGB) {
    String rootDir = this.getRootDir(cmd.getDataTo()) + File.separator;
    long accountId = cmd.getAccountId();
    // Bug fix: the template directory size was previously computed but never assigned,
    // so template usage silently did not count toward the quota.
    long accountTemplateDirSize = sizeOfDirectoryIfExists(new File(rootDir + getTemplatePathForAccount(accountId)));
    long accountVolumeDirSize = sizeOfDirectoryIfExists(new File(rootDir + getVolumePathForAccount(accountId)));
    long accountSnapshotDirSize = sizeOfDirectoryIfExists(new File(rootDir + getSnapshotPathForAccount(accountId)));
    s_logger.debug("accountTemplateDirSize: " + accountTemplateDirSize + " accountSnapshotDirSize: " + accountSnapshotDirSize + " accountVolumeDirSize: " +
            accountVolumeDirSize);
    int accountDirSizeInGB = getSizeInGB(accountTemplateDirSize + accountSnapshotDirSize + accountVolumeDirSize);
    int defaultMaxAccountSecondaryStorageInGB = Integer.parseInt(cmd.getDefaultMaxAccountSecondaryStorage());
    if ((accountDirSizeInGB + contentLengthInGB) > defaultMaxAccountSecondaryStorageInGB) {
        s_logger.error("accountDirSizeInGb: " + accountDirSizeInGB + " defaultMaxAccountSecondaryStorageInGB: " + defaultMaxAccountSecondaryStorageInGB + " contentLengthInGB:"
                + contentLengthInGB);
        String errorMessage = "Maximum number of resources of type secondary_storage for account has exceeded";
        updateStateMapWithError(cmd.getEntityUUID(), errorMessage);
        throw new InvalidParameterValueException(errorMessage);
    }
}

/** Returns the total size in bytes of {@code dir}, or 0 when it does not exist. */
private static long sizeOfDirectoryIfExists(File dir) {
    return dir.exists() ? FileUtils.sizeOfDirectory(dir) : 0L;
}
/** Store-relative directory that holds the account's uploaded volumes. */
private String getVolumePathForAccount(long accountId) {
    return String.format("%s/%d", TemplateConstants.DEFAULT_VOLUME_ROOT_DIR, accountId);
}
/** Store-relative directory that holds the account's uploaded templates. */
private String getTemplatePathForAccount(long accountId) {
    return String.format("%s/%s%d", TemplateConstants.DEFAULT_TMPLT_ROOT_DIR, TemplateConstants.DEFAULT_TMPLT_FIRST_LEVEL_DIR, accountId);
}
/** Store-relative directory that holds the account's snapshots. */
private String getSnapshotPathForAccount(long accountId) {
    return String.format("%s/%d", TemplateConstants.DEFAULT_SNAPSHOT_ROOT_DIR, accountId);
}
/**
 * A one-time POST URL counts as consumed when an upload with the same entity UUID
 * has already been registered, or when the target upload path already exists on disk.
 */
private boolean isOneTimePostUrlUsed(TemplateOrVolumePostUploadCommand cmd) {
    final String entityUuid = cmd.getEntityUUID();
    final File uploadTarget = new File(this.getRootDir(cmd.getDataTo()) + File.separator + cmd.getAbsolutePath());
    return uploadEntityStateMap.containsKey(entityUuid) || uploadTarget.exists();
}
/** Converts a byte count to whole gigabytes, rounding any fraction up. */
private int getSizeInGB(long sizeInBytes) {
    final double bytesPerGB = 1024.0 * 1024.0 * 1024.0;
    return (int) Math.ceil(sizeInBytes / bytesPerGB);
}
/**
 * Finalizes a completed HTTP POST upload: validates the uploaded file's format and
 * physical size, runs the install script (createtmplt.sh / createvolume.sh) to move
 * the staged file to its final name, fixes permissions, writes the
 * template/volume.properties metadata, and records virtual/physical sizes.
 *
 * Fix: the checksum ("-c") option was previously added to the script twice when a
 * checksum was present; it is now passed exactly once.
 *
 * @param uuid     upload-entity UUID registered by createUploadEntity
 * @param filename name of the file uploaded into the staging directory
 * @return null on success, otherwise a human-readable error message
 */
public String postUpload(String uuid, String filename) {
    UploadEntity uploadEntity = uploadEntityStateMap.get(uuid);
    int installTimeoutPerGig = 180 * 60 * 1000;
    String resourcePath = uploadEntity.getInstallPathPrefix();
    String finalResourcePath = uploadEntity.getTmpltPath(); // template download
    UploadEntity.ResourceType resourceType = uploadEntity.getResourceType();
    // Validate that the file content matches the format the user declared.
    String fileSavedTempLocation = uploadEntity.getInstallPathPrefix() + "/" + filename;
    String uploadedFileExtension = FilenameUtils.getExtension(filename);
    String userSelectedFormat = uploadEntity.getFormat().toString();
    if (uploadedFileExtension.equals("zip") || uploadedFileExtension.equals("bz2") || uploadedFileExtension.equals("gz")) {
        userSelectedFormat += "." + uploadedFileExtension;
    }
    String formatError = ImageStoreUtil.checkTemplateFormat(fileSavedTempLocation, userSelectedFormat);
    if (StringUtils.isNotBlank(formatError)) {
        String errorString = "File type mismatch between uploaded file and selected format. Selected file format: " + userSelectedFormat + ". Received: " + formatError;
        s_logger.error(errorString);
        return errorString;
    }
    // Enforce the size cap against the bytes actually written to disk.
    int imgSizeGigs = getSizeInGB(_storage.getSize(fileSavedTempLocation));
    int maxSize = uploadEntity.getMaxSizeInGB();
    if (imgSizeGigs > maxSize) {
        String errorMessage = "Maximum file upload size exceeded. Physical file size: " + imgSizeGigs + "GB. Maximum allowed size: " + maxSize + "GB.";
        s_logger.error(errorMessage);
        return errorMessage;
    }
    imgSizeGigs++; // add one just in case
    long timeout = (long)imgSizeGigs * installTimeoutPerGig;
    Script scr = new Script(getScriptLocation(resourceType), timeout, s_logger);
    scr.add("-s", Integer.toString(imgSizeGigs));
    scr.add("-S", Long.toString(UploadEntity.s_maxTemplateSize));
    if (uploadEntity.getDescription() != null && uploadEntity.getDescription().length() > 1) {
        scr.add("-d", uploadEntity.getDescription());
    }
    if (uploadEntity.isHvm()) {
        scr.add("-h");
    }
    // Pass the checksum exactly once (previously duplicated further below).
    String checkSum = uploadEntity.getChksum();
    if (StringUtils.isNotBlank(checkSum)) {
        scr.add("-c", checkSum);
    }
    // add options common to ISO and template
    String extension = uploadEntity.getFormat().getFileExtension();
    String templateName = "";
    if (extension.equals("iso")) {
        templateName = uploadEntity.getUuid().trim().replace(" ", "_");
    } else {
        try {
            templateName = UUID.nameUUIDFromBytes((uploadEntity.getFilename() + System.currentTimeMillis()).getBytes("UTF-8")).toString();
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is guaranteed by the platform; fall back to the entity UUID anyway.
            templateName = uploadEntity.getUuid().trim().replace(" ", "_");
        }
    }
    // run script to mv the temporary template file to the final template file
    String templateFilename = templateName + "." + extension;
    uploadEntity.setTemplatePath(finalResourcePath + "/" + templateFilename);
    scr.add("-n", templateFilename);
    scr.add("-t", resourcePath);
    scr.add("-f", fileSavedTempLocation); // this is the temporary template file downloaded
    scr.add("-u"); // cleanup
    String result;
    result = scr.execute();
    if (result != null) {
        return result;
    }
    // Set permissions for the downloaded template
    File downloadedTemplate = new File(resourcePath + "/" + templateFilename);
    _storage.setWorldReadableAndWriteable(downloadedTemplate);
    // Set permissions for template/volume.properties
    String propertiesFile = resourcePath;
    if (resourceType == UploadEntity.ResourceType.TEMPLATE) {
        propertiesFile += "/template.properties";
    } else {
        propertiesFile += "/volume.properties";
    }
    File templateProperties = new File(propertiesFile);
    _storage.setWorldReadableAndWriteable(templateProperties);
    TemplateLocation loc = new TemplateLocation(_storage, resourcePath);
    try {
        loc.create(uploadEntity.getEntityId(), true, uploadEntity.getFilename());
    } catch (IOException e) {
        s_logger.warn("Something is wrong with template location " + resourcePath, e);
        loc.purge();
        return "Unable to upload due to " + e.getMessage();
    }
    // Let the first processor that recognizes the image record its format/size info.
    Map<String, Processor> processors = _dlMgr.getProcessors();
    for (Processor processor : processors.values()) {
        FormatInfo info = null;
        try {
            info = processor.process(resourcePath, null, templateName);
        } catch (InternalErrorException e) {
            s_logger.error("Template process exception ", e);
            return e.toString();
        }
        if (info != null) {
            loc.addFormat(info);
            uploadEntity.setVirtualSize(info.virtualSize);
            uploadEntity.setPhysicalSize(info.size);
            break;
        }
    }
    if (!loc.save()) {
        s_logger.warn("Cleaning up because we're unable to save the formats");
        loc.purge();
    }
    uploadEntity.setStatus(UploadEntity.Status.COMPLETED);
    uploadEntityStateMap.put(uploadEntity.getUuid(), uploadEntity);
    return null;
}
/**
 * Lazily loads and caches the pre-shared key used to validate post-upload requests.
 * Returns null if the key file could not be read (the failure is logged at debug).
 * NOTE(review): the lazy init is not synchronized — concurrent first calls may read
 * the file more than once (same value, so presumed harmless) — confirm callers.
 */
private String getPostUploadPSK() {
if(_ssvmPSK == null ) {
try {
_ssvmPSK = FileUtils.readFileToString(new File(POST_UPLOAD_KEY_LOCATION), "utf-8");
} catch (IOException e) {
s_logger.debug("Error while reading SSVM PSK from location " + POST_UPLOAD_KEY_LOCATION, e);
}
}
return _ssvmPSK;
}
/**
 * Marks the upload identified by {@code uuid} as failed and records the error
 * message. Creates a fresh entry when no entity is registered for the UUID yet.
 */
public void updateStateMapWithError(String uuid, String errorMessage) {
    UploadEntity entity = uploadEntityStateMap.get(uuid);
    if (entity == null) {
        entity = new UploadEntity();
    }
    entity.setStatus(UploadEntity.Status.ERROR);
    entity.setErrorMessage(errorMessage);
    uploadEntityStateMap.put(uuid, entity);
}
/**
 * Validates an incoming post-upload HTTP request: required parameters present,
 * positive content length, signature matches HMAC(metadata + URL + expiry) under the
 * shared PSK, and the expiry timestamp has not passed.
 *
 * @param signature     signature supplied by the management server
 * @param metadata      encrypted upload metadata from the request
 * @param timeout       ISO-8601 expiry timestamp of the one-time URL
 * @param hostname      host used to reconstruct the signed upload URL
 * @param contentLength Content-Length of the request, in bytes
 * @param uuid          upload entity UUID (used to record failures in the state map)
 * @throws InvalidParameterValueException on any validation failure
 */
public void validatePostUploadRequest(String signature, String metadata, String timeout, String hostname, long contentLength, String uuid) throws InvalidParameterValueException {
    // check none of the params are empty
    if (StringUtils.isEmpty(signature) || StringUtils.isEmpty(metadata) || StringUtils.isEmpty(timeout)) {
        updateStateMapWithError(uuid, "signature, metadata and expires are compulsory fields.");
        throw new InvalidParameterValueException("signature, metadata and expires are compulsory fields.");
    }
    // check that contentLength exists and is greater than zero
    if (contentLength <= 0) {
        // Consistency fix: record the failure like every other validation error
        // (previously this path threw without updating the upload state map).
        String errorMessage = "content length is not set in the request or has invalid value.";
        updateStateMapWithError(uuid, errorMessage);
        throw new InvalidParameterValueException(errorMessage);
    }
    // validate signature
    String fullUrl = "https://" + hostname + "/upload/" + uuid;
    String computedSignature = EncryptionUtil.generateSignature(metadata + fullUrl + timeout, getPostUploadPSK());
    // NOTE(review): String.equals is not a constant-time comparison; consider
    // MessageDigest.isEqual if timing attacks are a concern here.
    boolean isSignatureValid = computedSignature.equals(signature);
    if (!isSignatureValid) {
        updateStateMapWithError(uuid, "signature validation failed.");
        throw new InvalidParameterValueException("signature validation failed.");
    }
    // validate timeout
    DateTime timeoutDateTime = DateTime.parse(timeout, ISODateTimeFormat.dateTime());
    if (timeoutDateTime.isBeforeNow()) {
        updateStateMapWithError(uuid, "request not valid anymore.");
        throw new InvalidParameterValueException("request not valid anymore.");
    }
}
/**
 * Decrypts the request metadata with the post-upload PSK and deserializes it into a
 * TemplateOrVolumePostUploadCommand. Returns null when decoding or deserialization
 * fails (the exception is logged).
 */
private TemplateOrVolumePostUploadCommand getTemplateOrVolumePostUploadCmd(String metadata) {
    try {
        final String decoded = EncryptionUtil.decodeData(metadata, getPostUploadPSK());
        final Gson gson = new GsonBuilder().create();
        return gson.fromJson(decoded, TemplateOrVolumePostUploadCommand.class);
    } catch (Exception ex) {
        s_logger.error("exception while decoding and deserialising metadata", ex);
        return null;
    }
}
}
|
services/secondary-storage/server/src/org/apache/cloudstack/storage/resource/NfsSecondaryStorageResource.java
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.storage.resource;
import static com.cloud.utils.S3Utils.mputFile;
import static com.cloud.utils.S3Utils.putFile;
import static com.cloud.utils.StringUtils.join;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static org.apache.commons.lang.StringUtils.substringAfterLast;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.net.InetAddress;
import java.net.URI;
import java.net.UnknownHostException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.naming.ConfigurationException;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.storage.Storage;
import com.cloud.storage.template.TemplateConstants;
import com.cloud.utils.EncryptionUtil;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.http.HttpContentCompressor;
import io.netty.handler.codec.http.HttpRequestDecoder;
import io.netty.handler.codec.http.HttpResponseEncoder;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import org.apache.cloudstack.storage.command.TemplateOrVolumePostUploadCommand;
import org.apache.cloudstack.storage.template.UploadEntity;
import org.apache.cloudstack.utils.imagestore.ImageStoreUtil;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.log4j.Logger;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import org.apache.cloudstack.framework.security.keystore.KeystoreManager;
import org.apache.cloudstack.storage.command.CopyCmdAnswer;
import org.apache.cloudstack.storage.command.CopyCommand;
import org.apache.cloudstack.storage.command.DeleteCommand;
import org.apache.cloudstack.storage.command.DownloadCommand;
import org.apache.cloudstack.storage.command.DownloadProgressCommand;
import org.apache.cloudstack.storage.command.UploadStatusAnswer;
import org.apache.cloudstack.storage.command.UploadStatusAnswer.UploadStatus;
import org.apache.cloudstack.storage.command.UploadStatusCommand;
import org.apache.cloudstack.storage.template.DownloadManager;
import org.apache.cloudstack.storage.template.DownloadManagerImpl;
import org.apache.cloudstack.storage.template.DownloadManagerImpl.ZfsPathParser;
import org.apache.cloudstack.storage.template.UploadManager;
import org.apache.cloudstack.storage.template.UploadManagerImpl;
import org.apache.cloudstack.storage.to.SnapshotObjectTO;
import org.apache.cloudstack.storage.to.TemplateObjectTO;
import org.apache.cloudstack.storage.to.VolumeObjectTO;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.CheckHealthAnswer;
import com.cloud.agent.api.CheckHealthCommand;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.ComputeChecksumCommand;
import com.cloud.agent.api.DeleteSnapshotsDirCommand;
import com.cloud.agent.api.GetStorageStatsAnswer;
import com.cloud.agent.api.GetStorageStatsCommand;
import com.cloud.agent.api.PingCommand;
import com.cloud.agent.api.PingStorageCommand;
import com.cloud.agent.api.ReadyAnswer;
import com.cloud.agent.api.ReadyCommand;
import com.cloud.agent.api.SecStorageFirewallCfgCommand;
import com.cloud.agent.api.SecStorageFirewallCfgCommand.PortConfig;
import com.cloud.agent.api.SecStorageSetupAnswer;
import com.cloud.agent.api.SecStorageSetupCommand;
import com.cloud.agent.api.SecStorageVMSetupCommand;
import com.cloud.agent.api.StartupCommand;
import com.cloud.agent.api.StartupSecondaryStorageCommand;
import com.cloud.agent.api.storage.CreateEntityDownloadURLCommand;
import com.cloud.agent.api.storage.DeleteEntityDownloadURLCommand;
import com.cloud.agent.api.storage.DownloadAnswer;
import com.cloud.agent.api.storage.ListTemplateAnswer;
import com.cloud.agent.api.storage.ListTemplateCommand;
import com.cloud.agent.api.storage.ListVolumeAnswer;
import com.cloud.agent.api.storage.ListVolumeCommand;
import com.cloud.agent.api.storage.UploadCommand;
import com.cloud.agent.api.to.DataObjectType;
import com.cloud.agent.api.to.DataStoreTO;
import com.cloud.agent.api.to.DataTO;
import com.cloud.agent.api.to.NfsTO;
import com.cloud.agent.api.to.S3TO;
import com.cloud.agent.api.to.SwiftTO;
import com.cloud.exception.InternalErrorException;
import com.cloud.host.Host;
import com.cloud.host.Host.Type;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.resource.ServerResourceBase;
import com.cloud.storage.DataStoreRole;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.StorageLayer;
import com.cloud.storage.VMTemplateStorageResourceAssoc;
import com.cloud.storage.template.OVAProcessor;
import com.cloud.storage.template.Processor;
import com.cloud.storage.template.Processor.FormatInfo;
import com.cloud.storage.template.QCOW2Processor;
import com.cloud.storage.template.RawImageProcessor;
import com.cloud.storage.template.TARProcessor;
import com.cloud.storage.template.TemplateLocation;
import com.cloud.storage.template.TemplateProp;
import com.cloud.storage.template.VhdProcessor;
import com.cloud.storage.template.VmdkProcessor;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.S3Utils;
import com.cloud.utils.S3Utils.FileNamingStrategy;
import com.cloud.utils.SwiftUtil;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.net.NetUtils;
import com.cloud.utils.script.OutputInterpreter;
import com.cloud.utils.script.Script;
import com.cloud.vm.SecondaryStorageVm;
import org.joda.time.DateTime;
import org.joda.time.format.ISODateTimeFormat;
public class NfsSecondaryStorageResource extends ServerResourceBase implements SecondaryStorageResource {
private static final Logger s_logger = Logger.getLogger(NfsSecondaryStorageResource.class);
// Store-relative root directories for installed templates and uploaded volumes.
private static final String TEMPLATE_ROOT_DIR = "template/tmpl";
private static final String VOLUME_ROOT_DIR = "volumes";
// Pre-shared key file used to validate post-upload requests (see getPostUploadPSK).
private static final String POST_UPLOAD_KEY_LOCATION = "/etc/cloudstack/agent/ms-psk";
// Generic operation timeout; units depend on callers — presumably milliseconds (confirm).
int _timeout;
/** Returns the configured operation timeout. */
public int getTimeout() {
return _timeout;
}
/** Sets the operation timeout. */
public void setTimeout(int timeout) {
_timeout = timeout;
}
// Identity / topology parameters, presumably supplied at configure time — confirm.
String _instance;
String _dc;
String _pod;
String _guid;
String _role;
Map<String, Object> _params;
// Filesystem abstraction used for all local file operations.
protected StorageLayer _storage;
// True when running inside the secondary-storage system VM (see setInSystemVM).
protected boolean _inSystemVM = false;
boolean _sslCopy = false;
protected DownloadManager _dlMgr;
protected UploadManager _upldMgr;
// Helper script locations resolved during configuration.
private String _configSslScr;
private String _configAuthScr;
private String _configIpFirewallScr;
// Network settings reported back to the management server on startup.
private String _publicIp;
private String _hostname;
private String _localgw;
private String _eth1mask;
private String _eth1ip;
private String _storageIp;
private String _storageNetmask;
private String _storageGateway;
private final List<String> nfsIps = new ArrayList<String>();
// Root under which remote secondary-storage exports are mounted.
protected String _parent = "/mnt/SecStorage";
final private String _tmpltpp = "template.properties";
protected String createTemplateFromSnapshotXenScript;
// In-memory registry of HTTP POST uploads, keyed by entity UUID.
private HashMap<String,UploadEntity> uploadEntityStateMap = new HashMap<String,UploadEntity>();
// Lazily loaded pre-shared key for validating post-upload requests.
private String _ssvmPSK = null;
/** Overrides the root directory under which secondary storage is mounted. */
public void setParentPath(String path) {
_parent = path;
}
/** Returns the root directory under which secondary storage is mounted. */
public String getMountingRoot() {
return _parent;
}
@Override
public void disconnected() {
// No-op: nothing to clean up when the agent connection drops.
}
/** Marks whether this resource runs inside the secondary-storage system VM. */
public void setInSystemVM(boolean inSystemVM) {
_inSystemVM = inSystemVM;
}
/**
 * Dispatches an agent command to the matching handler based on its concrete type.
 * The DownloadProgressCommand check precedes DownloadCommand — presumably because
 * the former specializes the latter (confirm class hierarchy). Unknown commands
 * yield an unsupported-command answer.
 */
@Override
public Answer executeRequest(Command cmd) {
if (cmd instanceof DownloadProgressCommand) {
return _dlMgr.handleDownloadCommand(this, (DownloadProgressCommand)cmd);
} else if (cmd instanceof DownloadCommand) {
return execute((DownloadCommand)cmd);
} else if (cmd instanceof UploadCommand) {
return _upldMgr.handleUploadCommand(this, (UploadCommand)cmd);
} else if (cmd instanceof CreateEntityDownloadURLCommand) {
return _upldMgr.handleCreateEntityURLCommand((CreateEntityDownloadURLCommand)cmd);
} else if (cmd instanceof DeleteEntityDownloadURLCommand) {
return _upldMgr.handleDeleteEntityDownloadURLCommand((DeleteEntityDownloadURLCommand)cmd);
} else if (cmd instanceof GetStorageStatsCommand) {
return execute((GetStorageStatsCommand)cmd);
} else if (cmd instanceof CheckHealthCommand) {
return new CheckHealthAnswer((CheckHealthCommand)cmd, true);
} else if (cmd instanceof ReadyCommand) {
return new ReadyAnswer((ReadyCommand)cmd);
} else if (cmd instanceof SecStorageFirewallCfgCommand) {
return execute((SecStorageFirewallCfgCommand)cmd);
} else if (cmd instanceof SecStorageVMSetupCommand) {
return execute((SecStorageVMSetupCommand)cmd);
} else if (cmd instanceof SecStorageSetupCommand) {
return execute((SecStorageSetupCommand)cmd);
} else if (cmd instanceof ComputeChecksumCommand) {
return execute((ComputeChecksumCommand)cmd);
} else if (cmd instanceof ListTemplateCommand) {
return execute((ListTemplateCommand)cmd);
} else if (cmd instanceof ListVolumeCommand) {
return execute((ListVolumeCommand)cmd);
} else if (cmd instanceof DeleteSnapshotsDirCommand) {
return execute((DeleteSnapshotsDirCommand)cmd);
} else if (cmd instanceof CopyCommand) {
return execute((CopyCommand)cmd);
} else if (cmd instanceof DeleteCommand) {
return execute((DeleteCommand)cmd);
} else if (cmd instanceof UploadStatusCommand) {
return execute((UploadStatusCommand)cmd);
} else {
return Answer.createUnsupportedCommandAnswer(cmd);
}
}
/**
 * Post-download processing shared by the Swift/S3 copy paths. Snapshots are simply
 * answered with their destination path; templates/volumes are run through the
 * create script (unpack/rename under a fresh UUID name), and the answer carries the
 * installed file's final path and size.
 *
 * @param destFile     file just downloaded into the staging/download directory
 * @param downloadPath local (mounted) directory containing destFile
 * @param destPath     store-relative destination path used in the returned TO
 * @throws ConfigurationException when a required install script cannot be located
 */
protected CopyCmdAnswer postProcessing(File destFile, String downloadPath, String destPath, DataTO srcData, DataTO destData) throws ConfigurationException {
if (destData.getObjectType() == DataObjectType.SNAPSHOT) {
SnapshotObjectTO snapshot = new SnapshotObjectTO();
snapshot.setPath(destPath + File.separator + destFile.getName());
CopyCmdAnswer answer = new CopyCmdAnswer(snapshot);
return answer;
}
// do post processing to unzip the file if it is compressed
String scriptsDir = "scripts/storage/secondary";
String createTmpltScr = Script.findScript(scriptsDir, "createtmplt.sh");
if (createTmpltScr == null) {
throw new ConfigurationException("Unable to find createtmplt.sh");
}
s_logger.info("createtmplt.sh found in " + createTmpltScr);
String createVolScr = Script.findScript(scriptsDir, "createvolume.sh");
if (createVolScr == null) {
throw new ConfigurationException("Unable to find createvolume.sh");
}
s_logger.info("createvolume.sh found in " + createVolScr);
String script = srcData.getObjectType() == DataObjectType.TEMPLATE ? createTmpltScr : createVolScr;
// Scale the script timeout with the image size (per-GB allowance).
int installTimeoutPerGig = 180 * 60 * 1000;
long imgSizeGigs = (long)Math.ceil(destFile.length() * 1.0d / (1024 * 1024 * 1024));
imgSizeGigs++; // add one just in case
long timeout = imgSizeGigs * installTimeoutPerGig;
String origPath = destFile.getAbsolutePath();
String extension = null;
if (srcData.getObjectType() == DataObjectType.TEMPLATE) {
extension = ((TemplateObjectTO)srcData).getFormat().getFileExtension();
} else if (srcData.getObjectType() == DataObjectType.VOLUME) {
extension = ((VolumeObjectTO)srcData).getFormat().getFileExtension();
}
// Install under a fresh random name to avoid collisions in the store.
String templateName = UUID.randomUUID().toString();
String templateFilename = templateName + "." + extension;
Script scr = new Script(script, timeout, s_logger);
scr.add("-s", Long.toString(imgSizeGigs)); // not used for now
scr.add("-n", templateFilename);
scr.add("-t", downloadPath);
scr.add("-f", origPath); // this is the temporary
// template file downloaded
String result;
result = scr.execute();
if (result != null) {
// script execution failure
throw new CloudRuntimeException("Failed to run script " + script);
}
String finalFileName = templateFilename;
String finalDownloadPath = destPath + File.separator + templateFilename;
// compute the size of
long size = _storage.getSize(downloadPath + File.separator + templateFilename);
DataTO newDestTO = null;
if (destData.getObjectType() == DataObjectType.TEMPLATE) {
TemplateObjectTO newTemplTO = new TemplateObjectTO();
newTemplTO.setPath(finalDownloadPath);
newTemplTO.setName(finalFileName);
newTemplTO.setSize(size);
newTemplTO.setPhysicalSize(size);
newDestTO = newTemplTO;
} else {
VolumeObjectTO newVolTO = new VolumeObjectTO();
newVolTO.setPath(finalDownloadPath);
newVolTO.setName(finalFileName);
newVolTO.setSize(size);
newDestTO = newVolTO;
}
return new CopyCmdAnswer(newDestTO);
}
/**
 * Downloads an object from Swift into the destination NFS image store, then runs
 * postProcessing to unpack/rename it and build the answer.
 *
 * @return a CopyCmdAnswer describing the installed object, or an error answer
 */
protected Answer copyFromSwiftToNfs(CopyCommand cmd, DataTO srcData, SwiftTO swiftTO, DataTO destData, NfsTO destImageStore) {
    final String storagePath = destImageStore.getUrl();
    final String destPath = destData.getPath();
    try {
        String downloadPath = determineStorageTemplatePath(storagePath, destPath);
        final File downloadDirectory = _storage.getFile(downloadPath);
        // Fix: File.mkdirs() returns false both on failure AND when the directory
        // already exists, so an existing download dir previously failed the copy.
        // Check for existence first (mirrors copyFromS3ToNfs).
        if (downloadDirectory.exists()) {
            s_logger.debug("Directory " + downloadPath + " already exists");
        } else if (!downloadDirectory.mkdirs()) {
            return new CopyCmdAnswer("Failed to create download directory " + downloadPath);
        }
        File destFile = SwiftUtil.getObject(swiftTO, downloadDirectory, srcData.getPath());
        return postProcessing(destFile, downloadPath, destPath, srcData, destData);
    } catch (Exception e) {
        s_logger.debug("Failed to copy swift to nfs", e);
        return new CopyCmdAnswer(e.toString());
    }
}
/**
 * Downloads an object from S3 into the destination NFS image store (creating the
 * download directory if needed), then runs postProcessing to install it.
 *
 * @return a CopyCmdAnswer describing the installed object, or an error answer
 */
protected Answer copyFromS3ToNfs(CopyCommand cmd, DataTO srcData, S3TO s3, DataTO destData, NfsTO destImageStore) {
    final String storagePath = destImageStore.getUrl();
    final String destPath = destData.getPath();
    try {
        String downloadPath = determineStorageTemplatePath(storagePath, destPath);
        final File downloadDirectory = _storage.getFile(downloadPath);
        if (downloadDirectory.exists()) {
            s_logger.debug("Directory " + downloadPath + " already exists");
        } else {
            if (!downloadDirectory.mkdirs()) {
                final String errMsg = "Unable to create directory " + downloadPath + " to copy from S3 to cache.";
                s_logger.error(errMsg);
                return new CopyCmdAnswer(errMsg);
            }
        }
        // The stored file name is the portion of the S3 key after the last separator.
        File destFile = S3Utils.getFile(s3, s3.getBucketName(), srcData.getPath(), downloadDirectory, new FileNamingStrategy() {
            @Override
            public String determineFileName(final String key) {
                return substringAfterLast(key, S3Utils.SEPARATOR);
            }
        });
        if (destFile == null) {
            return new CopyCmdAnswer("Can't find template");
        }
        return postProcessing(destFile, downloadPath, destPath, srcData, destData);
    } catch (Exception e) {
        // Fix: the previous message used a malformed format string
        // ("Failed to download" + "due to $2%s"), rendering as
        // "Failed to downloaddue to $2...". Use a plain %s with a space.
        final String errMsg = format("Failed to download due to %s", e.getMessage());
        s_logger.error(errMsg, e);
        return new CopyCmdAnswer(errMsg);
    }
}
/**
 * Creates a VHD template from a XenServer snapshot living on one NFS store and
 * installs it on another, using the create-template-from-snapshot Xen script, then
 * probes the VHD and writes its template.properties metadata.
 *
 * @return a CopyCmdAnswer carrying the new TemplateObjectTO, or an error answer
 */
protected Answer copySnapshotToTemplateFromNfsToNfsXenserver(CopyCommand cmd, SnapshotObjectTO srcData, NfsTO srcDataStore, TemplateObjectTO destData,
NfsTO destDataStore) {
String srcMountPoint = getRootDir(srcDataStore.getUrl());
String snapshotPath = srcData.getPath();
int index = snapshotPath.lastIndexOf("/");
String snapshotName = snapshotPath.substring(index + 1);
// Normalize the snapshot file name: append .vhd unless it is already VHD-named.
if (!snapshotName.startsWith("VHD-") && !snapshotName.endsWith(".vhd")) {
snapshotName = snapshotName + ".vhd";
}
snapshotPath = snapshotPath.substring(0, index);
snapshotPath = srcMountPoint + File.separator + snapshotPath;
String destMountPoint = getRootDir(destDataStore.getUrl());
String destPath = destMountPoint + File.separator + destData.getPath();
String errMsg = null;
try {
_storage.mkdir(destPath);
// New template gets a fresh UUID-based name.
String templateUuid = UUID.randomUUID().toString();
String templateName = templateUuid + ".vhd";
Script command = new Script(createTemplateFromSnapshotXenScript, cmd.getWait() * 1000, s_logger);
command.add("-p", snapshotPath);
command.add("-s", snapshotName);
command.add("-n", templateName);
command.add("-t", destPath);
String result = command.execute();
// Non-empty output from the script indicates failure; propagate it as the answer.
if (result != null && !result.equalsIgnoreCase("")) {
return new CopyCmdAnswer(result);
}
// Probe the produced VHD for virtual/physical size and write metadata.
Map<String, Object> params = new HashMap<String, Object>();
params.put(StorageLayer.InstanceConfigKey, _storage);
Processor processor = new VhdProcessor();
processor.configure("Vhd Processor", params);
FormatInfo info = processor.process(destPath, null, templateUuid);
TemplateLocation loc = new TemplateLocation(_storage, destPath);
loc.create(1, true, templateUuid);
loc.addFormat(info);
loc.save();
TemplateProp prop = loc.getTemplateInfo();
TemplateObjectTO newTemplate = new TemplateObjectTO();
newTemplate.setPath(destData.getPath() + File.separator + templateName);
newTemplate.setFormat(ImageFormat.VHD);
newTemplate.setSize(prop.getSize());
newTemplate.setPhysicalSize(prop.getPhysicalSize());
newTemplate.setName(templateUuid);
return new CopyCmdAnswer(newTemplate);
} catch (ConfigurationException e) {
s_logger.debug("Failed to create template from snapshot: " + e.toString());
errMsg = e.toString();
} catch (InternalErrorException e) {
s_logger.debug("Failed to create template from snapshot: " + e.toString());
errMsg = e.toString();
} catch (IOException e) {
s_logger.debug("Failed to create template from snapshot: " + e.toString());
errMsg = e.toString();
}
return new CopyCmdAnswer(errMsg);
}
protected Answer copySnapshotToTemplateFromNfsToNfs(CopyCommand cmd, SnapshotObjectTO srcData, NfsTO srcDataStore, TemplateObjectTO destData, NfsTO destDataStore) {
if (srcData.getHypervisorType() == HypervisorType.XenServer) {
return copySnapshotToTemplateFromNfsToNfsXenserver(cmd, srcData, srcDataStore, destData, destDataStore);
} else if (srcData.getHypervisorType() == HypervisorType.KVM) {
File srcFile = getFile(srcData.getPath(), srcDataStore.getUrl());
File destFile = getFile(destData.getPath(), destDataStore.getUrl());
VolumeObjectTO volumeObjectTO = srcData.getVolume();
ImageFormat srcFormat = null;
//TODO: the image format should be stored in snapshot table, instead of getting from volume
if (volumeObjectTO != null) {
srcFormat = volumeObjectTO.getFormat();
} else {
srcFormat = ImageFormat.QCOW2;
}
// get snapshot file name
String templateName = srcFile.getName();
// add kvm file extension for copied template name
String fileName = templateName + "." + srcFormat.getFileExtension();
String destFileFullPath = destFile.getAbsolutePath() + File.separator + fileName;
s_logger.debug("copy snapshot " + srcFile.getAbsolutePath() + " to template " + destFileFullPath);
Script.runSimpleBashScript("cp " + srcFile.getAbsolutePath() + " " + destFileFullPath);
try {
// generate template.properties file
String metaFileName = destFile.getAbsolutePath() + File.separator + "template.properties";
_storage.create(destFile.getAbsolutePath(), "template.properties");
File metaFile = new File(metaFileName);
FileWriter writer = new FileWriter(metaFile);
BufferedWriter bufferWriter = new BufferedWriter(writer);
// KVM didn't change template unique name, just used the template name passed from orchestration layer, so no need
// to send template name back.
bufferWriter.write("uniquename=" + destData.getName());
bufferWriter.write("\n");
bufferWriter.write("filename=" + fileName);
bufferWriter.write("\n");
long size = _storage.getSize(destFileFullPath);
bufferWriter.write("size=" + size);
bufferWriter.close();
writer.close();
/**
* Snapshots might be in either QCOW2 or RAW image format
*
* For example RBD snapshots are in RAW format
*/
Processor processor = null;
if (srcFormat == ImageFormat.QCOW2) {
processor = new QCOW2Processor();
} else if (srcFormat == ImageFormat.RAW) {
processor = new RawImageProcessor();
} else {
throw new ConfigurationException("Unknown image format " + srcFormat.toString());
}
Map<String, Object> params = new HashMap<String, Object>();
params.put(StorageLayer.InstanceConfigKey, _storage);
processor.configure("template processor", params);
String destPath = destFile.getAbsolutePath();
FormatInfo info = processor.process(destPath, null, templateName);
TemplateLocation loc = new TemplateLocation(_storage, destPath);
loc.create(1, true, destData.getName());
loc.addFormat(info);
loc.save();
TemplateProp prop = loc.getTemplateInfo();
TemplateObjectTO newTemplate = new TemplateObjectTO();
newTemplate.setPath(destData.getPath() + File.separator + fileName);
newTemplate.setFormat(srcFormat);
newTemplate.setSize(prop.getSize());
newTemplate.setPhysicalSize(prop.getPhysicalSize());
return new CopyCmdAnswer(newTemplate);
} catch (ConfigurationException e) {
s_logger.debug("Failed to create template:" + e.toString());
return new CopyCmdAnswer(e.toString());
} catch (IOException e) {
s_logger.debug("Failed to create template:" + e.toString());
return new CopyCmdAnswer(e.toString());
} catch (InternalErrorException e) {
s_logger.debug("Failed to create template:" + e.toString());
return new CopyCmdAnswer(e.toString());
}
}
return new CopyCmdAnswer("");
}
protected File getFile(String path, String nfsPath) {
String filePath = getRootDir(nfsPath) + File.separator + path;
File f = new File(filePath);
if (!f.exists()) {
_storage.mkdirs(filePath);
f = new File(filePath);
}
return f;
}
protected Answer createTemplateFromSnapshot(CopyCommand cmd) {
DataTO srcData = cmd.getSrcTO();
DataTO destData = cmd.getDestTO();
DataStoreTO srcDataStore = srcData.getDataStore();
DataStoreTO destDataStore = destData.getDataStore();
if (srcDataStore.getRole() == DataStoreRole.Image || srcDataStore.getRole() == DataStoreRole.ImageCache || srcDataStore.getRole() == DataStoreRole.Primary) {
if (!(srcDataStore instanceof NfsTO)) {
s_logger.debug("only support nfs storage as src, when create template from snapshot");
return Answer.createUnsupportedCommandAnswer(cmd);
}
if (destDataStore instanceof NfsTO) {
return copySnapshotToTemplateFromNfsToNfs(cmd, (SnapshotObjectTO)srcData, (NfsTO)srcDataStore, (TemplateObjectTO)destData, (NfsTO)destDataStore);
} else if (destDataStore instanceof SwiftTO) {
//create template on the same data store
CopyCmdAnswer answer =
(CopyCmdAnswer)copySnapshotToTemplateFromNfsToNfs(cmd, (SnapshotObjectTO)srcData, (NfsTO)srcDataStore, (TemplateObjectTO)destData,
(NfsTO)srcDataStore);
if (!answer.getResult()) {
return answer;
}
s_logger.debug("starting copy template to swift");
DataTO newTemplate = answer.getNewData();
File templateFile = getFile(newTemplate.getPath(), ((NfsTO)srcDataStore).getUrl());
SwiftTO swift = (SwiftTO)destDataStore;
String containterName = SwiftUtil.getContainerName(destData.getObjectType().toString(), destData.getId());
String swiftPath = SwiftUtil.putObject(swift, templateFile, containterName, templateFile.getName());
//upload template.properties
File properties = new File(templateFile.getParent() + File.separator + _tmpltpp);
if (properties.exists()) {
SwiftUtil.putObject(swift, properties, containterName, _tmpltpp);
}
//clean up template data on staging area
try {
DeleteCommand deleteCommand = new DeleteCommand(newTemplate);
execute(deleteCommand);
} catch (Exception e) {
s_logger.debug("Failed to clean up staging area:", e);
}
TemplateObjectTO template = new TemplateObjectTO();
template.setPath(swiftPath);
template.setSize(templateFile.length());
template.setPhysicalSize(template.getSize());
SnapshotObjectTO snapshot = (SnapshotObjectTO)srcData;
template.setFormat(snapshot.getVolume().getFormat());
return new CopyCmdAnswer(template);
} else if (destDataStore instanceof S3TO) {
//create template on the same data store
CopyCmdAnswer answer =
(CopyCmdAnswer)copySnapshotToTemplateFromNfsToNfs(cmd, (SnapshotObjectTO)srcData, (NfsTO)srcDataStore, (TemplateObjectTO)destData,
(NfsTO)srcDataStore);
if (!answer.getResult()) {
return answer;
}
TemplateObjectTO newTemplate = (TemplateObjectTO)answer.getNewData();
newTemplate.setDataStore(srcDataStore);
CopyCommand newCpyCmd = new CopyCommand(newTemplate, destData, cmd.getWait(), cmd.executeInSequence());
Answer result = copyFromNfsToS3(newCpyCmd);
//clean up template data on staging area
try {
DeleteCommand deleteCommand = new DeleteCommand(newTemplate);
execute(deleteCommand);
} catch (Exception e) {
s_logger.debug("Failed to clean up staging area:", e);
}
return result;
}
}
s_logger.debug("Failed to create templat from snapshot");
return new CopyCmdAnswer("Unsupported prototcol");
}
protected Answer copyFromNfsToImage(CopyCommand cmd) {
DataTO destData = cmd.getDestTO();
DataStoreTO destDataStore = destData.getDataStore();
if (destDataStore instanceof S3TO) {
return copyFromNfsToS3(cmd);
} else {
return new CopyCmdAnswer("unsupported ");
}
}
protected Answer execute(CopyCommand cmd) {
DataTO srcData = cmd.getSrcTO();
DataTO destData = cmd.getDestTO();
DataStoreTO srcDataStore = srcData.getDataStore();
DataStoreTO destDataStore = destData.getDataStore();
if (srcData.getObjectType() == DataObjectType.SNAPSHOT && destData.getObjectType() == DataObjectType.TEMPLATE) {
return createTemplateFromSnapshot(cmd);
}
if (destDataStore instanceof NfsTO && destDataStore.getRole() == DataStoreRole.ImageCache) {
NfsTO destImageStore = (NfsTO)destDataStore;
if (srcDataStore instanceof S3TO) {
S3TO s3 = (S3TO)srcDataStore;
return copyFromS3ToNfs(cmd, srcData, s3, destData, destImageStore);
} else if (srcDataStore instanceof SwiftTO) {
return copyFromSwiftToNfs(cmd, srcData, (SwiftTO)srcDataStore, destData, destImageStore);
}
}
if (srcDataStore.getRole() == DataStoreRole.ImageCache && destDataStore.getRole() == DataStoreRole.Image) {
return copyFromNfsToImage(cmd);
}
return Answer.createUnsupportedCommandAnswer(cmd);
}
@SuppressWarnings("unchecked")
protected String determineS3TemplateDirectory(final Long accountId, final Long templateId, final String templateUniqueName) {
return join(asList(TEMPLATE_ROOT_DIR, accountId, templateId, templateUniqueName), S3Utils.SEPARATOR);
}
private String determineS3TemplateNameFromKey(String key) {
return StringUtils.substringAfterLast(StringUtils.substringBeforeLast(key, S3Utils.SEPARATOR), S3Utils.SEPARATOR);
}
@SuppressWarnings("unchecked")
protected String determineS3VolumeDirectory(final Long accountId, final Long volId) {
return join(asList(VOLUME_ROOT_DIR, accountId, volId), S3Utils.SEPARATOR);
}
protected Long determineS3VolumeIdFromKey(String key) {
return Long.parseLong(StringUtils.substringAfterLast(StringUtils.substringBeforeLast(key, S3Utils.SEPARATOR), S3Utils.SEPARATOR));
}
private String determineStorageTemplatePath(final String storagePath, String dataPath) {
return join(asList(getRootDir(storagePath), dataPath), File.separator);
}
protected File downloadFromUrlToNfs(String url, NfsTO nfs, String path, String name) {
HttpClient client = new DefaultHttpClient();
HttpGet get = new HttpGet(url);
try {
HttpResponse response = client.execute(get);
HttpEntity entity = response.getEntity();
if (entity == null) {
s_logger.debug("Faled to get entity");
throw new CloudRuntimeException("Failed to get url: " + url);
}
String nfsMountPath = getRootDir(nfs.getUrl());
String filePath = nfsMountPath + File.separator + path;
File directory = new File(filePath);
if (!directory.exists()) {
_storage.mkdirs(filePath);
}
File destFile = new File(filePath + File.separator + name);
if (!destFile.createNewFile()) {
s_logger.warn("Reusing existing file " + destFile.getPath());
}
try(FileOutputStream outputStream = new FileOutputStream(destFile);) {
entity.writeTo(outputStream);
}catch (IOException e) {
s_logger.debug("downloadFromUrlToNfs:Exception:"+e.getMessage(),e);
}
return new File(destFile.getAbsolutePath());
} catch (IOException e) {
s_logger.debug("Faild to get url:" + url + ", due to " + e.toString());
throw new CloudRuntimeException(e);
}
}
protected Answer registerTemplateOnSwift(DownloadCommand cmd) {
SwiftTO swiftTO = (SwiftTO)cmd.getDataStore();
String path = cmd.getInstallPath();
DataStoreTO cacheStore = cmd.getCacheStore();
if (cacheStore == null || !(cacheStore instanceof NfsTO)) {
return new DownloadAnswer("cache store can't be null", VMTemplateStorageResourceAssoc.Status.DOWNLOAD_ERROR);
}
File file = null;
try {
NfsTO nfsCacheStore = (NfsTO)cacheStore;
String fileName = cmd.getName() + "." + cmd.getFormat().getFileExtension();
file = downloadFromUrlToNfs(cmd.getUrl(), nfsCacheStore, path, fileName);
String container = "T-" + cmd.getId();
String swiftPath = SwiftUtil.putObject(swiftTO, file, container, null);
//put metda file
File uniqDir = _storage.createUniqDir();
String metaFileName = uniqDir.getAbsolutePath() + File.separator + "template.properties";
_storage.create(uniqDir.getAbsolutePath(), "template.properties");
File metaFile = new File(metaFileName);
FileWriter writer = new FileWriter(metaFile);
BufferedWriter bufferWriter = new BufferedWriter(writer);
bufferWriter.write("uniquename=" + cmd.getName());
bufferWriter.write("\n");
bufferWriter.write("filename=" + fileName);
bufferWriter.write("\n");
bufferWriter.write("size=" + file.length());
bufferWriter.close();
writer.close();
SwiftUtil.putObject(swiftTO, metaFile, container, "template.properties");
metaFile.delete();
uniqDir.delete();
String md5sum = null;
try (FileInputStream fs = new FileInputStream(file)){
md5sum = DigestUtils.md5Hex(fs);
} catch (IOException e) {
s_logger.debug("Failed to get md5sum: " + file.getAbsoluteFile());
}
DownloadAnswer answer =
new DownloadAnswer(null, 100, null, VMTemplateStorageResourceAssoc.Status.DOWNLOADED, swiftPath, swiftPath, file.length(), file.length(), md5sum);
return answer;
} catch (IOException e) {
s_logger.debug("Failed to register template into swift", e);
return new DownloadAnswer(e.toString(), VMTemplateStorageResourceAssoc.Status.DOWNLOAD_ERROR);
} finally {
if (file != null) {
file.delete();
}
}
}
private Answer execute(DownloadCommand cmd) {
DataStoreTO dstore = cmd.getDataStore();
if (dstore instanceof NfsTO || dstore instanceof S3TO) {
return _dlMgr.handleDownloadCommand(this, cmd);
} else if (dstore instanceof SwiftTO) {
return registerTemplateOnSwift(cmd);
} else {
return new Answer(cmd, false, "Unsupported image data store: " + dstore);
}
}
private ImageFormat getTemplateFormat(String filePath) {
String ext = null;
int extensionPos = filePath.lastIndexOf('.');
int lastSeparator = Math.max(filePath.lastIndexOf('/'), filePath.lastIndexOf('\\'));
int i = lastSeparator > extensionPos ? -1 : extensionPos;
if (i > 0) {
ext = filePath.substring(i + 1);
}
if (ext != null) {
if (ext.equalsIgnoreCase("vhd")) {
return ImageFormat.VHD;
} else if (ext.equalsIgnoreCase("vhdx")) {
return ImageFormat.VHDX;
} else if (ext.equalsIgnoreCase("qcow2")) {
return ImageFormat.QCOW2;
} else if (ext.equalsIgnoreCase("ova")) {
return ImageFormat.OVA;
} else if (ext.equalsIgnoreCase("tar")) {
return ImageFormat.TAR;
} else if (ext.equalsIgnoreCase("img") || ext.equalsIgnoreCase("raw")) {
return ImageFormat.RAW;
} else if (ext.equalsIgnoreCase("vmdk")) {
return ImageFormat.VMDK;
} else if (ext.equalsIgnoreCase("vdi")) {
return ImageFormat.VDI;
}
}
return null;
}
protected long getVirtualSize(File file, ImageFormat format) {
Processor processor = null;
try {
if (format == null) {
return file.length();
} else if (format == ImageFormat.QCOW2) {
processor = new QCOW2Processor();
} else if (format == ImageFormat.OVA) {
processor = new OVAProcessor();
} else if (format == ImageFormat.VHD) {
processor = new VhdProcessor();
} else if (format == ImageFormat.RAW) {
processor = new RawImageProcessor();
} else if (format == ImageFormat.VMDK) {
processor = new VmdkProcessor();
} if (format == ImageFormat.TAR) {
processor = new TARProcessor();
}
if (processor == null) {
return file.length();
}
processor.configure("template processor", new HashMap<String, Object>());
return processor.getVirtualSize(file);
} catch (Exception e) {
s_logger.warn("Failed to get virtual size, returning file size instead:", e);
return file.length();
}
}
protected Answer copyFromNfsToS3(CopyCommand cmd) {
final DataTO srcData = cmd.getSrcTO();
final DataTO destData = cmd.getDestTO();
DataStoreTO srcDataStore = srcData.getDataStore();
NfsTO srcStore = (NfsTO)srcDataStore;
DataStoreTO destDataStore = destData.getDataStore();
final S3TO s3 = (S3TO)destDataStore;
try {
final String templatePath = determineStorageTemplatePath(srcStore.getUrl(), srcData.getPath());
if (s_logger.isDebugEnabled()) {
s_logger.debug("Found " + srcData.getObjectType() + " from directory " + templatePath + " to upload to S3.");
}
final String bucket = s3.getBucketName();
File srcFile = _storage.getFile(templatePath);
// guard the case where templatePath does not have file extension, since we are not completely sure
// about hypervisor, so we check each extension
if (!srcFile.exists()) {
srcFile = _storage.getFile(templatePath + ".qcow2");
if (!srcFile.exists()) {
srcFile = _storage.getFile(templatePath + ".vhd");
if (!srcFile.exists()) {
srcFile = _storage.getFile(templatePath + ".ova");
if (!srcFile.exists()) {
srcFile = _storage.getFile(templatePath + ".vmdk");
if (!srcFile.exists()) {
return new CopyCmdAnswer("Can't find src file:" + templatePath);
}
}
}
}
}
long srcSize = srcFile.length();
ImageFormat format = getTemplateFormat(srcFile.getName());
String key = destData.getPath() + S3Utils.SEPARATOR + srcFile.getName();
if (!s3.getSingleUpload(srcSize)) {
mputFile(s3, srcFile, bucket, key);
} else {
putFile(s3, srcFile, bucket, key);
}
DataTO retObj = null;
if (destData.getObjectType() == DataObjectType.TEMPLATE) {
TemplateObjectTO newTemplate = new TemplateObjectTO();
newTemplate.setPath(key);
newTemplate.setSize(getVirtualSize(srcFile, format));
newTemplate.setPhysicalSize(srcFile.length());
newTemplate.setFormat(format);
retObj = newTemplate;
} else if (destData.getObjectType() == DataObjectType.VOLUME) {
VolumeObjectTO newVol = new VolumeObjectTO();
newVol.setPath(key);
newVol.setSize(srcFile.length());
retObj = newVol;
} else if (destData.getObjectType() == DataObjectType.SNAPSHOT) {
SnapshotObjectTO newSnapshot = new SnapshotObjectTO();
newSnapshot.setPath(key);
retObj = newSnapshot;
}
return new CopyCmdAnswer(retObj);
} catch (Exception e) {
s_logger.error("failed to upload" + srcData.getPath(), e);
return new CopyCmdAnswer("failed to upload" + srcData.getPath() + e.toString());
}
}
String swiftDownload(SwiftTO swift, String container, String rfilename, String lFullPath) {
Script command = new Script("/bin/bash", s_logger);
command.add("-c");
command.add("/usr/bin/python /usr/local/cloud/systemvm/scripts/storage/secondary/swift -A " + swift.getUrl() + " -U " + swift.getAccount() + ":" +
swift.getUserName() + " -K " + swift.getKey() + " download " + container + " " + rfilename + " -o " + lFullPath);
OutputInterpreter.AllLinesParser parser = new OutputInterpreter.AllLinesParser();
String result = command.execute(parser);
if (result != null) {
String errMsg = "swiftDownload failed err=" + result;
s_logger.warn(errMsg);
return errMsg;
}
if (parser.getLines() != null) {
String[] lines = parser.getLines().split("\\n");
for (String line : lines) {
if (line.contains("Errno") || line.contains("failed")) {
String errMsg = "swiftDownload failed , err=" + parser.getLines();
s_logger.warn(errMsg);
return errMsg;
}
}
}
return null;
}
String swiftDownloadContainer(SwiftTO swift, String container, String ldir) {
Script command = new Script("/bin/bash", s_logger);
command.add("-c");
command.add("cd " + ldir + ";/usr/bin/python /usr/local/cloud/systemvm/scripts/storage/secondary/swift -A " + swift.getUrl() + " -U " + swift.getAccount() + ":" +
swift.getUserName() + " -K " + swift.getKey() + " download " + container);
OutputInterpreter.AllLinesParser parser = new OutputInterpreter.AllLinesParser();
String result = command.execute(parser);
if (result != null) {
String errMsg = "swiftDownloadContainer failed err=" + result;
s_logger.warn(errMsg);
return errMsg;
}
if (parser.getLines() != null) {
String[] lines = parser.getLines().split("\\n");
for (String line : lines) {
if (line.contains("Errno") || line.contains("failed")) {
String errMsg = "swiftDownloadContainer failed , err=" + parser.getLines();
s_logger.warn(errMsg);
return errMsg;
}
}
}
return null;
}
String swiftUpload(SwiftTO swift, String container, String lDir, String lFilename) {
long SWIFT_MAX_SIZE = 5L * 1024L * 1024L * 1024L;
List<String> files = new ArrayList<String>();
if (lFilename.equals("*")) {
File dir = new File(lDir);
String [] dir_lst = dir.list();
if(dir_lst != null) {
for (String file : dir_lst) {
if (file.startsWith(".")) {
continue;
}
files.add(file);
}
}
} else {
files.add(lFilename);
}
for (String file : files) {
File f = new File(lDir + "/" + file);
long size = f.length();
Script command = new Script("/bin/bash", s_logger);
command.add("-c");
if (size <= SWIFT_MAX_SIZE) {
command.add("cd " + lDir + ";/usr/bin/python /usr/local/cloud/systemvm/scripts/storage/secondary/swift -A " + swift.getUrl() + " -U " +
swift.getAccount() + ":" + swift.getUserName() + " -K " + swift.getKey() + " upload " + container + " " + file);
} else {
command.add("cd " + lDir + ";/usr/bin/python /usr/local/cloud/systemvm/scripts/storage/secondary/swift -A " + swift.getUrl() + " -U " +
swift.getAccount() + ":" + swift.getUserName() + " -K " + swift.getKey() + " upload -S " + SWIFT_MAX_SIZE + " " + container + " " + file);
}
OutputInterpreter.AllLinesParser parser = new OutputInterpreter.AllLinesParser();
String result = command.execute(parser);
if (result != null) {
String errMsg = "swiftUpload failed , err=" + result;
s_logger.warn(errMsg);
return errMsg;
}
if (parser.getLines() != null) {
String[] lines = parser.getLines().split("\\n");
for (String line : lines) {
if (line.contains("Errno") || line.contains("failed")) {
String errMsg = "swiftUpload failed , err=" + parser.getLines();
s_logger.warn(errMsg);
return errMsg;
}
}
}
}
return null;
}
String[] swiftList(SwiftTO swift, String container, String rFilename) {
Script command = new Script("/bin/bash", s_logger);
command.add("-c");
command.add("/usr/bin/python /usr/local/cloud/systemvm/scripts/storage/secondary/swift -A " + swift.getUrl() + " -U " + swift.getAccount() + ":" +
swift.getUserName() + " -K " + swift.getKey() + " list " + container + " " + rFilename);
OutputInterpreter.AllLinesParser parser = new OutputInterpreter.AllLinesParser();
String result = command.execute(parser);
if (result == null && parser.getLines() != null) {
String[] lines = parser.getLines().split("\\n");
return lines;
} else {
if (result != null) {
String errMsg = "swiftList failed , err=" + result;
s_logger.warn(errMsg);
} else {
String errMsg = "swiftList failed, no lines returns";
s_logger.warn(errMsg);
}
}
return null;
}
String swiftDelete(SwiftTO swift, String container, String object) {
Script command = new Script("/bin/bash", s_logger);
command.add("-c");
command.add("/usr/bin/python /usr/local/cloud/systemvm/scripts/storage/secondary/swift -A " + swift.getUrl() + " -U " + swift.getAccount() + ":" +
swift.getUserName() + " -K " + swift.getKey() + " delete " + container + " " + object);
OutputInterpreter.AllLinesParser parser = new OutputInterpreter.AllLinesParser();
String result = command.execute(parser);
if (result != null) {
String errMsg = "swiftDelete failed , err=" + result;
s_logger.warn(errMsg);
return errMsg;
}
if (parser.getLines() != null) {
String[] lines = parser.getLines().split("\\n");
for (String line : lines) {
if (line.contains("Errno") || line.contains("failed")) {
String errMsg = "swiftDelete failed , err=" + parser.getLines();
s_logger.warn(errMsg);
return errMsg;
}
}
}
return null;
}
public Answer execute(DeleteSnapshotsDirCommand cmd) {
DataStoreTO dstore = cmd.getDataStore();
if (dstore instanceof NfsTO) {
NfsTO nfs = (NfsTO)dstore;
String relativeSnapshotPath = cmd.getDirectory();
String parent = getRootDir(nfs.getUrl());
if (relativeSnapshotPath.startsWith(File.separator)) {
relativeSnapshotPath = relativeSnapshotPath.substring(1);
}
if (!parent.endsWith(File.separator)) {
parent += File.separator;
}
String absoluteSnapshotPath = parent + relativeSnapshotPath;
File snapshotDir = new File(absoluteSnapshotPath);
String details = null;
if (!snapshotDir.exists()) {
details = "snapshot directory " + snapshotDir.getName() + " doesn't exist";
s_logger.debug(details);
return new Answer(cmd, true, details);
}
// delete all files in the directory
String lPath = absoluteSnapshotPath + "/*";
String result = deleteLocalFile(lPath);
if (result != null) {
String errMsg = "failed to delete all snapshots " + lPath + " , err=" + result;
s_logger.warn(errMsg);
return new Answer(cmd, false, errMsg);
}
// delete the directory
if (!snapshotDir.delete()) {
details = "Unable to delete directory " + snapshotDir.getName() + " under snapshot path " + relativeSnapshotPath;
s_logger.debug(details);
return new Answer(cmd, false, details);
}
return new Answer(cmd, true, null);
} else if (dstore instanceof S3TO) {
final S3TO s3 = (S3TO)dstore;
final String path = cmd.getDirectory();
final String bucket = s3.getBucketName();
try {
S3Utils.deleteDirectory(s3, bucket, path);
return new Answer(cmd, true, String.format("Deleted snapshot %1%s from bucket %2$s.", path, bucket));
} catch (Exception e) {
final String errorMessage =
String.format("Failed to delete snapshot %1$s from bucket %2$s due to the following error: %3$s", path, bucket, e.getMessage());
s_logger.error(errorMessage, e);
return new Answer(cmd, false, errorMessage);
}
} else if (dstore instanceof SwiftTO) {
String path = cmd.getDirectory();
String volumeId = StringUtils.substringAfterLast(path, "/"); // assuming
// that
// the
// filename
// is
// the
// last
// section
// in
// the
// path
String result = swiftDelete((SwiftTO)dstore, "V-" + volumeId.toString(), "");
if (result != null) {
String errMsg = "failed to delete snapshot for volume " + volumeId + " , err=" + result;
s_logger.warn(errMsg);
return new Answer(cmd, false, errMsg);
}
return new Answer(cmd, true, "Deleted snapshot " + path + " from swift");
} else {
return new Answer(cmd, false, "Unsupported image data store: " + dstore);
}
}
private Answer execute(ComputeChecksumCommand cmd) {
String relativeTemplatePath = cmd.getTemplatePath();
DataStoreTO store = cmd.getStore();
if (!(store instanceof NfsTO)) {
return new Answer(cmd, false, "can't handle non nfs data store");
}
NfsTO nfsStore = (NfsTO)store;
String parent = getRootDir(nfsStore.getUrl());
if (relativeTemplatePath.startsWith(File.separator)) {
relativeTemplatePath = relativeTemplatePath.substring(1);
}
if (!parent.endsWith(File.separator)) {
parent += File.separator;
}
String absoluteTemplatePath = parent + relativeTemplatePath;
MessageDigest digest;
String checksum = null;
File f = new File(absoluteTemplatePath);
InputStream is = null;
byte[] buffer = new byte[8192];
int read = 0;
if (s_logger.isDebugEnabled()) {
s_logger.debug("parent path " + parent + " relative template path " + relativeTemplatePath);
}
try {
digest = MessageDigest.getInstance("MD5");
is = new FileInputStream(f);
while ((read = is.read(buffer)) > 0) {
digest.update(buffer, 0, read);
}
byte[] md5sum = digest.digest();
BigInteger bigInt = new BigInteger(1, md5sum);
checksum = bigInt.toString(16);
if (s_logger.isDebugEnabled()) {
s_logger.debug("Successfully calculated checksum for file " + absoluteTemplatePath + " - " + checksum);
}
} catch (IOException e) {
String logMsg = "Unable to process file for MD5 - " + absoluteTemplatePath;
s_logger.error(logMsg);
return new Answer(cmd, false, checksum);
} catch (NoSuchAlgorithmException e) {
return new Answer(cmd, false, checksum);
} finally {
try {
if (is != null) {
is.close();
}
} catch (IOException e) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Could not close the file " + absoluteTemplatePath);
}
return new Answer(cmd, false, checksum);
}
}
return new Answer(cmd, true, checksum);
}
    /**
     * Configures SSL for the agent. With no certificates, configureSSL() is called with
     * defaults; otherwise each certificate component is written to a temp file, the paths
     * are handed to configureSSL(...), and the temp files are deleted afterwards.
     * Write failures on individual temp files are logged and deliberately not fatal
     * (best-effort), matching the outer catch's behavior.
     */
    private void configCerts(KeystoreManager.Certificates certs) {
        if (certs == null) {
            configureSSL();
        } else {
            String prvKey = certs.getPrivKey();
            String pubCert = certs.getPrivCert();
            String certChain = certs.getCertChain();
            String rootCACert = certs.getRootCACert();
            try {
                // private key -> temp file
                File prvKeyFile = File.createTempFile("prvkey", null);
                String prvkeyPath = prvKeyFile.getAbsolutePath();
                try(BufferedWriter prvt_key_file = new BufferedWriter(new FileWriter(prvKeyFile));) {
                    prvt_key_file.write(prvKey);
                }catch (IOException e) {
                    s_logger.debug("Failed to config ssl: " + e.toString());
                }
                // public certificate -> temp file
                File pubCertFile = File.createTempFile("pubcert", null);
                String pubCertFilePath = pubCertFile.getAbsolutePath();
                try(BufferedWriter pub_cert_file = new BufferedWriter(new FileWriter(pubCertFile));) {
                    pub_cert_file.write(pubCert);
                }catch (IOException e) {
                    s_logger.debug("Failed to config ssl: " + e.toString());
                }
                // chain and root CA are optional; their paths stay null when absent
                String certChainFilePath = null, rootCACertFilePath = null;
                File certChainFile = null, rootCACertFile = null;
                if(certChain != null){
                    certChainFile = File.createTempFile("certchain", null);
                    certChainFilePath = certChainFile.getAbsolutePath();
                    try(BufferedWriter cert_chain_out = new BufferedWriter(new FileWriter(certChainFile));) {
                        cert_chain_out.write(certChain);
                    }catch (IOException e) {
                        s_logger.debug("Failed to config ssl: " + e.toString());
                    }
                }
                if(rootCACert != null){
                    rootCACertFile = File.createTempFile("rootcert", null);
                    rootCACertFilePath = rootCACertFile.getAbsolutePath();
                    try(BufferedWriter root_ca_cert_file = new BufferedWriter(new FileWriter(rootCACertFile));) {
                        root_ca_cert_file.write(rootCACert);
                    }catch (IOException e) {
                        s_logger.debug("Failed to config ssl: " + e.toString());
                    }
                }
                configureSSL(prvkeyPath, pubCertFilePath, certChainFilePath, rootCACertFilePath);
                // clean up the temp files once SSL is configured
                prvKeyFile.delete();
                pubCertFile.delete();
                if(certChainFile != null){
                    certChainFile.delete();
                }
                if(rootCACertFile != null){
                    rootCACertFile.delete();
                }
            } catch (IOException e) {
                s_logger.debug("Failed to config ssl: " + e.toString());
            }
        }
    }
    /**
     * Sets up secondary storage inside the system VM: for NFS stores it adds a route to
     * the NFS host, mounts the export, configures SSL certificates, and records the NFS
     * host IP. The post-upload PSK and server are set up in all cases before returning.
     * Outside a system VM this is a no-op success.
     */
    private Answer execute(SecStorageSetupCommand cmd) {
        if (!_inSystemVM) {
            // nothing to mount or route when not running inside the system VM
            return new Answer(cmd, true, null);
        }
        Answer answer = null;
        DataStoreTO dStore = cmd.getDataStore();
        if (dStore instanceof NfsTO) {
            String secUrl = cmd.getSecUrl();
            try {
                URI uri = new URI(secUrl);
                String nfsHostIp = getUriHostIp(uri);
                // route must exist before the mount can succeed
                addRouteToInternalIpOrCidr(_storageGateway, _storageIp, _storageNetmask, nfsHostIp);
                String dir = mountUri(uri);
                configCerts(cmd.getCerts());
                nfsIps.add(nfsHostIp);
                answer = new SecStorageSetupAnswer(dir);
            } catch (Exception e) {
                String msg = "GetRootDir for " + secUrl + " failed due to " + e.toString();
                s_logger.error(msg);
                answer = new Answer(cmd, false, msg);
            }
        } else {
            // TODO: what do we need to setup for S3/Swift, maybe need to mount
            // to some cache storage
            answer = new Answer(cmd, true, null);
        }
        // always prepare the post-upload channel, regardless of store type
        savePostUploadPSK(cmd.getPostUploadKey());
        startPostUploadServer();
        return answer;
    }
    /**
     * Starts a Netty HTTP server on port 8210 that accepts direct template/volume
     * uploads and hands them to {@link HttpUploadServerHandler}. The bind and the
     * blocking wait for channel close run on a dedicated thread so this method
     * returns immediately; the event loop groups are shut down when the channel
     * closes or the bind is interrupted.
     */
    private void startPostUploadServer() {
        final int PORT = 8210;
        final int NO_OF_WORKERS = 15;
        // one boss thread accepts connections; workers handle the upload traffic
        final EventLoopGroup bossGroup = new NioEventLoopGroup(1);
        final EventLoopGroup workerGroup = new NioEventLoopGroup(NO_OF_WORKERS);
        final ServerBootstrap b = new ServerBootstrap();
        final NfsSecondaryStorageResource storageResource = this;
        b.group(bossGroup, workerGroup);
        b.channel(NioServerSocketChannel.class);
        b.handler(new LoggingHandler(LogLevel.INFO));
        b.childHandler(new ChannelInitializer<SocketChannel>() {
            @Override
            protected void initChannel(SocketChannel ch) throws Exception {
                // standard HTTP codec pipeline, then the upload handler
                ChannelPipeline pipeline = ch.pipeline();
                pipeline.addLast(new HttpRequestDecoder());
                pipeline.addLast(new HttpResponseEncoder());
                pipeline.addLast(new HttpContentCompressor());
                pipeline.addLast(new HttpUploadServerHandler(storageResource));
            }
        });
        new Thread() {
            @Override
            public void run() {
                try {
                    // bind, then block this thread until the server channel closes
                    Channel ch = b.bind(PORT).sync().channel();
                    s_logger.info(String.format("Started post upload server on port %d with %d workers",PORT,NO_OF_WORKERS));
                    ch.closeFuture().sync();
                } catch (InterruptedException e) {
                    s_logger.info("Failed to start post upload server");
                    s_logger.debug("Exception while starting post upload server", e);
                } finally {
                    bossGroup.shutdownGracefully();
                    workerGroup.shutdownGracefully();
                    s_logger.info("shutting down post upload server");
                }
            }
        }.start();
        s_logger.info("created a thread to start post upload server");
    }
private void savePostUploadPSK(String psk) {
try {
FileUtils.writeStringToFile(new File(POST_UPLOAD_KEY_LOCATION),psk, "utf-8");
} catch (IOException ex) {
s_logger.debug("Failed to copy PSK to the file.", ex);
}
}
protected Answer deleteSnapshot(final DeleteCommand cmd) {
DataTO obj = cmd.getData();
DataStoreTO dstore = obj.getDataStore();
if (dstore instanceof NfsTO) {
NfsTO nfs = (NfsTO)dstore;
String parent = getRootDir(nfs.getUrl());
if (!parent.endsWith(File.separator)) {
parent += File.separator;
}
String snapshotPath = obj.getPath();
if (snapshotPath.startsWith(File.separator)) {
snapshotPath = snapshotPath.substring(1);
}
// check if the passed snapshot path is a directory or not. For ImageCache, path is stored as a directory instead of
// snapshot file name. If so, since backupSnapshot process has already deleted snapshot in cache, so we just do nothing
// and return true.
String fullSnapPath = parent + snapshotPath;
File snapDir = new File(fullSnapPath);
if (snapDir.exists() && snapDir.isDirectory()) {
s_logger.debug("snapshot path " + snapshotPath + " is a directory, already deleted during backup snapshot, so no need to delete");
return new Answer(cmd, true, null);
}
// passed snapshot path is a snapshot file path, then get snapshot directory first
int index = snapshotPath.lastIndexOf("/");
String snapshotName = snapshotPath.substring(index + 1);
snapshotPath = snapshotPath.substring(0, index);
String absoluteSnapshotPath = parent + snapshotPath;
// check if snapshot directory exists
File snapshotDir = new File(absoluteSnapshotPath);
String details = null;
if (!snapshotDir.exists()) {
details = "snapshot directory " + snapshotDir.getName() + " doesn't exist";
s_logger.debug(details);
return new Answer(cmd, true, details);
}
// delete snapshot in the directory if exists
String lPath = absoluteSnapshotPath + "/*" + snapshotName + "*";
String result = deleteLocalFile(lPath);
if (result != null) {
details = "failed to delete snapshot " + lPath + " , err=" + result;
s_logger.warn(details);
return new Answer(cmd, false, details);
}
return new Answer(cmd, true, null);
} else if (dstore instanceof S3TO) {
final S3TO s3 = (S3TO)dstore;
final String path = obj.getPath();
final String bucket = s3.getBucketName();
try {
S3Utils.deleteObject(s3, bucket, path);
return new Answer(cmd, true, String.format("Deleted snapshot %1%s from bucket %2$s.", path, bucket));
} catch (Exception e) {
final String errorMessage =
String.format("Failed to delete snapshot %1$s from bucket %2$s due to the following error: %3$s", path, bucket, e.getMessage());
s_logger.error(errorMessage, e);
return new Answer(cmd, false, errorMessage);
}
} else if (dstore instanceof SwiftTO) {
SwiftTO swiftTO = (SwiftTO)dstore;
String path = obj.getPath();
SwiftUtil.deleteObject(swiftTO, path);
return new Answer(cmd, true, "Deleted snapshot " + path + " from swift");
} else {
return new Answer(cmd, false, "Unsupported image data store: " + dstore);
}
}
Map<String, TemplateProp> swiftListTemplate(SwiftTO swift) {
String[] containers = SwiftUtil.list(swift, "", null);
if (containers == null) {
return null;
}
Map<String, TemplateProp> tmpltInfos = new HashMap<String, TemplateProp>();
for (String container : containers) {
if (container.startsWith("T-")) {
String[] files = SwiftUtil.list(swift, container, "template.properties");
if (files.length != 1) {
continue;
}
try {
File tempFile = File.createTempFile("template", ".tmp");
File tmpFile = SwiftUtil.getObject(swift, tempFile, container + File.separator + "template.properties");
if (tmpFile == null) {
continue;
}
try (FileReader fr = new FileReader(tmpFile);
BufferedReader brf = new BufferedReader(fr);) {
String line = null;
String uniqName = null;
Long size = null;
String name = null;
while ((line = brf.readLine()) != null) {
if (line.startsWith("uniquename=")) {
uniqName = line.split("=")[1];
} else if (line.startsWith("size=")) {
size = Long.parseLong(line.split("=")[1]);
} else if (line.startsWith("filename=")) {
name = line.split("=")[1];
}
}
tempFile.delete();
if (uniqName != null) {
TemplateProp prop = new TemplateProp(uniqName, container + File.separator + name, size, size, true, false);
tmpltInfos.put(uniqName, prop);
}
} catch (IOException ex)
{
s_logger.debug("swiftListTemplate:Exception:" + ex.getMessage());
continue;
}
} catch (IOException e) {
s_logger.debug("Failed to create templ file:" + e.toString());
continue;
} catch (Exception e) {
s_logger.debug("Failed to get properties: " + e.toString());
continue;
}
}
}
return tmpltInfos;
}
Map<String, TemplateProp> s3ListTemplate(S3TO s3) {
String bucket = s3.getBucketName();
// List the objects in the source directory on S3
final List<S3ObjectSummary> objectSummaries = S3Utils.getDirectory(s3, bucket, TEMPLATE_ROOT_DIR);
if (objectSummaries == null) {
return null;
}
Map<String, TemplateProp> tmpltInfos = new HashMap<String, TemplateProp>();
for (S3ObjectSummary objectSummary : objectSummaries) {
String key = objectSummary.getKey();
// String installPath = StringUtils.substringBeforeLast(key,
// S3Utils.SEPARATOR);
String uniqueName = determineS3TemplateNameFromKey(key);
// TODO: isPublic value, where to get?
TemplateProp tInfo = new TemplateProp(uniqueName, key, objectSummary.getSize(), objectSummary.getSize(), true, false);
tmpltInfos.put(uniqueName, tInfo);
}
return tmpltInfos;
}
Map<Long, TemplateProp> s3ListVolume(S3TO s3) {
String bucket = s3.getBucketName();
// List the objects in the source directory on S3
final List<S3ObjectSummary> objectSummaries = S3Utils.getDirectory(s3, bucket, VOLUME_ROOT_DIR);
if (objectSummaries == null) {
return null;
}
Map<Long, TemplateProp> tmpltInfos = new HashMap<Long, TemplateProp>();
for (S3ObjectSummary objectSummary : objectSummaries) {
String key = objectSummary.getKey();
// String installPath = StringUtils.substringBeforeLast(key,
// S3Utils.SEPARATOR);
Long id = determineS3VolumeIdFromKey(key);
// TODO: how to get volume template name
TemplateProp tInfo = new TemplateProp(id.toString(), key, objectSummary.getSize(), objectSummary.getSize(), true, false);
tmpltInfos.put(id, tInfo);
}
return tmpltInfos;
}
private Answer execute(ListTemplateCommand cmd) {
if (!_inSystemVM) {
return new ListTemplateAnswer(null, null);
}
DataStoreTO store = cmd.getDataStore();
if (store instanceof NfsTO) {
NfsTO nfs = (NfsTO)store;
String secUrl = nfs.getUrl();
String root = getRootDir(secUrl);
Map<String, TemplateProp> templateInfos = _dlMgr.gatherTemplateInfo(root);
return new ListTemplateAnswer(secUrl, templateInfos);
} else if (store instanceof SwiftTO) {
SwiftTO swift = (SwiftTO)store;
Map<String, TemplateProp> templateInfos = swiftListTemplate(swift);
return new ListTemplateAnswer(swift.toString(), templateInfos);
} else if (store instanceof S3TO) {
S3TO s3 = (S3TO)store;
Map<String, TemplateProp> templateInfos = s3ListTemplate(s3);
return new ListTemplateAnswer(s3.getBucketName(), templateInfos);
} else {
return new Answer(cmd, false, "Unsupported image data store: " + store);
}
}
private Answer execute(ListVolumeCommand cmd) {
if (!_inSystemVM) {
return new ListVolumeAnswer(cmd.getSecUrl(), null);
}
DataStoreTO store = cmd.getDataStore();
if (store instanceof NfsTO) {
String root = getRootDir(cmd.getSecUrl());
Map<Long, TemplateProp> templateInfos = _dlMgr.gatherVolumeInfo(root);
return new ListVolumeAnswer(cmd.getSecUrl(), templateInfos);
} else if (store instanceof S3TO) {
S3TO s3 = (S3TO)store;
Map<Long, TemplateProp> templateInfos = s3ListVolume(s3);
return new ListVolumeAnswer(s3.getBucketName(), templateInfos);
} else {
return new Answer(cmd, false, "Unsupported image data store: " + store);
}
}
private Answer execute(SecStorageVMSetupCommand cmd) {
if (!_inSystemVM) {
return new Answer(cmd, true, null);
}
boolean success = true;
StringBuilder result = new StringBuilder();
for (String cidr : cmd.getAllowedInternalSites()) {
if (nfsIps.contains(cidr)) {
/*
* if the internal download ip is the same with secondary
* storage ip, adding internal sites will flush ip route to nfs
* through storage ip.
*/
continue;
}
String tmpresult = allowOutgoingOnPrivate(cidr);
if (tmpresult != null) {
result.append(", ").append(tmpresult);
success = false;
}
}
if (success) {
if (cmd.getCopyPassword() != null && cmd.getCopyUserName() != null) {
String tmpresult = configureAuth(cmd.getCopyUserName(), cmd.getCopyPassword());
if (tmpresult != null) {
result.append("Failed to configure auth for copy ").append(tmpresult);
success = false;
}
}
}
return new Answer(cmd, success, result.toString());
}
    /**
     * Removes the given path (file, directory tree, or shell glob) from the
     * local filesystem via "rm -rf".
     *
     * NOTE(review): fullPath is spliced unquoted into a bash command line, so
     * paths containing shell metacharacters would be misinterpreted. Callers
     * in this file pass server-derived paths (and deliberately use globs for
     * snapshot cleanup) - confirm before ever passing user-controlled input.
     *
     * @return null on success, otherwise an error message
     */
    private String deleteLocalFile(String fullPath) {
        Script command = new Script("/bin/bash", s_logger);
        command.add("-c");
        command.add("rm -rf " + fullPath);
        String result = command.execute();
        if (result != null) {
            String errMsg = "Failed to delete file " + fullPath + ", err=" + result;
            s_logger.warn(errMsg);
            return errMsg;
        }
        return null;
    }
public String allowOutgoingOnPrivate(String destCidr) {
if (!_inSystemVM) {
return null;
}
Script command = new Script("/bin/bash", s_logger);
String intf = "eth1";
command.add("-c");
command.add("iptables -I OUTPUT -o " + intf + " -d " + destCidr + " -p tcp -m state --state NEW -m tcp -j ACCEPT");
String result = command.execute();
if (result != null) {
s_logger.warn("Error in allowing outgoing to " + destCidr + ", err=" + result);
return "Error in allowing outgoing to " + destCidr + ", err=" + result;
}
addRouteToInternalIpOrCidr(_localgw, _eth1ip, _eth1mask, destCidr);
return null;
}
private Answer execute(SecStorageFirewallCfgCommand cmd) {
if (!_inSystemVM) {
return new Answer(cmd, true, null);
}
List<String> ipList = new ArrayList<String>();
for (PortConfig pCfg : cmd.getPortConfigs()) {
if (pCfg.isAdd()) {
ipList.add(pCfg.getSourceIp());
}
}
boolean success = true;
String result;
result = configureIpFirewall(ipList, cmd.getIsAppendAIp());
if (result != null) {
success = false;
}
return new Answer(cmd, success, result);
}
private UploadStatusAnswer execute(UploadStatusCommand cmd) {
String entityUuid = cmd.getEntityUuid();
if (uploadEntityStateMap.containsKey(entityUuid)) {
UploadEntity uploadEntity = uploadEntityStateMap.get(entityUuid);
if (uploadEntity.getUploadState() == UploadEntity.Status.ERROR) {
uploadEntityStateMap.remove(entityUuid);
return new UploadStatusAnswer(cmd, UploadStatus.ERROR, uploadEntity.getErrorMessage());
} else if (uploadEntity.getUploadState() == UploadEntity.Status.COMPLETED) {
UploadStatusAnswer answer = new UploadStatusAnswer(cmd, UploadStatus.COMPLETED);
answer.setVirtualSize(uploadEntity.getVirtualSize());
answer.setInstallPath(uploadEntity.getTmpltPath());
answer.setPhysicalSize(uploadEntity.getPhysicalSize());
answer.setDownloadPercent(100);
uploadEntityStateMap.remove(entityUuid);
return answer;
} else if (uploadEntity.getUploadState() == UploadEntity.Status.IN_PROGRESS) {
UploadStatusAnswer answer = new UploadStatusAnswer(cmd, UploadStatus.IN_PROGRESS);
long downloadedSize = FileUtils.sizeOfDirectory(new File(uploadEntity.getInstallPathPrefix()));
int downloadPercent = (int) (100 * downloadedSize / uploadEntity.getContentLength());
answer.setDownloadPercent(Math.min(downloadPercent, 100));
return answer;
}
}
return new UploadStatusAnswer(cmd, UploadStatus.UNKNOWN);
}
protected GetStorageStatsAnswer execute(final GetStorageStatsCommand cmd) {
DataStoreTO store = cmd.getStore();
if (store instanceof S3TO || store instanceof SwiftTO) {
long infinity = Integer.MAX_VALUE;
return new GetStorageStatsAnswer(cmd, infinity, 0L);
}
String rootDir = getRootDir(((NfsTO)store).getUrl());
final long usedSize = getUsedSize(rootDir);
final long totalSize = getTotalSize(rootDir);
if (usedSize == -1 || totalSize == -1) {
return new GetStorageStatsAnswer(cmd, "Unable to get storage stats");
} else {
return new GetStorageStatsAnswer(cmd, totalSize, usedSize);
}
}
protected Answer execute(final DeleteCommand cmd) {
DataTO obj = cmd.getData();
DataObjectType objType = obj.getObjectType();
if (obj.getPath() == null) {
// account for those fake entries for NFS migration to object store
return new Answer(cmd, true, "Object with null install path does not exist on image store , no need to delete");
}
switch (objType) {
case TEMPLATE:
return deleteTemplate(cmd);
case VOLUME:
return deleteVolume(cmd);
case SNAPSHOT:
return deleteSnapshot(cmd);
}
return Answer.createUnsupportedCommandAnswer(cmd);
}
protected Answer deleteTemplate(DeleteCommand cmd) {
DataTO obj = cmd.getData();
DataStoreTO dstore = obj.getDataStore();
if (dstore instanceof NfsTO) {
NfsTO nfs = (NfsTO)dstore;
String relativeTemplatePath = obj.getPath();
String parent = getRootDir(nfs.getUrl());
if (relativeTemplatePath.startsWith(File.separator)) {
relativeTemplatePath = relativeTemplatePath.substring(1);
}
if (!parent.endsWith(File.separator)) {
parent += File.separator;
}
String absoluteTemplatePath = parent + relativeTemplatePath;
File tmpltPath = new File(absoluteTemplatePath);
File tmpltParent = null;
if(tmpltPath.exists() && tmpltPath.isDirectory()) {
tmpltParent = tmpltPath;
} else {
tmpltParent = tmpltPath.getParentFile();
}
String details = null;
if (!tmpltParent.exists()) {
details = "template parent directory " + tmpltParent.getName() + " doesn't exist";
s_logger.debug(details);
return new Answer(cmd, true, details);
}
File[] tmpltFiles = tmpltParent.listFiles();
if (tmpltFiles == null || tmpltFiles.length == 0) {
details = "No files under template parent directory " + tmpltParent.getName();
s_logger.debug(details);
} else {
boolean found = false;
for (File f : tmpltFiles) {
if (!found && f.getName().equals("template.properties")) {
found = true;
}
// KVM HA monitor makes a mess in the templates with its
// heartbeat tests
// Don't let this stop us from cleaning up the template
if (f.isDirectory() && f.getName().equals("KVMHA")) {
s_logger.debug("Deleting KVMHA directory contents from template location");
File[] haFiles = f.listFiles();
for (File haFile : haFiles) {
haFile.delete();
}
}
if (!f.delete()) {
return new Answer(cmd, false, "Unable to delete file " + f.getName() + " under Template path " + relativeTemplatePath);
}
}
if (!found) {
details = "Can not find template.properties under " + tmpltParent.getName();
s_logger.debug(details);
}
}
if (!tmpltParent.delete()) {
details = "Unable to delete directory " + tmpltParent.getName() + " under Template path " + relativeTemplatePath;
s_logger.debug(details);
return new Answer(cmd, false, details);
}
return new Answer(cmd, true, null);
} else if (dstore instanceof S3TO) {
final S3TO s3 = (S3TO)dstore;
final String path = obj.getPath();
final String bucket = s3.getBucketName();
try {
S3Utils.deleteDirectory(s3, bucket, path);
return new Answer(cmd, true, String.format("Deleted template %1$s from bucket %2$s.", path, bucket));
} catch (Exception e) {
final String errorMessage =
String.format("Failed to delete template %1$s from bucket %2$s due to the following error: %3$s", path, bucket, e.getMessage());
s_logger.error(errorMessage, e);
return new Answer(cmd, false, errorMessage);
}
} else if (dstore instanceof SwiftTO) {
SwiftTO swift = (SwiftTO)dstore;
String container = "T-" + obj.getId();
String object = "";
try {
String result = swiftDelete(swift, container, object);
if (result != null) {
String errMsg = "failed to delete object " + container + "/" + object + " , err=" + result;
s_logger.warn(errMsg);
return new Answer(cmd, false, errMsg);
}
return new Answer(cmd, true, "success");
} catch (Exception e) {
String errMsg = cmd + " Command failed due to " + e.toString();
s_logger.warn(errMsg, e);
return new Answer(cmd, false, errMsg);
}
} else {
return new Answer(cmd, false, "Unsupported image data store: " + dstore);
}
}
protected Answer deleteVolume(final DeleteCommand cmd) {
DataTO obj = cmd.getData();
DataStoreTO dstore = obj.getDataStore();
if (dstore instanceof NfsTO) {
NfsTO nfs = (NfsTO)dstore;
String relativeVolumePath = obj.getPath();
String parent = getRootDir(nfs.getUrl());
if (relativeVolumePath.startsWith(File.separator)) {
relativeVolumePath = relativeVolumePath.substring(1);
}
if (!parent.endsWith(File.separator)) {
parent += File.separator;
}
String absoluteVolumePath = parent + relativeVolumePath;
File volPath = new File(absoluteVolumePath);
File tmpltParent = null;
if (volPath.exists() && volPath.isDirectory()) {
// for vmware, absoluteVolumePath represents a directory where volume files are located.
tmpltParent = volPath;
} else {
// for other hypervisors, the volume .vhd or .qcow2 file path is passed
tmpltParent = new File(absoluteVolumePath).getParentFile();
}
String details = null;
if (!tmpltParent.exists()) {
details = "volume parent directory " + tmpltParent.getName() + " doesn't exist";
s_logger.debug(details);
return new Answer(cmd, true, details);
}
File[] tmpltFiles = tmpltParent.listFiles();
if (tmpltFiles == null || tmpltFiles.length == 0) {
details = "No files under volume parent directory " + tmpltParent.getName();
s_logger.debug(details);
} else {
boolean found = false;
for (File f : tmpltFiles) {
if (!found && f.getName().equals("volume.properties")) {
found = true;
}
// KVM HA monitor makes a mess in the templates with its
// heartbeat tests
// Don't let this stop us from cleaning up the template
if (f.isDirectory() && f.getName().equals("KVMHA")) {
s_logger.debug("Deleting KVMHA directory contents from template location");
File[] haFiles = f.listFiles();
for (File haFile : haFiles) {
haFile.delete();
}
}
if (!f.delete()) {
return new Answer(cmd, false, "Unable to delete file " + f.getName() + " under Volume path " + tmpltParent.getPath());
}
}
if (!found) {
details = "Can not find volume.properties under " + tmpltParent.getName();
s_logger.debug(details);
}
}
if (!tmpltParent.delete()) {
details = "Unable to delete directory " + tmpltParent.getName() + " under Volume path " + tmpltParent.getPath();
s_logger.debug(details);
return new Answer(cmd, false, details);
}
return new Answer(cmd, true, null);
} else if (dstore instanceof S3TO) {
final S3TO s3 = (S3TO)dstore;
final String path = obj.getPath();
final String bucket = s3.getBucketName();
try {
S3Utils.deleteDirectory(s3, bucket, path);
return new Answer(cmd, true, String.format("Deleted volume %1%s from bucket %2$s.", path, bucket));
} catch (Exception e) {
final String errorMessage = String.format("Failed to delete volume %1$s from bucket %2$s due to the following error: %3$s", path, bucket, e.getMessage());
s_logger.error(errorMessage, e);
return new Answer(cmd, false, errorMessage);
}
} else if (dstore instanceof SwiftTO) {
Long volumeId = obj.getId();
String path = obj.getPath();
String filename = StringUtils.substringAfterLast(path, "/"); // assuming
// that
// the
// filename
// is
// the
// last
// section
// in
// the
// path
String result = swiftDelete((SwiftTO)dstore, "V-" + volumeId.toString(), filename);
if (result != null) {
String errMsg = "failed to delete volume " + filename + " , err=" + result;
s_logger.warn(errMsg);
return new Answer(cmd, false, errMsg);
}
return new Answer(cmd, true, "Deleted volume " + path + " from swift");
} else {
return new Answer(cmd, false, "Unsupported image data store: " + dstore);
}
}
@Override
synchronized public String getRootDir(String secUrl) {
if (!_inSystemVM) {
return _parent;
}
try {
URI uri = new URI(secUrl);
String dir = mountUri(uri);
return _parent + "/" + dir;
} catch (Exception e) {
String msg = "GetRootDir for " + secUrl + " failed due to " + e.toString();
s_logger.error(msg, e);
throw new CloudRuntimeException(msg);
}
}
    /** Bytes currently used on the filesystem backing {@code rootDir}, as reported by the storage layer. */
    protected long getUsedSize(String rootDir) {
        return _storage.getUsedSpace(rootDir);
    }
    /** Total capacity in bytes of the filesystem backing {@code rootDir}, as reported by the storage layer. */
    protected long getTotalSize(String rootDir) {
        return _storage.getTotalSpace(rootDir);
    }
protected long convertFilesystemSize(final String size) {
if (size == null || size.isEmpty()) {
return -1;
}
long multiplier = 1;
if (size.endsWith("T")) {
multiplier = 1024l * 1024l * 1024l * 1024l;
} else if (size.endsWith("G")) {
multiplier = 1024l * 1024l * 1024l;
} else if (size.endsWith("M")) {
multiplier = 1024l * 1024l;
} else {
assert (false) : "Well, I have no idea what this is: " + size;
}
return (long)(Double.parseDouble(size.substring(0, size.length() - 1)) * multiplier);
}
@Override
public Type getType() {
if (SecondaryStorageVm.Role.templateProcessor.toString().equals(_role)) {
return Host.Type.SecondaryStorage;
}
return Host.Type.SecondaryStorageCmdExecutor;
}
    /**
     * Heartbeat: reports this resource as a storage host; the empty map means
     * there is no per-VM state to reconcile.
     */
    @Override
    public PingCommand getCurrentStatus(final long id) {
        return new PingStorageCommand(Host.Type.Storage, id, new HashMap<String, Boolean>());
    }
    /**
     * One-time initialization from the agent parameter map: network settings,
     * system-VM detection, storage layer, helper scripts, role/identity, and
     * the download/upload managers. Order matters: network parameters must be
     * read before super.configure(), and routes/services are only set up when
     * running inside the SSVM.
     *
     * @return true on success; false when the download/upload managers fail to configure
     * @throws ConfigurationException when a mandatory parameter or script is missing
     */
    @Override
    public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
        _eth1ip = (String)params.get("eth1ip");
        _eth1mask = (String)params.get("eth1mask");
        if (_eth1ip != null) { // can only happen inside service vm
            params.put("private.network.device", "eth1");
        } else {
            s_logger.warn("eth1ip parameter has not been configured, assuming that we are not inside a system vm");
        }
        String eth2ip = (String)params.get("eth2ip");
        if (eth2ip != null) {
            params.put("public.network.device", "eth2");
        }
        _publicIp = (String)params.get("eth2ip");
        _hostname = (String)params.get("name");
        // Absent flag defaults to "inside the SSVM".
        String inSystemVM = (String)params.get("secondary.storage.vm");
        if (inSystemVM == null || "true".equalsIgnoreCase(inSystemVM)) {
            s_logger.debug("conf secondary.storage.vm is true, act as if executing in SSVM");
            _inSystemVM = true;
        }
        _storageIp = (String)params.get("storageip");
        if (_storageIp == null && _inSystemVM) {
            s_logger.warn("There is no storageip in /proc/cmdline, something wrong!");
        }
        _storageNetmask = (String)params.get("storagenetmask");
        _storageGateway = (String)params.get("storagegateway");
        super.configure(name, params);
        _params = params;
        // scripts.timeout is in minutes; _timeout is in milliseconds.
        String value = (String)params.get("scripts.timeout");
        _timeout = NumbersUtil.parseInt(value, 1440) * 1000;
        _storage = (StorageLayer)params.get(StorageLayer.InstanceConfigKey);
        configureStorageLayerClass(params);
        if (_inSystemVM) {
            _storage.mkdirs(_parent);
        }
        // Locate the helper scripts; only the snapshot-to-template script is mandatory.
        _configSslScr = Script.findScript(getDefaultScriptsDir(), "config_ssl.sh");
        if (_configSslScr != null) {
            s_logger.info("config_ssl.sh found in " + _configSslScr);
        }
        _configAuthScr = Script.findScript(getDefaultScriptsDir(), "config_auth.sh");
        if (_configAuthScr != null) {
            s_logger.info("config_auth.sh found in " + _configAuthScr);
        }
        _configIpFirewallScr = Script.findScript(getDefaultScriptsDir(), "ipfirewall.sh");
        if (_configIpFirewallScr != null) {
            s_logger.info("_configIpFirewallScr found in " + _configIpFirewallScr);
        }
        createTemplateFromSnapshotXenScript = Script.findScript(getDefaultScriptsDir(), "create_privatetemplate_from_snapshot_xen.sh");
        if (createTemplateFromSnapshotXenScript == null) {
            throw new ConfigurationException("create_privatetemplate_from_snapshot_xen.sh not found in " + getDefaultScriptsDir());
        }
        _role = (String)params.get("role");
        if (_role == null) {
            _role = SecondaryStorageVm.Role.templateProcessor.toString();
        }
        s_logger.info("Secondary storage runs in role " + _role);
        _guid = (String)params.get("guid");
        if (_guid == null) {
            throw new ConfigurationException("Unable to find the guid");
        }
        _dc = (String)params.get("zone");
        if (_dc == null) {
            throw new ConfigurationException("Unable to find the zone");
        }
        _pod = (String)params.get("pod");
        _instance = (String)params.get("instance");
        if (!_inSystemVM) {
            _parent = (String)params.get("mount.path");
        }
        if (_inSystemVM) {
            _localgw = (String)params.get("localgw");
            if (_localgw != null) { // can only happen inside service vm
                // Route management server and internal DNS traffic via the local gateway.
                String mgmtHost = (String)params.get("host");
                addRouteToInternalIpOrCidr(_localgw, _eth1ip, _eth1mask, mgmtHost);
                String internalDns1 = (String)params.get("internaldns1");
                if (internalDns1 == null) {
                    s_logger.warn("No DNS entry found during configuration of NfsSecondaryStorage");
                } else {
                    addRouteToInternalIpOrCidr(_localgw, _eth1ip, _eth1mask, internalDns1);
                }
                String internalDns2 = (String)params.get("internaldns2");
                if (internalDns2 != null) {
                    addRouteToInternalIpOrCidr(_localgw, _eth1ip, _eth1mask, internalDns2);
                }
            }
            startAdditionalServices();
            _params.put("install.numthreads", "50");
            _params.put("secondary.storage.vm", "true");
        }
        try {
            _params.put(StorageLayer.InstanceConfigKey, _storage);
            _dlMgr = new DownloadManagerImpl();
            _dlMgr.configure("DownloadManager", _params);
            _upldMgr = new UploadManagerImpl();
            _upldMgr.configure("UploadManager", params);
        } catch (ConfigurationException e) {
            s_logger.warn("Caught problem while configuring DownloadManager", e);
            return false;
        }
        return true;
    }
protected void configureStorageLayerClass(Map<String, Object> params) throws ConfigurationException {
String value;
if (_storage == null) {
value = (String)params.get(StorageLayer.ClassConfigKey);
if (value == null) {
value = "com.cloud.storage.JavaStorageLayer";
}
try {
Class<?> clazz = Class.forName(value);
_storage = (StorageLayer)clazz.newInstance();
_storage.configure("StorageLayer", params);
} catch (ClassNotFoundException e) {
throw new ConfigurationException("Unable to find class " + value);
} catch (InstantiationException e) {
throw new ConfigurationException("Unable to find class " + value);
} catch (IllegalAccessException e) {
throw new ConfigurationException("Unable to find class " + value);
}
}
}
private void startAdditionalServices() {
if (!_inSystemVM) {
return;
}
Script command = new Script("/bin/bash", s_logger);
command.add("-c");
command.add("if [ -f /etc/init.d/ssh ]; then service ssh restart; else service sshd restart; fi ");
String result = command.execute();
if (result != null) {
s_logger.warn("Error in starting sshd service err=" + result);
}
command = new Script("/bin/bash", s_logger);
command.add("-c");
command.add("iptables -I INPUT -i eth1 -p tcp -m state --state NEW -m tcp --dport 3922 -j ACCEPT");
result = command.execute();
if (result != null) {
s_logger.warn("Error in opening up ssh port err=" + result);
}
}
    /**
     * Adds an explicit "ip route" entry so traffic to the given IP or CIDR
     * goes via the local gateway instead of the default route. Skipped when
     * not in the SSVM, when the destination is invalid, or when it is already
     * on the same subnet as eth1 (no route needed). Any pre-existing route to
     * the destination is removed first.
     */
    private void addRouteToInternalIpOrCidr(String localgw, String eth1ip, String eth1mask, String destIpOrCidr) {
        if (!_inSystemVM) {
            return;
        }
        s_logger.debug("addRouteToInternalIp: localgw=" + localgw + ", eth1ip=" + eth1ip + ", eth1mask=" + eth1mask + ",destIp=" + destIpOrCidr);
        if (destIpOrCidr == null) {
            s_logger.debug("addRouteToInternalIp: destIp is null");
            return;
        }
        if (!NetUtils.isValidIp(destIpOrCidr) && !NetUtils.isValidCIDR(destIpOrCidr)) {
            s_logger.warn(" destIp is not a valid ip address or cidr destIp=" + destIpOrCidr);
            return;
        }
        // Determine whether the destination (IP or CIDR) already lies inside
        // eth1's subnet; if so, the kernel routes it directly and no entry is needed.
        boolean inSameSubnet = false;
        if (NetUtils.isValidIp(destIpOrCidr)) {
            if (eth1ip != null && eth1mask != null) {
                inSameSubnet = NetUtils.sameSubnet(eth1ip, destIpOrCidr, eth1mask);
            } else {
                s_logger.warn("addRouteToInternalIp: unable to determine same subnet: _eth1ip=" + eth1ip + ", dest ip=" + destIpOrCidr + ", _eth1mask=" + eth1mask);
            }
        } else {
            inSameSubnet = NetUtils.isNetworkAWithinNetworkB(destIpOrCidr, NetUtils.ipAndNetMaskToCidr(eth1ip, eth1mask));
        }
        if (inSameSubnet) {
            s_logger.debug("addRouteToInternalIp: dest ip " + destIpOrCidr + " is in the same subnet as eth1 ip " + eth1ip);
            return;
        }
        // Delete any stale route first; the delete's result is intentionally ignored
        // (it fails harmlessly when no route exists).
        Script command = new Script("/bin/bash", s_logger);
        command.add("-c");
        command.add("ip route delete " + destIpOrCidr);
        command.execute();
        command = new Script("/bin/bash", s_logger);
        command.add("-c");
        command.add("ip route add " + destIpOrCidr + " via " + localgw);
        String result = command.execute();
        if (result != null) {
            s_logger.warn("Error in configuring route to internal ip err=" + result);
        } else {
            s_logger.debug("addRouteToInternalIp: added route to internal ip=" + destIpOrCidr + " via " + localgw);
        }
    }
private void configureSSL() {
if (!_inSystemVM) {
return;
}
Script command = new Script(_configSslScr);
command.add("-i", _publicIp);
command.add("-h", _hostname);
String result = command.execute();
if (result != null) {
s_logger.warn("Unable to configure httpd to use ssl");
}
}
private void configureSSL(String prvkeyPath, String prvCertPath, String certChainPath, String rootCACert) {
if (!_inSystemVM) {
return;
}
Script command = new Script(_configSslScr);
command.add("-i", _publicIp);
command.add("-h", _hostname);
command.add("-k", prvkeyPath);
command.add("-p", prvCertPath);
if (certChainPath != null) {
command.add("-t", certChainPath);
}
if (rootCACert != null) {
command.add("-u", rootCACert);
}
String result = command.execute();
if (result != null) {
s_logger.warn("Unable to configure httpd to use ssl");
}
}
    /**
     * Runs config_auth.sh to set up the HTTP auth credentials used for
     * secondary-storage copy.
     *
     * NOTE(review): the password is passed as a command-line argument and is
     * therefore visible in the process list while the script runs - confirm
     * this is acceptable inside the SSVM.
     *
     * @return null on success, otherwise the script's error output
     */
    private String configureAuth(String user, String passwd) {
        Script command = new Script(_configAuthScr);
        command.add(user);
        command.add(passwd);
        String result = command.execute();
        if (result != null) {
            s_logger.warn("Unable to configure httpd to use auth");
        }
        return result;
    }
private String configureIpFirewall(List<String> ipList, boolean isAppend) {
Script command = new Script(_configIpFirewallScr);
command.add(String.valueOf(isAppend));
for (String ip : ipList) {
command.add(ip);
}
String result = command.execute();
if (result != null) {
s_logger.warn("Unable to configure firewall for command : " + command);
}
return result;
}
/**
* Mount remote device named on local file system on subfolder of _parent
* field.
* <p>
*
* Supported schemes are "nfs" and "cifs".
* <p>
*
* CIFS parameters are documented with mount.cifs at
* http://linux.die.net/man/8/mount.cifs
* For simplicity, when a URI is used to specify a CIFS share,
* options such as domain,user,password are passed as query parameters.
*
* @param uri
     * corresponding to the remote device. Will throw for unsupported
* scheme.
* @return name of folder in _parent that device was mounted.
* @throws UnknownHostException
*/
protected String mountUri(URI uri) throws UnknownHostException {
String uriHostIp = getUriHostIp(uri);
String nfsPath = uriHostIp + ":" + uri.getPath();
// Single means of calculating mount directory regardless of scheme
String dir = UUID.nameUUIDFromBytes(nfsPath.getBytes()).toString();
String localRootPath = _parent + "/" + dir;
// remote device syntax varies by scheme.
String remoteDevice;
if (uri.getScheme().equals("cifs")) {
remoteDevice = "//" + uriHostIp + uri.getPath();
s_logger.debug("Mounting device with cifs-style path of " + remoteDevice);
} else {
remoteDevice = nfsPath;
s_logger.debug("Mounting device with nfs-style path of " + remoteDevice);
}
mount(localRootPath, remoteDevice, uri);
return dir;
}
protected void umount(String localRootPath, URI uri) {
ensureLocalRootPathExists(localRootPath, uri);
if (!mountExists(localRootPath, uri)) {
return;
}
Script command = new Script(!_inSystemVM, "mount", _timeout, s_logger);
command.add(localRootPath);
String result = command.execute();
if (result != null) {
// Fedora Core 12 errors out with any -o option executed from java
String errMsg = "Unable to umount " + localRootPath + " due to " + result;
s_logger.error(errMsg);
File file = new File(localRootPath);
if (file.exists()) {
file.delete();
}
throw new CloudRuntimeException(errMsg);
}
s_logger.debug("Successfully umounted " + localRootPath);
}
    /**
     * Mounts remoteDevice at localRootPath (a no-op when something is already
     * mounted there), then makes sure the standard snapshots/volumes
     * subdirectories exist under the mount.
     */
    protected void mount(String localRootPath, String remoteDevice, URI uri) {
        s_logger.debug("mount " + uri.toString() + " on " + localRootPath);
        ensureLocalRootPathExists(localRootPath, uri);
        if (mountExists(localRootPath, uri)) {
            return;
        }
        attemptMount(localRootPath, remoteDevice, uri);
        // XXX: Adding the check for creation of snapshots dir here. Might have
        // to move it somewhere more logical later.
        checkForSnapshotsDir(localRootPath);
        checkForVolumesDir(localRootPath);
    }
    /**
     * Issues the actual "mount -t <scheme> ..." command for an nfs or cifs
     * share. On failure the mount-point directory is removed and a
     * CloudRuntimeException is thrown; any other scheme is rejected.
     */
    protected void attemptMount(String localRootPath, String remoteDevice, URI uri) {
        String result;
        s_logger.debug("Make cmdline call to mount " + remoteDevice + " at " + localRootPath + " based on uri " + uri);
        Script command = new Script(!_inSystemVM, "mount", _timeout, s_logger);
        String scheme = uri.getScheme().toLowerCase();
        command.add("-t", scheme);
        if (scheme.equals("nfs")) {
            if ("Mac OS X".equalsIgnoreCase(System.getProperty("os.name"))) {
                // See http://wiki.qnap.com/wiki/Mounting_an_NFS_share_from_OS_X
                command.add("-o", "resvport");
            }
            if (_inSystemVM) {
                // soft mount with attribute caching disabled so storage failures
                // surface as errors instead of hanging the SSVM
                command.add("-o", "soft,timeo=133,retrans=2147483647,tcp,acdirmax=0,acdirmin=0");
            }
        } else if (scheme.equals("cifs")) {
            String extraOpts = parseCifsMountOptions(uri);
            // nfs acdirmax / acdirmin correspoonds to CIFS actimeo (see
            // http://linux.die.net/man/8/mount.cifs)
            // no equivalent to nfs timeo, retrans or tcp in CIFS
            // todo: allow security mode to be set.
            command.add("-o", extraOpts + "soft,actimeo=0");
        } else {
            String errMsg = "Unsupported storage device scheme " + scheme + " in uri " + uri.toString();
            s_logger.error(errMsg);
            throw new CloudRuntimeException(errMsg);
        }
        command.add(remoteDevice);
        command.add(localRootPath);
        result = command.execute();
        if (result != null) {
            // Fedora Core 12 errors out with any -o option executed from java
            String errMsg = "Unable to mount " + remoteDevice + " at " + localRootPath + " due to " + result;
            s_logger.error(errMsg);
            File file = new File(localRootPath);
            if (file.exists()) {
                file.delete();
            }
            throw new CloudRuntimeException(errMsg);
        }
        s_logger.debug("Successfully mounted " + remoteDevice + " at " + localRootPath);
    }
protected String parseCifsMountOptions(URI uri) {
List<NameValuePair> args = URLEncodedUtils.parse(uri, "UTF-8");
boolean foundUser = false;
boolean foundPswd = false;
StringBuilder extraOpts = new StringBuilder();
for (NameValuePair nvp : args) {
String name = nvp.getName();
if (name.equals("user")) {
foundUser = true;
s_logger.debug("foundUser is" + foundUser);
} else if (name.equals("password")) {
foundPswd = true;
s_logger.debug("password is present in uri");
}
extraOpts.append(name + "=" + nvp.getValue() + ",");
}
if (s_logger.isDebugEnabled()) {
s_logger.error("extraOpts now " + extraOpts);
}
if (!foundUser || !foundPswd) {
String errMsg =
"Missing user and password from URI. Make sure they" + "are in the query string and separated by '&'. E.g. "
+ "cifs://example.com/some_share?user=foo&password=bar";
s_logger.error(errMsg);
throw new CloudRuntimeException(errMsg);
}
return extraOpts.toString();
}
protected boolean mountExists(String localRootPath, URI uri) {
    // Runs the system "mount" command and scans its parsed output for an entry
    // containing localRootPath; returns true when something is already mounted there.
    Script mountCmd = new Script(!_inSystemVM, "mount", _timeout, s_logger);
    ZfsPathParser outputParser = new ZfsPathParser(localRootPath);
    mountCmd.execute(outputParser);
    List<String> mountedPaths = new ArrayList<String>(outputParser.getPaths());
    for (String mountedPath : mountedPaths) {
        if (mountedPath.contains(localRootPath)) {
            s_logger.debug("Some device already mounted at " + localRootPath + ", no need to mount " + uri.toString());
            return true;
        }
    }
    return false;
}
protected void ensureLocalRootPathExists(String localRootPath, URI uri) {
    // Creates the local mount-point directory when it is not already present;
    // fails hard if the directory still does not exist after mkdir.
    s_logger.debug("making available " + localRootPath + " on " + uri.toString());
    File mountPoint = new File(localRootPath);
    s_logger.debug("local folder for mount will be " + mountPoint.getPath());
    if (mountPoint.exists()) {
        return;
    }
    s_logger.debug("create mount point: " + mountPoint.getPath());
    _storage.mkdir(mountPoint.getPath());
    // Need to check after mkdir to allow O/S to complete operation
    if (!mountPoint.exists()) {
        String errMsg = "Unable to create local folder for: " + localRootPath + " in order to mount " + uri.toString();
        s_logger.error(errMsg);
        throw new CloudRuntimeException(errMsg);
    }
}
protected String getUriHostIp(URI uri) throws UnknownHostException {
    // Resolves the URI's host name to its IP address; propagates UnknownHostException
    // when DNS resolution fails.
    final String host = uri.getHost();
    final String hostIp = InetAddress.getByName(host).getHostAddress();
    s_logger.info("Determined host " + host + " corresponds to IP " + hostIp);
    return hostIp;
}
@Override
public boolean start() {
// Nothing to start for this resource; report success so the agent framework proceeds.
return true;
}
@Override
public boolean stop() {
// Nothing to tear down for this resource; report success unconditionally.
return true;
}
@Override
public StartupCommand[] initialize() {
// Builds the startup command announcing this secondary-storage resource, filling in
// network details and (when known) the public IP address.
final StartupSecondaryStorageCommand cmd = new StartupSecondaryStorageCommand();
fillNetworkInformation(cmd);
if (_publicIp != null) {
cmd.setPublicIpAddress(_publicIp);
}
if (_inSystemVM) {
// Inside the SSVM, expose the storage root over HTTP through a /var/www/html/copy symlink.
Script command = new Script("/bin/bash", s_logger);
command.add("-c");
command.add("ln -sf " + _parent + " /var/www/html/copy");
String result = command.execute();
if (result != null) {
s_logger.warn("Error in linking err=" + result);
// Returning null aborts initialization; the symlink is required for copy-over-HTTP.
return null;
}
}
return new StartupCommand[] {cmd};
}
protected boolean checkForSnapshotsDir(String mountPoint) {
    // Ensures the "snapshots" directory exists directly under the given mount point.
    return createDir("snapshots", mountPoint + File.separator + "snapshots", mountPoint);
}
protected boolean checkForVolumesDir(String mountPoint) {
    // Ensures the "volumes" directory exists directly under the given mount point.
    // Uses File.separator for consistency with checkForSnapshotsDir (was a hard-coded "/";
    // identical on POSIX hosts, so no behavior change in practice).
    String volumesDirLocation = mountPoint + File.separator + "volumes";
    return createDir("volumes", volumesDirLocation, mountPoint);
}
protected boolean createDir(String dirName, String dirLocation, String mountPoint) {
    // Makes sure dirLocation exists as a directory, replacing a plain file of the
    // same name if necessary; returns whether the directory exists afterwards.
    boolean dirExists = false;
    File dir = new File(dirLocation);
    if (!dir.exists()) {
        dirExists = _storage.mkdir(dirLocation);
    } else if (dir.isDirectory()) {
        s_logger.debug(dirName + " already exists on secondary storage, and is mounted at " + mountPoint);
        dirExists = true;
    } else {
        // A non-directory file occupies the path: remove it and recreate as a directory.
        dirExists = dir.delete() && _storage.mkdir(dirLocation);
    }
    if (dirExists) {
        s_logger.info(dirName + " directory created/exists on Secondary Storage.");
    } else {
        s_logger.info(dirName + " directory does not exist on Secondary Storage.");
    }
    return dirExists;
}
@Override
protected String getDefaultScriptsDir() {
// Default location of the secondary-storage helper scripts relative to the agent's CWD.
return "./scripts/storage/secondary";
}
@Override
public void setName(String name) {
// Intentional no-op: this resource does not use a configurable name (interface requirement only).
}
@Override
public void setConfigParams(Map<String, Object> params) {
// Intentional no-op: configuration is handled elsewhere; method exists to satisfy the interface.
}
@Override
public Map<String, Object> getConfigParams() {
// This resource exposes no dynamic configuration parameters.
return null;
}
@Override
public int getRunLevel() {
// Run levels are not used by this resource; always report 0.
return 0;
}
@Override
public void setRunLevel(int level) {
// Intentional no-op: run levels are not used by this resource.
}
@Override
public void fillNetworkInformation(final StartupCommand cmd) {
// Populates addressing info on the startup command. Outside the system VM there is no
// real NIC discovery, so placeholder MAC/netmask values are reported alongside the
// configured IP addresses; inside the system VM the superclass discovery is used.
final String dummyMac = "00:06:0A:0B:0C:0D";
final String dummyNetmask = "255.255.255.0";
if (!_inSystemVM) {
cmd.setPrivateIpAddress(_eth1ip);
cmd.setPrivateMacAddress(dummyMac);
cmd.setPrivateNetmask(dummyNetmask);
cmd.setPublicIpAddress(_publicIp);
cmd.setPublicMacAddress(dummyMac);
cmd.setPublicNetmask(dummyNetmask);
cmd.setName(_hostname);
} else {
super.fillNetworkInformation(cmd);
}
}
private String getScriptLocation(UploadEntity.ResourceType resourceType) {
    // Locates the post-upload processing script for the given resource type,
    // honoring an optional "template.scripts.dir" override from the agent params.
    String scriptsDir = (String) _params.get("template.scripts.dir");
    if (scriptsDir == null) {
        scriptsDir = "scripts/storage/secondary";
    }
    final String scriptname;
    switch (resourceType) {
        case VOLUME:
            scriptname = "createvolume.sh";
            break;
        case TEMPLATE:
            scriptname = "createtmplt.sh";
            break;
        default:
            throw new InvalidParameterValueException("cannot find script for resource type: " + resourceType);
    }
    return Script.findScript(scriptsDir, scriptname);
}
public UploadEntity createUploadEntity(String uuid, String metadata, long contentLength) {
// Decodes the signed upload metadata and registers a new in-progress UploadEntity for it.
// Rejects the request when the metadata cannot be decoded, the one-time URL was already
// used, or the declared content length exceeds the per-upload or per-account limits.
// Note: the uuid parameter is overwritten with the UUID carried in the decoded metadata.
TemplateOrVolumePostUploadCommand cmd = getTemplateOrVolumePostUploadCmd(metadata);
UploadEntity uploadEntity = null;
if(cmd == null ){
String errorMessage = "unable decode and deserialize metadata.";
updateStateMapWithError(uuid, errorMessage);
throw new InvalidParameterValueException(errorMessage);
} else {
uuid = cmd.getEntityUUID();
if (isOneTimePostUrlUsed(cmd)) {
uploadEntity = uploadEntityStateMap.get(uuid);
StringBuilder errorMessage = new StringBuilder("The one time post url is already used");
if (uploadEntity != null) {
errorMessage.append(" and the upload is in ").append(uploadEntity.getUploadState()).append(" state.");
}
throw new InvalidParameterValueException(errorMessage.toString());
}
int maxSizeInGB = Integer.parseInt(cmd.getMaxUploadSize());
int contentLengthInGB = getSizeInGB(contentLength);
if (contentLengthInGB > maxSizeInGB) {
String errorMessage = "Maximum file upload size exceeded. Content Length received: " + contentLengthInGB + "GB. Maximum allowed size: " + maxSizeInGB + "GB.";
updateStateMapWithError(uuid, errorMessage);
throw new InvalidParameterValueException(errorMessage);
}
checkSecondaryStorageResourceLimit(cmd, contentLengthInGB);
try {
String absolutePath = cmd.getAbsolutePath();
uploadEntity = new UploadEntity(uuid, cmd.getEntityId(), UploadEntity.Status.IN_PROGRESS, cmd.getName(), absolutePath);
uploadEntity.setMetaDataPopulated(true);
uploadEntity.setResourceType(UploadEntity.ResourceType.valueOf(cmd.getType()));
uploadEntity.setFormat(Storage.ImageFormat.valueOf(cmd.getImageFormat()));
//relative path with out ssvm mount info.
uploadEntity.setTemplatePath(absolutePath);
String dataStoreUrl = cmd.getDataTo();
String installPathPrefix = this.getRootDir(dataStoreUrl) + File.separator + absolutePath;
uploadEntity.setInstallPathPrefix(installPathPrefix);
uploadEntity.setHvm(cmd.getRequiresHvm());
uploadEntity.setChksum(cmd.getChecksum());
uploadEntity.setMaxSizeInGB(maxSizeInGB);
uploadEntity.setDescription(cmd.getDescription());
uploadEntity.setContentLength(contentLength);
// create a install dir
if (!_storage.exists(installPathPrefix)) {
_storage.mkdir(installPathPrefix);
}
uploadEntityStateMap.put(uuid, uploadEntity);
} catch (Exception e) {
//upload entity will be null incase an exception occurs and the handler will not proceed.
s_logger.error("exception occurred while creating upload entity ", e);
updateStateMapWithError(uuid, e.getMessage());
}
}
return uploadEntity;
}
private synchronized void checkSecondaryStorageResourceLimit(TemplateOrVolumePostUploadCommand cmd, int contentLengthInGB) {
    // Rejects the upload when the account's existing secondary-storage usage (templates,
    // volumes and snapshots) plus the incoming content length would exceed the configured
    // per-account cap. Synchronized so concurrent uploads see a consistent usage snapshot.
    String rootDir = this.getRootDir(cmd.getDataTo()) + File.separator;
    long accountId = cmd.getAccountId();
    long accountTemplateDirSize = 0;
    File accountTemplateDir = new File(rootDir + getTemplatePathForAccount(accountId));
    if (accountTemplateDir.exists()) {
        // BUG FIX: the computed size was previously discarded, so template usage was
        // never counted towards the account's limit.
        accountTemplateDirSize = FileUtils.sizeOfDirectory(accountTemplateDir);
    }
    long accountVolumeDirSize = 0;
    File accountVolumeDir = new File(rootDir + getVolumePathForAccount(accountId));
    if (accountVolumeDir.exists()) {
        accountVolumeDirSize = FileUtils.sizeOfDirectory(accountVolumeDir);
    }
    long accountSnapshotDirSize = 0;
    File accountSnapshotDir = new File(rootDir + getSnapshotPathForAccount(accountId));
    if (accountSnapshotDir.exists()) {
        accountSnapshotDirSize = FileUtils.sizeOfDirectory(accountSnapshotDir);
    }
    s_logger.debug("accountTemplateDirSize: " + accountTemplateDirSize + " accountSnapshotDirSize: " + accountSnapshotDirSize + " accountVolumeDirSize: " +
        accountVolumeDirSize);
    int accountDirSizeInGB = getSizeInGB(accountTemplateDirSize + accountSnapshotDirSize + accountVolumeDirSize);
    int defaultMaxAccountSecondaryStorageInGB = Integer.parseInt(cmd.getDefaultMaxAccountSecondaryStorage());
    if ((accountDirSizeInGB + contentLengthInGB) > defaultMaxAccountSecondaryStorageInGB) {
        s_logger.error("accountDirSizeInGb: " + accountDirSizeInGB + " defaultMaxAccountSecondaryStorageInGB: " + defaultMaxAccountSecondaryStorageInGB + " contentLengthInGB:"
            + contentLengthInGB);
        String errorMessage = "Maximum number of resources of type secondary_storage for account has exceeded";
        updateStateMapWithError(cmd.getEntityUUID(), errorMessage);
        throw new InvalidParameterValueException(errorMessage);
    }
}
private String getVolumePathForAccount(long accountId) {
// Relative path of the account's volumes directory under the storage root.
return TemplateConstants.DEFAULT_VOLUME_ROOT_DIR + "/" + accountId;
}
private String getTemplatePathForAccount(long accountId) {
// Relative path of the account's templates directory under the storage root.
return TemplateConstants.DEFAULT_TMPLT_ROOT_DIR + "/" + TemplateConstants.DEFAULT_TMPLT_FIRST_LEVEL_DIR + accountId;
}
private String getSnapshotPathForAccount(long accountId) {
// Relative path of the account's snapshots directory under the storage root.
return TemplateConstants.DEFAULT_SNAPSHOT_ROOT_DIR + "/" + accountId;
}
private boolean isOneTimePostUrlUsed(TemplateOrVolumePostUploadCommand cmd) {
// A one-time post URL counts as used when an upload entity is already tracked for the
// command's UUID, or the target upload path already exists on disk.
String uuid = cmd.getEntityUUID();
String uploadPath = this.getRootDir(cmd.getDataTo()) + File.separator + cmd.getAbsolutePath();
return uploadEntityStateMap.containsKey(uuid) || new File(uploadPath).exists();
}
private int getSizeInGB(long sizeInBytes) {
    // Converts a byte count to whole gigabytes, rounding any fractional remainder up.
    final double bytesPerGB = 1024L * 1024L * 1024L;
    return (int) Math.ceil(sizeInBytes / bytesPerGB);
}
public String postUpload(String uuid, String filename) {
// Finalizes an HTTP-posted upload: validates the file format and size against the
// registered UploadEntity, runs the install script to move the temp file into place,
// fixes permissions, writes the template/volume properties, and records virtual/physical
// sizes via the registered format processors. Returns null on success, or an error
// string describing the failure.
UploadEntity uploadEntity = uploadEntityStateMap.get(uuid);
int installTimeoutPerGig = 180 * 60 * 1000;
String resourcePath = uploadEntity.getInstallPathPrefix();
String finalResourcePath = uploadEntity.getTmpltPath(); // template download
UploadEntity.ResourceType resourceType = uploadEntity.getResourceType();
String fileSavedTempLocation = uploadEntity.getInstallPathPrefix() + "/" + filename;
String uploadedFileExtension = FilenameUtils.getExtension(filename);
String userSelectedFormat= uploadEntity.getFormat().toString();
// Compressed uploads are validated as "<format>.<compression>" (e.g. QCOW2.gz).
if(uploadedFileExtension.equals("zip") || uploadedFileExtension.equals("bz2") || uploadedFileExtension.equals("gz")) {
userSelectedFormat += "." + uploadedFileExtension;
}
String formatError = ImageStoreUtil.checkTemplateFormat(fileSavedTempLocation, userSelectedFormat);
if(StringUtils.isNotBlank(formatError)) {
String errorString = "File type mismatch between uploaded file and selected format. Selected file format: " + userSelectedFormat + ". Received: " + formatError;
s_logger.error(errorString);
return errorString;
}
int imgSizeGigs = getSizeInGB(_storage.getSize(fileSavedTempLocation));
int maxSize = uploadEntity.getMaxSizeInGB();
if(imgSizeGigs > maxSize) {
String errorMessage = "Maximum file upload size exceeded. Physical file size: " + imgSizeGigs + "GB. Maximum allowed size: " + maxSize + "GB.";
s_logger.error(errorMessage);
return errorMessage;
}
imgSizeGigs++; // add one just in case
long timeout = (long)imgSizeGigs * installTimeoutPerGig;
Script scr = new Script(getScriptLocation(resourceType), timeout, s_logger);
scr.add("-s", Integer.toString(imgSizeGigs));
scr.add("-S", Long.toString(UploadEntity.s_maxTemplateSize));
if (uploadEntity.getDescription() != null && uploadEntity.getDescription().length() > 1) {
scr.add("-d", uploadEntity.getDescription());
}
if (uploadEntity.isHvm()) {
scr.add("-h");
}
String checkSum = uploadEntity.getChksum();
if (StringUtils.isNotBlank(checkSum)) {
scr.add("-c", checkSum);
}
// add options common to ISO and template
String extension = uploadEntity.getFormat().getFileExtension();
String templateName = "";
// ISOs keep their UUID-derived name; other formats get a fresh name-based UUID so
// repeated uploads of the same file do not collide.
if (extension.equals("iso")) {
templateName = uploadEntity.getUuid().trim().replace(" ", "_");
} else {
try {
templateName = UUID.nameUUIDFromBytes((uploadEntity.getFilename() + System.currentTimeMillis()).getBytes("UTF-8")).toString();
} catch (UnsupportedEncodingException e) {
templateName = uploadEntity.getUuid().trim().replace(" ", "_");
}
}
// run script to mv the temporary template file to the final template
// file
String templateFilename = templateName + "." + extension;
uploadEntity.setTemplatePath(finalResourcePath + "/" + templateFilename);
scr.add("-n", templateFilename);
scr.add("-t", resourcePath);
scr.add("-f", fileSavedTempLocation); // this is the temporary
// template file downloaded
// NOTE(review): "-c" may already have been added above when the checksum is non-blank,
// resulting in a duplicate option on the command line — confirm the script tolerates this.
if (uploadEntity.getChksum() != null && uploadEntity.getChksum().length() > 1) {
scr.add("-c", uploadEntity.getChksum());
}
scr.add("-u"); // cleanup
String result;
result = scr.execute();
if (result != null) {
return result;
}
// Set permissions for the downloaded template
File downloadedTemplate = new File(resourcePath + "/" + templateFilename);
_storage.setWorldReadableAndWriteable(downloadedTemplate);
// Set permissions for template/volume.properties
String propertiesFile = resourcePath;
if (resourceType == UploadEntity.ResourceType.TEMPLATE) {
propertiesFile += "/template.properties";
} else {
propertiesFile += "/volume.properties";
}
File templateProperties = new File(propertiesFile);
_storage.setWorldReadableAndWriteable(templateProperties);
TemplateLocation loc = new TemplateLocation(_storage, resourcePath);
try {
loc.create(uploadEntity.getEntityId(), true, uploadEntity.getFilename());
} catch (IOException e) {
s_logger.warn("Something is wrong with template location " + resourcePath, e);
loc.purge();
return "Unable to upload due to " + e.getMessage();
}
// Let the first processor that recognizes the image record its virtual/physical size.
Map<String, Processor> processors = _dlMgr.getProcessors();
for (Processor processor : processors.values()) {
FormatInfo info = null;
try {
info = processor.process(resourcePath, null, templateName);
} catch (InternalErrorException e) {
s_logger.error("Template process exception ", e);
return e.toString();
}
if (info != null) {
loc.addFormat(info);
uploadEntity.setVirtualSize(info.virtualSize);
uploadEntity.setPhysicalSize(info.size);
break;
}
}
if (!loc.save()) {
s_logger.warn("Cleaning up because we're unable to save the formats");
loc.purge();
}
uploadEntity.setStatus(UploadEntity.Status.COMPLETED);
uploadEntityStateMap.put(uploadEntity.getUuid(), uploadEntity);
return null;
}
private String getPostUploadPSK() {
    // Lazily loads and caches the SSVM pre-shared key used to verify post-upload
    // request signatures; returns null when the key file cannot be read.
    if (_ssvmPSK != null) {
        return _ssvmPSK;
    }
    try {
        _ssvmPSK = FileUtils.readFileToString(new File(POST_UPLOAD_KEY_LOCATION), "utf-8");
    } catch (IOException e) {
        s_logger.debug("Error while reading SSVM PSK from location " + POST_UPLOAD_KEY_LOCATION, e);
    }
    return _ssvmPSK;
}
public void updateStateMapWithError(String uuid, String errorMessage) {
    // Marks the upload identified by uuid as failed, creating a placeholder entity
    // when none has been registered yet.
    UploadEntity uploadEntity = uploadEntityStateMap.get(uuid);
    if (uploadEntity == null) {
        uploadEntity = new UploadEntity();
    }
    uploadEntity.setStatus(UploadEntity.Status.ERROR);
    uploadEntity.setErrorMessage(errorMessage);
    uploadEntityStateMap.put(uuid, uploadEntity);
}
public void validatePostUploadRequest(String signature, String metadata, String timeout, String hostname,long contentLength, String uuid) throws InvalidParameterValueException{
// Validates an incoming post-upload request: all mandatory fields present, positive
// content length, HMAC signature over (metadata + URL + timeout) matches, and the
// ISO-8601 expiry timestamp has not passed. Any failure is recorded in the state map
// and surfaced as an InvalidParameterValueException.
// check none of the params are empty
if(StringUtils.isEmpty(signature) || StringUtils.isEmpty(metadata) || StringUtils.isEmpty(timeout)) {
updateStateMapWithError(uuid,"signature, metadata and expires are compulsory fields.");
throw new InvalidParameterValueException("signature, metadata and expires are compulsory fields.");
}
//check that contentLength exists and is greater than zero
if (contentLength <= 0) {
throw new InvalidParameterValueException("content length is not set in the request or has invalid value.");
}
//validate signature
String fullUrl = "https://" + hostname + "/upload/" + uuid;
String computedSignature = EncryptionUtil.generateSignature(metadata + fullUrl + timeout, getPostUploadPSK());
boolean isSignatureValid = computedSignature.equals(signature);
if(!isSignatureValid) {
updateStateMapWithError(uuid,"signature validation failed.");
throw new InvalidParameterValueException("signature validation failed.");
}
//validate timeout
DateTime timeoutDateTime = DateTime.parse(timeout, ISODateTimeFormat.dateTime());
if(timeoutDateTime.isBeforeNow()) {
updateStateMapWithError(uuid,"request not valid anymore.");
throw new InvalidParameterValueException("request not valid anymore.");
}
}
private TemplateOrVolumePostUploadCommand getTemplateOrVolumePostUploadCmd(String metadata) {
// Decrypts the metadata blob with the SSVM pre-shared key and deserializes it from
// JSON; returns null (after logging) when decoding or deserialization fails.
TemplateOrVolumePostUploadCommand cmd = null;
try {
Gson gson = new GsonBuilder().create();
cmd = gson.fromJson(EncryptionUtil.decodeData(metadata, getPostUploadPSK()), TemplateOrVolumePostUploadCommand.class);
} catch(Exception ex) {
s_logger.error("exception while decoding and deserialising metadata", ex);
}
return cmd;
}
}
|
coverity 1116693: failure to create the properties template file is unlikely, but serious if it happens repeatedly.
Signed-off-by: Daan Hoogland <0c11e55e6a36a8185db5745ba7416a2dde0b12ba@onecht.net>
|
services/secondary-storage/server/src/org/apache/cloudstack/storage/resource/NfsSecondaryStorageResource.java
|
coverity 1116693: failure to create the properties template file is unlikely, but serious if it happens repeatedly.
|
|
Java
|
apache-2.0
|
71adbc8e576a97d67f42a8768089e72dfc070c40
| 0
|
yihtserns/camelscript
|
/*
* Copyright 2013 yihtserns.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.yihtserns.camelscript.transform;
import com.github.yihtserns.camelscript.CamelContextCategory;
import groovy.lang.Delegate;
import groovy.lang.Mixin;
import org.apache.camel.CamelContext;
import org.apache.camel.impl.DefaultCamelContext;
import org.codehaus.groovy.ast.ASTNode;
import org.codehaus.groovy.ast.AnnotationNode;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.FieldNode;
import org.codehaus.groovy.ast.MixinASTTransformation;
import org.codehaus.groovy.ast.expr.ClassExpression;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.codehaus.groovy.ast.expr.ConstructorCallExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.ast.expr.FieldExpression;
import org.codehaus.groovy.ast.expr.StaticMethodCallExpression;
import org.codehaus.groovy.ast.stmt.ExpressionStatement;
import org.codehaus.groovy.control.CompilePhase;
import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.transform.ASTTransformation;
import org.codehaus.groovy.transform.DelegateASTTransformation;
import org.codehaus.groovy.transform.GroovyASTTransformation;
import org.objectweb.asm.Opcodes;
import static org.codehaus.groovy.ast.expr.VariableExpression.THIS_EXPRESSION;
/**
* Transformer to turn a Groovy Script into a Camel Script.
*
* @author yihtserns
*/
@GroovyASTTransformation(phase = CompilePhase.CANONICALIZATION)
public class CamelScriptASTTransformation implements ASTTransformation {
// Name of the synthetic field injected into the script class; also used to detect
// classes that were already transformed (see visit()).
private static final String CAMEL_CONTEXT_FIELD_NAME = "camelContext";
// Reuse Groovy's own @Delegate / @Mixin transformations instead of re-implementing them.
private DelegateASTTransformation delegateTransformation = new DelegateASTTransformation();
private MixinASTTransformation mixinTransformation = new MixinASTTransformation();
/**
 * Source code representation of what this method is doing:
 * <pre>
 * {@literal @}Mixin(CamelContextCategory)
 * public class SCRIPT_NAME {
 *   {@literal @}Delegate(deprecated=true)
 *   private CamelContext camelContext = new DefaultCamelContext(new ScriptBindingRegistry(this));
 *
 *   {
 *       CamelContextStopper.registerToShutdownHook(camelContext);
 *   }
 * }
 * </pre>
 */
public void visit(final ASTNode[] nodes, final SourceUnit source) {
final ClassNode scriptClassNode = source.getAST().getScriptClassDummy();
ScriptClassNodeTransformer transformer = new ScriptClassNodeTransformer(scriptClassNode, source);
// The camelContext field is only added by this transformation, so its presence means
// this class node was already processed.
if (scriptClassNode.getField(CAMEL_CONTEXT_FIELD_NAME) != null) {
// Encountered inner class
return;
}
Expression newScriptRegistry = constructorOf(ScriptBindingRegistry.class, THIS_EXPRESSION);
Expression newCamelContext = constructorOf(DefaultCamelContext.class, newScriptRegistry);
FieldNode camelContextField = fieldNode(CAMEL_CONTEXT_FIELD_NAME, CamelContext.class, newCamelContext);
Expression registerToShutdownHook = staticMethodOf(
CamelContextStopper.class, "registerToShutdownHook", new FieldExpression(camelContextField));
transformer.delegateTo(camelContextField);
transformer.mixin(CamelContextCategory.class);
transformer.addToInitializerBlock(registerToShutdownHook);
}
/**
 * Convenience method to create a constructor-call expression: {@code new clazz(constructorArg)}.
 */
private Expression constructorOf(final Class clazz, final Expression constructorArg) {
return new ConstructorCallExpression(new ClassNode(clazz), constructorArg);
}
/**
 * Convenience method to create {@link FieldNode}.
 * @param fieldName name of the private field to create
 * @param type field type
 * @param initialValueExpression initial value for the field
 * @return a private {@link FieldNode} initialized with the given expression
 */
private FieldNode fieldNode(
final String fieldName, final Class<CamelContext> type, final Expression initialValueExpression) {
return new FieldNode(fieldName, Opcodes.ACC_PRIVATE, new ClassNode(type), null, initialValueExpression);
}
/**
 * Convenience method to create {@link StaticMethodCallExpression}.
 */
private Expression staticMethodOf(final Class clazz, final String methodName, final Expression arguments) {
return new StaticMethodCallExpression(new ClassNode(clazz), methodName, arguments);
}
/**
 * Small helper that applies the individual transformations ({@code @Delegate},
 * {@code @Mixin}, initializer block) to the script's class node.
 */
private class ScriptClassNodeTransformer {
private ClassNode scriptClassNode;
private SourceUnit source;
public ScriptClassNodeTransformer(final ClassNode scriptClassNode, final SourceUnit source) {
this.scriptClassNode = scriptClassNode;
this.source = source;
}
/**
 * @param fieldNode delegate for the Groovy Script
 * @see {@link Delegate}
 */
public void delegateTo(final FieldNode fieldNode) {
AnnotationNode delegateAnnotationNode = new AnnotationNode(new ClassNode(Delegate.class));
// Have to implement all methods in an interface, even deprecated ones
delegateAnnotationNode.setMember("deprecated", new ConstantExpression(true));
scriptClassNode.addField(fieldNode);
delegateTransformation.visit(
new ASTNode[]{delegateAnnotationNode, fieldNode},
source);
}
/**
 * @param categoryClass Groovy Category to be mixed into the Groovy Script
 */
public void mixin(final Class categoryClass) {
AnnotationNode categoryAnnotationNode = new AnnotationNode(new ClassNode(Mixin.class));
categoryAnnotationNode.setMember("value", new ClassExpression(new ClassNode(categoryClass)));
mixinTransformation.visit(
new ASTNode[]{categoryAnnotationNode, scriptClassNode},
source);
}
public void addToInitializerBlock(final Expression expression) {
scriptClassNode.addObjectInitializerStatements(new ExpressionStatement(expression));
}
}
}
|
src/main/java/com/github/yihtserns/camelscript/transform/CamelScriptASTTransformation.java
|
/*
* Copyright 2013 yihtserns.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.yihtserns.camelscript.transform;
import com.github.yihtserns.camelscript.CamelContextCategory;
import groovy.lang.Delegate;
import groovy.lang.Mixin;
import org.apache.camel.CamelContext;
import org.apache.camel.impl.DefaultCamelContext;
import org.codehaus.groovy.ast.ASTNode;
import org.codehaus.groovy.ast.AnnotationNode;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.FieldNode;
import org.codehaus.groovy.ast.MixinASTTransformation;
import org.codehaus.groovy.ast.expr.ClassExpression;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.codehaus.groovy.ast.expr.ConstructorCallExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.ast.expr.FieldExpression;
import org.codehaus.groovy.ast.expr.StaticMethodCallExpression;
import org.codehaus.groovy.ast.stmt.ExpressionStatement;
import org.codehaus.groovy.control.CompilePhase;
import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.transform.ASTTransformation;
import org.codehaus.groovy.transform.DelegateASTTransformation;
import org.codehaus.groovy.transform.GroovyASTTransformation;
import org.objectweb.asm.Opcodes;
import static org.codehaus.groovy.ast.expr.VariableExpression.THIS_EXPRESSION;
/**
* Transformer to turn a Groovy Script into a Camel Script.
*
* @author yihtserns
*/
@GroovyASTTransformation(phase = CompilePhase.CANONICALIZATION)
public class CamelScriptASTTransformation implements ASTTransformation {
// Name of the synthetic field injected into the script class; also used to detect
// classes that were already transformed (see visit()).
private static final String CAMEL_CONTEXT_FIELD_NAME = "camelContext";
// Reuse Groovy's own @Delegate / @Mixin transformations instead of re-implementing them.
private DelegateASTTransformation delegateTransformation = new DelegateASTTransformation();
private MixinASTTransformation mixinTransformation = new MixinASTTransformation();
/**
 * Source code representation of what this method is doing:
 * <pre>
 * {@literal @}Mixin(CamelContextCategory)
 * public class SCRIPT_NAME {
 *   {@literal @}Delegate
 *   private CamelContext camelContext = new DefaultCamelContext(new ScriptBindingRegistry(this));
 *
 *   {
 *       CamelContextStopper.registerToShutdownHook(camelContext);
 *   }
 * }
 * </pre>
 */
public void visit(final ASTNode[] nodes, final SourceUnit source) {
final ClassNode scriptClassNode = source.getAST().getScriptClassDummy();
ScriptClassNodeTransformer transformer = new ScriptClassNodeTransformer(scriptClassNode, source);
// The camelContext field is only added by this transformation, so its presence means
// this class node was already processed.
if (scriptClassNode.getField(CAMEL_CONTEXT_FIELD_NAME) != null) {
// Encountered inner class
return;
}
Expression newScriptRegistry = constructorOf(ScriptBindingRegistry.class, THIS_EXPRESSION);
Expression newCamelContext = constructorOf(DefaultCamelContext.class, newScriptRegistry);
FieldNode camelContextField = fieldNode(CAMEL_CONTEXT_FIELD_NAME, CamelContext.class, newCamelContext);
Expression registerToShutdownHook = staticMethodOf(
CamelContextStopper.class, "registerToShutdownHook", new FieldExpression(camelContextField));
transformer.delegateTo(camelContextField);
transformer.mixin(CamelContextCategory.class);
transformer.addToInitializerBlock(registerToShutdownHook);
}
/**
 * Convenience method to create a constructor-call expression: {@code new clazz(constructorArg)}.
 */
private Expression constructorOf(final Class clazz, final Expression constructorArg) {
return new ConstructorCallExpression(new ClassNode(clazz), constructorArg);
}
/**
 * Convenience method to create {@link FieldNode}.
 * @param fieldName name of the private field to create
 * @param type field type
 * @param initialValueExpression initial value for the field
 * @return a private {@link FieldNode} initialized with the given expression
 */
private FieldNode fieldNode(
final String fieldName, final Class<CamelContext> type, final Expression initialValueExpression) {
return new FieldNode(fieldName, Opcodes.ACC_PRIVATE, new ClassNode(type), null, initialValueExpression);
}
/**
 * Convenience method to create {@link StaticMethodCallExpression}.
 */
private Expression staticMethodOf(final Class clazz, final String methodName, final Expression arguments) {
return new StaticMethodCallExpression(new ClassNode(clazz), methodName, arguments);
}
/**
 * Small helper that applies the individual transformations ({@code @Delegate},
 * {@code @Mixin}, initializer block) to the script's class node.
 */
private class ScriptClassNodeTransformer {
private ClassNode scriptClassNode;
private SourceUnit source;
public ScriptClassNodeTransformer(final ClassNode scriptClassNode, final SourceUnit source) {
this.scriptClassNode = scriptClassNode;
this.source = source;
}
/**
 * @param fieldNode delegate for the Groovy Script
 * @see {@link Delegate}
 */
public void delegateTo(final FieldNode fieldNode) {
AnnotationNode delegateAnnotationNode = new AnnotationNode(new ClassNode(Delegate.class));
// deprecated=true so @Delegate also implements deprecated interface methods.
delegateAnnotationNode.setMember("deprecated", new ConstantExpression(true));
scriptClassNode.addField(fieldNode);
delegateTransformation.visit(
new ASTNode[]{delegateAnnotationNode, fieldNode},
source);
}
/**
 * @param categoryClass Groovy Category to be mixed into the Groovy Script
 */
public void mixin(final Class categoryClass) {
AnnotationNode categoryAnnotationNode = new AnnotationNode(new ClassNode(Mixin.class));
categoryAnnotationNode.setMember("value", new ClassExpression(new ClassNode(categoryClass)));
mixinTransformation.visit(
new ASTNode[]{categoryAnnotationNode, scriptClassNode},
source);
}
public void addToInitializerBlock(final Expression expression) {
scriptClassNode.addObjectInitializerStatements(new ExpressionStatement(expression));
}
}
}
|
Added comment to explain why deprecated=true for @Delegate.
|
src/main/java/com/github/yihtserns/camelscript/transform/CamelScriptASTTransformation.java
|
Added comment to explain why deprecated=true for @Delegate.
|
|
Java
|
apache-2.0
|
59340d7213703be7770a8dceedb45b2b6bc705f7
| 0
|
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
|
// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.jdisc.http.filter.security.rule;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yahoo.container.jdisc.RequestHandlerTestDriver.MockResponseHandler;
import com.yahoo.jdisc.Metric;
import com.yahoo.jdisc.Response;
import com.yahoo.jdisc.http.filter.DiscFilterRequest;
import com.yahoo.jdisc.http.filter.security.rule.RuleBasedFilterConfig.DefaultRule;
import com.yahoo.jdisc.http.filter.security.rule.RuleBasedFilterConfig.Rule;
import com.yahoo.test.json.JsonTestHelper;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* @author bjorncs
*/
class RuleBasedRequestFilterTest {
private static final ObjectMapper jsonMapper = new ObjectMapper();
@Test
void matches_rule_that_allows_all_methods_and_paths() {
// Empty method/path lists on a rule act as wildcards: any method and path on
// host "myserver" must be allowed, overriding the BLOCK default rule.
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.BLOCK))
.rule(new Rule.Builder()
.name("first")
.hostNames("myserver")
.pathExpressions(List.of())
.methods(List.of())
.action(Rule.Action.Enum.ALLOW))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("PATCH", "http://myserver:80/path-to-resource"), responseHandler);
assertAllowed(responseHandler, metric);
}
@Test
void performs_action_on_first_matching_rule() throws IOException {
// Two rules match the same path but different methods; a GET must hit the second
// rule (404), showing rules are evaluated in order and the first match wins.
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.ALLOW))
.rule(new Rule.Builder()
.name("first")
.pathExpressions("/path-to-resource")
.methods(Rule.Methods.Enum.DELETE)
.action(Rule.Action.Enum.BLOCK)
.blockResponseCode(403))
.rule(new Rule.Builder()
.name("second")
.pathExpressions("/path-to-resource")
.methods(Rule.Methods.Enum.GET)
.action(Rule.Action.Enum.BLOCK)
.blockResponseCode(404))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("GET", "http://myserver:80/path-to-resource"), responseHandler);
assertBlocked(responseHandler, metric, 404, "");
}
@Test
void performs_default_action_if_no_rule_matches() throws IOException {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.BLOCK)
.blockResponseCode(403)
.blockResponseMessage("my custom message"))
.rule(new Rule.Builder()
.name("rule")
.pathExpressions("/path-to-resource")
.methods(Rule.Methods.Enum.GET)
.action(Rule.Action.Enum.ALLOW))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("POST", "http://myserver:80/"), responseHandler);
assertBlocked(responseHandler, metric, 403, "my custom message");
}
@Test
void matches_rule_with_multiple_alternatives_for_host_path_and_method() throws IOException {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.ALLOW))
.rule(new Rule.Builder()
.name("rule")
.hostNames(Set.of("server1", "server2", "server3"))
.pathExpressions(Set.of("/path-to-resource/{*}", "/another-path"))
.methods(Set.of(Rule.Methods.Enum.GET, Rule.Methods.POST, Rule.Methods.DELETE))
.action(Rule.Action.Enum.BLOCK)
.blockResponseCode(404)
.blockResponseMessage("not found"))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("POST", "https://server1:443/path-to-resource/id/1/subid/2"), responseHandler);
assertBlocked(responseHandler, metric, 404, "not found");
}
@Test
void no_filtering_if_request_is_allowed() {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.ALLOW))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("DELETE", "http://myserver:80/"), responseHandler);
assertAllowed(responseHandler, metric);
}
@Test
void includes_default_rule_response_headers_in_response_for_blocked_request() throws IOException {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.BLOCK)
.blockResponseHeaders(new DefaultRule.BlockResponseHeaders.Builder()
.name("Response-Header-1").value("first-header"))
.blockResponseHeaders(new DefaultRule.BlockResponseHeaders.Builder()
.name("Response-Header-2").value("second-header")))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("GET", "http://myserver:80/"), responseHandler);
assertBlocked(responseHandler, metric, 403, "");
Response response = responseHandler.getResponse();
assertResponseHeader(response, "Response-Header-1", "first-header");
assertResponseHeader(response, "Response-Header-2", "second-header");
}
@Test
void includes_rule_response_headers_in_response_for_blocked_request() throws IOException {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.ALLOW))
.rule(new Rule.Builder()
.name("rule")
.pathExpressions("/path-to-resource")
.action(Rule.Action.Enum.BLOCK)
.blockResponseHeaders(new Rule.BlockResponseHeaders.Builder()
.name("Response-Header-1").value("first-header")))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("GET", "http://myserver/path-to-resource"), responseHandler);
assertBlocked(responseHandler, metric, 403, "");
Response response = responseHandler.getResponse();
assertResponseHeader(response, "Response-Header-1", "first-header");
}
@Test
void dryrun_does_not_block() {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(true)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.BLOCK))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("GET", "http://myserver/"), responseHandler);
assertNull(responseHandler.getResponse());
}
private void assertResponseHeader(Response response, String name, String expectedValue) {
List<String> actualValues = response.headers().get(name);
assertNotNull(actualValues);
assertEquals(1, actualValues.size());
assertEquals(expectedValue, actualValues.get(0));
}
private static DiscFilterRequest request(String method, String uri) {
DiscFilterRequest request = mock(DiscFilterRequest.class);
when(request.getMethod()).thenReturn(method);
when(request.getUri()).thenReturn(URI.create(uri));
return request;
}
private static void assertAllowed(MockResponseHandler handler, Metric metric) {
verify(metric).add(eq("jdisc.http.filter.rule.allowed_requests"), eq(1L), any());
assertNull(handler.getResponse());
}
private static void assertBlocked(MockResponseHandler handler, Metric metric, int expectedCode, String expectedMessage) throws IOException {
verify(metric).add(eq("jdisc.http.filter.rule.blocked_requests"), eq(1L), any());
Response response = handler.getResponse();
assertNotNull(response);
assertEquals(expectedCode, response.getStatus());
ObjectNode expectedJson = jsonMapper.createObjectNode();
expectedJson.put("message", expectedMessage).put("code", expectedCode);
JsonNode actualJson = jsonMapper.readTree(handler.readAll().getBytes());
JsonTestHelper.assertJsonEquals(expectedJson, actualJson);
}
}
|
jdisc-security-filters/src/test/java/com/yahoo/jdisc/http/filter/security/rule/RuleBasedRequestFilterTest.java
|
// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.jdisc.http.filter.security.rule;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yahoo.container.jdisc.RequestHandlerTestDriver.MockResponseHandler;
import com.yahoo.jdisc.Metric;
import com.yahoo.jdisc.Response;
import com.yahoo.jdisc.http.filter.DiscFilterRequest;
import com.yahoo.jdisc.http.filter.security.rule.RuleBasedFilterConfig.DefaultRule;
import com.yahoo.jdisc.http.filter.security.rule.RuleBasedFilterConfig.Rule;
import com.yahoo.test.json.JsonTestHelper;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* @author bjorncs
*/
class RuleBasedRequestFilterTest {
private static final ObjectMapper jsonMapper = new ObjectMapper();
@Test
void matches_rule_that_allows_all_methods_and_paths() {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.BLOCK))
.rule(new Rule.Builder()
.name("first")
.hostNames("myserver")
.pathExpressions(List.of())
.methods(List.of())
.action(Rule.Action.Enum.ALLOW))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("PATCH", "http://myserver:80/path-to-resource"), responseHandler);
assertAllowed(responseHandler, metric);
}
@Test
void performs_action_on_first_matching_rule() throws IOException {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.ALLOW))
.rule(new Rule.Builder()
.name("first")
.pathExpressions("/path-to-resource")
.methods(Rule.Methods.Enum.DELETE)
.action(Rule.Action.Enum.BLOCK)
.blockResponseCode(403))
.rule(new Rule.Builder()
.name("second")
.pathExpressions("/path-to-resource")
.methods(Rule.Methods.Enum.GET)
.action(Rule.Action.Enum.BLOCK)
.blockResponseCode(404))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("GET", "http://myserver:80/path-to-resource"), responseHandler);
assertBlocked(responseHandler, metric, 404, "");
}
@Test
void performs_default_action_if_no_rule_matches() throws IOException {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.BLOCK)
.blockResponseCode(403)
.blockResponseMessage("my custom message"))
.rule(new Rule.Builder()
.name("rule")
.pathExpressions("/path-to-resource")
.methods(Rule.Methods.Enum.GET)
.action(Rule.Action.Enum.ALLOW))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("POST", "http://myserver:80/"), responseHandler);
assertBlocked(responseHandler, metric, 403, "my custom message");
}
@Test
void matches_rule_with_multiple_alternatives_for_host_path_and_method() throws IOException {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.ALLOW))
.rule(new Rule.Builder()
.name("rule")
.hostNames(Set.of("server1", "server2", "server3"))
.pathExpressions(Set.of("/path-to-resource/{*}", "/another-path"))
.methods(Set.of(Rule.Methods.Enum.GET, Rule.Methods.POST, Rule.Methods.DELETE))
.action(Rule.Action.Enum.BLOCK)
.blockResponseCode(404)
.blockResponseMessage("not found"))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("POST", "https://server1:443/path-to-resource/id/1/subid/2"), responseHandler);
assertBlocked(responseHandler, metric, 404, "not found");
}
@Test
void no_filtering_if_request_is_allowed() {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.ALLOW))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("DELETE", "http://myserver:80/"), responseHandler);
assertAllowed(responseHandler, metric);
}
@Test
void includes_default_rule_response_headers_in_response_for_blocked_request() throws IOException {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.BLOCK)
.blockResponseHeaders(new DefaultRule.BlockResponseHeaders.Builder()
.name("Response-Header-1").value("first-header"))
.blockResponseHeaders(new DefaultRule.BlockResponseHeaders.Builder()
.name("Response-Header-2").value("second-header")))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("GET", "http://myserver:80/"), responseHandler);
assertBlocked(responseHandler, metric, 403, "");
Response response = responseHandler.getResponse();
assertResponseHeader(response, "Response-Header-1", "first-header");
assertResponseHeader(response, "Response-Header-2", "second-header");
}
@Test
void includes_rule_response_headers_in_response_for_blocked_request() throws IOException {
RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
.dryrun(false)
.defaultRule(new DefaultRule.Builder()
.action(DefaultRule.Action.Enum.ALLOW))
.rule(new Rule.Builder()
.name("rule")
.pathExpressions("/path-to-resource")
.action(Rule.Action.Enum.BLOCK)
.blockResponseHeaders(new Rule.BlockResponseHeaders.Builder()
.name("Response-Header-1").value("first-header")))
.build();
Metric metric = mock(Metric.class);
RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
MockResponseHandler responseHandler = new MockResponseHandler();
filter.filter(request("GET", "http://myserver/path-to-resource"), responseHandler);
assertBlocked(responseHandler, metric, 403, "");
Response response = responseHandler.getResponse();
assertResponseHeader(response, "Response-Header-1", "first-header");
}
private void assertResponseHeader(Response response, String name, String expectedValue) {
List<String> actualValues = response.headers().get(name);
assertNotNull(actualValues);
assertEquals(1, actualValues.size());
assertEquals(expectedValue, actualValues.get(0));
}
private static DiscFilterRequest request(String method, String uri) {
DiscFilterRequest request = mock(DiscFilterRequest.class);
when(request.getMethod()).thenReturn(method);
when(request.getUri()).thenReturn(URI.create(uri));
return request;
}
private static void assertAllowed(MockResponseHandler handler, Metric metric) {
verify(metric).add(eq("jdisc.http.filter.rule.allowed_requests"), eq(1L), any());
assertNull(handler.getResponse());
}
private static void assertBlocked(MockResponseHandler handler, Metric metric, int expectedCode, String expectedMessage) throws IOException {
verify(metric).add(eq("jdisc.http.filter.rule.blocked_requests"), eq(1L), any());
Response response = handler.getResponse();
assertNotNull(response);
assertEquals(expectedCode, response.getStatus());
ObjectNode expectedJson = jsonMapper.createObjectNode();
expectedJson.put("message", expectedMessage).put("code", expectedCode);
JsonNode actualJson = jsonMapper.readTree(handler.readAll().getBytes());
JsonTestHelper.assertJsonEquals(expectedJson, actualJson);
}
}
|
Test dryrun
|
jdisc-security-filters/src/test/java/com/yahoo/jdisc/http/filter/security/rule/RuleBasedRequestFilterTest.java
|
Test dryrun
|
|
Java
|
apache-2.0
|
719b7ae95ebf9144c24369bca36c4fb6c0337880
| 0
|
rlangbehn/rules,rlangbehn/rules
|
/*****************************************************************************
* $Id$
*
* Copyright 2008, The Rules Framework Development Team, and individual
* contributors as indicated by the @authors tag. See the copyright.txt
* in the distribution for a full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
****************************************************************************/
package net.sourceforge.rules.service;
import java.rmi.RemoteException;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import javax.ejb.Local;
import javax.ejb.Remote;
import javax.ejb.Stateless;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.rules.InvalidRuleSessionException;
import javax.rules.ObjectFilter;
import javax.rules.RuleExecutionSetNotFoundException;
import javax.rules.RuleRuntime;
import javax.rules.RuleSession;
import javax.rules.RuleSessionCreateException;
import javax.rules.RuleSessionTypeUnsupportedException;
import javax.rules.StatelessRuleSession;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* TODO
*
* @version $Revision$ $Date$
* @author <a href="mailto:rlangbehn@users.sourceforge.net">Rainer Langbehn</a>
*/
@Local({StatelessDecisionService.class})
@Remote({StatelessDecisionServiceRemote.class})
@Stateless(name="StatelessDecisionService")
//@WebService(serviceName="StatelessDecisionService")
public class StatelessDecisionServiceBean implements StatelessDecisionServiceRemote
{
    // Constants -------------------------------------------------------------

    /** Class logger. */
    private static final Log log = LogFactory.getLog(
            StatelessDecisionServiceBean.class);

    /** Serialization version marker for the remote interface. */
    private static final long serialVersionUID = 1L;

    // Attributes ------------------------------------------------------------

    /** JSR-94 runtime used to open rule sessions; injected via {@link #setRuleRuntime}. */
    private RuleRuntime ruleRuntime;

    // StatelessDecisionService implementation -------------------------------

    /**
     * Executes the rule execution set registered under the given bind URI
     * against the supplied input objects in a stateless rule session and
     * returns the objects produced by the rules (filtered to output objects
     * only, see {@link #createObjectFilter}).
     *
     * @param bindUri registration URI of the rule execution set to run
     * @param properties provider-specific session properties, may be null
     * @param inputObjects facts asserted into the session
     * @return the objects selected by the object filter
     * @throws DecisionServiceException wrapping any JSR-94 or remoting failure
     */
    @WebMethod()
    @WebResult(name="outputObjects")
    public List<?> decide(
            @WebParam(name="bindUri")
            String bindUri,
            @WebParam(name="properties")
            @XmlJavaTypeAdapter(MapAdapter.class)
            Map<?, ?> properties,
            @WebParam(name="inputObjects")
            List<?> inputObjects)
    throws DecisionServiceException {

        if (log.isDebugEnabled()) {
            log.debug("Executing RuleExecutionSet" //$NON-NLS-1$
                    + "\n\tBindURI : " + bindUri //$NON-NLS-1$
                    + "\n\tProperties : " + properties //$NON-NLS-1$
                    + "\n\tInputObjects: " + inputObjects); //$NON-NLS-1$
        }

        StatelessRuleSession session = null;

        try {
            session = (StatelessRuleSession) ruleRuntime.createRuleSession(
                    bindUri, properties, RuleRuntime.STATELESS_SESSION_TYPE);

            List<?> outputObjects = session.executeRules(
                    inputObjects,
                    createObjectFilter(inputObjects, properties));

            // Release eagerly on success; the finally block then sees null
            // and does nothing. On failure it performs a best-effort release.
            session.release();
            session = null;

            return outputObjects;

        } catch (RuleSessionTypeUnsupportedException e) {
            throw new DecisionServiceException(
                    Messages.getError("StatelessDecisionServiceBean.4"), e); //$NON-NLS-1$
        } catch (RuleSessionCreateException e) {
            throw new DecisionServiceException(
                    Messages.getError("StatelessDecisionServiceBean.5"), e); //$NON-NLS-1$
        } catch (RuleExecutionSetNotFoundException e) {
            throw new DecisionServiceException(
                    Messages.getError("StatelessDecisionServiceBean.6"), e); //$NON-NLS-1$
        } catch (RemoteException e) {
            throw new DecisionServiceException(
                    Messages.getError("StatelessDecisionServiceBean.7"), e); //$NON-NLS-1$
        } catch (InvalidRuleSessionException e) {
            throw new DecisionServiceException(
                    Messages.getError("StatelessDecisionServiceBean.8"), e); //$NON-NLS-1$
        } finally {
            release(session);
        }
    }

    // Public ----------------------------------------------------------------

    /**
     * Container injection point for the JSR-94 rule runtime.
     *
     * @param ruleRuntime the ruleRuntime to set
     */
    @Resource(mappedName="java:/RuleSessionFactory")
    @WebMethod(exclude=true)
    public void setRuleRuntime(RuleRuntime ruleRuntime) {
        this.ruleRuntime = ruleRuntime;
    }

    // Protected -------------------------------------------------------------

    /**
     * Creates the filter applied to the session's result objects; by default
     * only objects not present in the input are returned.
     *
     * @param inputObjects the facts that were asserted
     * @param properties the session properties (unused by the default filter)
     * @return the filter to pass to {@code executeRules}
     */
    protected ObjectFilter createObjectFilter(
            List<?> inputObjects,
            Map<?, ?> properties) {
        return new OutputObjectsOnlyObjectFilter(inputObjects);
    }

    // Private ---------------------------------------------------------------

    /**
     * Unconditionally releases the given <code>RuleSession</code>, ignoring
     * (but logging) any failure. Safe to call with null or an already
     * released session; intended for finally blocks.
     *
     * @param session the session to release, may be null
     */
    private void release(RuleSession session) {
        if (session == null) {
            return;
        }
        try {
            session.release();
        } catch (InvalidRuleSessionException e) {
            log.warn("Error while releasing rule session", e); //$NON-NLS-1$
        } catch (RemoteException e) {
            log.warn("Error while releasing rule session", e); //$NON-NLS-1$
        }
    }
}
|
rules-service/rules-service-impl/src/main/java/net/sourceforge/rules/service/StatelessDecisionServiceBean.java
|
/*****************************************************************************
* $Id$
*
* Copyright 2008, The Rules Framework Development Team, and individual
* contributors as indicated by the @authors tag. See the copyright.txt
* in the distribution for a full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
****************************************************************************/
package net.sourceforge.rules.service;
import java.rmi.RemoteException;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import javax.ejb.Local;
import javax.ejb.Remote;
import javax.ejb.Stateless;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.rules.InvalidRuleSessionException;
import javax.rules.ObjectFilter;
import javax.rules.RuleExecutionSetNotFoundException;
import javax.rules.RuleRuntime;
import javax.rules.RuleSession;
import javax.rules.RuleSessionCreateException;
import javax.rules.RuleSessionTypeUnsupportedException;
import javax.rules.StatelessRuleSession;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* TODO
*
* @version $Revision$ $Date$
* @author <a href="mailto:rlangbehn@users.sourceforge.net">Rainer Langbehn</a>
*/
@Local({StatelessDecisionService.class})
@Remote({StatelessDecisionServiceRemote.class})
@Stateless(name="StatelessDecisionService")
// Web-service exposure is deliberately disabled in this variant; the bean is
// reachable only through its EJB local/remote interfaces.
//@WebService(serviceName="StatelessDecisionService")
public class StatelessDecisionServiceBean implements StatelessDecisionServiceRemote
{
    // Constants -------------------------------------------------------------
    /**
     * The <code>Log</code> instance for this class.
     */
    private static final Log log = LogFactory.getLog(
            StatelessDecisionServiceBean.class);
    /**
     * Serialization version marker for the remote interface.
     */
    private static final long serialVersionUID = 1L;
    // Attributes ------------------------------------------------------------
    /**
     * JSR-94 runtime used to open rule sessions; injected by the container
     * via {@link #setRuleRuntime}.
     */
    private RuleRuntime ruleRuntime;
    // Static ----------------------------------------------------------------
    // Constructors ----------------------------------------------------------
    // StatelessDecisionService implementation -------------------------------
    /**
     * Executes the rule execution set registered under {@code bindUri}
     * against {@code inputObjects} in a stateless JSR-94 rule session and
     * returns the objects selected by {@link #createObjectFilter}. Any
     * JSR-94 or remoting failure is wrapped in a
     * {@link DecisionServiceException} with a localized message.
     *
     * @see net.sourceforge.rules.service.StatelessDecisionService#decide(java.lang.String, java.util.Map, java.util.List)
     */
    // JAX-WS annotations are kept but commented out, matching the disabled
    // @WebService annotation on the class.
    //@WebMethod()
    //@WebResult(name="outputObjects")
    public List<?> decide(
        //@WebParam(name="bindUri")
        String bindUri,
        //@WebParam(name="properties")
        //@XmlJavaTypeAdapter(MapAdapter.class)
        Map<?, ?> properties,
        //@WebParam(name="inputObjects")
        List<?> inputObjects)
    throws DecisionServiceException {
        if (log.isDebugEnabled()) {
            StringBuilder sb = new StringBuilder("Executing RuleExecutionSet"); //$NON-NLS-1$
            sb.append("\n\tBindURI : ").append(bindUri); //$NON-NLS-1$
            sb.append("\n\tProperties : ").append(properties); //$NON-NLS-1$
            sb.append("\n\tInputObjects: ").append(inputObjects); //$NON-NLS-1$
            log.debug(sb.toString());
        }
        int sessionType = RuleRuntime.STATELESS_SESSION_TYPE;
        StatelessRuleSession ruleSession = null;
        ObjectFilter objectFilter = null;
        List<?> outputObjects = null;
        try {
            ruleSession = (StatelessRuleSession)
                ruleRuntime.createRuleSession(bindUri, properties, sessionType);
            objectFilter = createObjectFilter(inputObjects, properties);
            outputObjects = ruleSession.executeRules(
                inputObjects,
                objectFilter
            );
            // Released eagerly on success; nulling the reference makes the
            // finally-block release a no-op. On failure the finally block
            // performs a best-effort release instead.
            ruleSession.release();
            ruleSession = null;
        } catch (RuleSessionTypeUnsupportedException e) {
            String s = Messages.getError("StatelessDecisionServiceBean.4"); //$NON-NLS-1$
            throw new DecisionServiceException(s, e);
        } catch (RuleSessionCreateException e) {
            String s = Messages.getError("StatelessDecisionServiceBean.5"); //$NON-NLS-1$
            throw new DecisionServiceException(s, e);
        } catch (RuleExecutionSetNotFoundException e) {
            String s = Messages.getError("StatelessDecisionServiceBean.6"); //$NON-NLS-1$
            throw new DecisionServiceException(s, e);
        } catch (RemoteException e) {
            String s = Messages.getError("StatelessDecisionServiceBean.7"); //$NON-NLS-1$
            throw new DecisionServiceException(s, e);
        } catch (InvalidRuleSessionException e) {
            String s = Messages.getError("StatelessDecisionServiceBean.8"); //$NON-NLS-1$
            throw new DecisionServiceException(s, e);
        } finally {
            release(ruleSession);
        }
        return outputObjects;
    }
    // Public ----------------------------------------------------------------
    /**
     * Container injection point for the JSR-94 rule runtime.
     *
     * @param ruleRuntime the ruleRuntime to set
     */
    @Resource(mappedName="java:/RuleSessionFactory")
    @WebMethod(exclude=true)
    public void setRuleRuntime(RuleRuntime ruleRuntime) {
        this.ruleRuntime = ruleRuntime;
    }
    // Package protected -----------------------------------------------------
    // Protected -------------------------------------------------------------
    /**
     * Creates the filter applied to the session's result objects; the
     * default implementation returns only objects not present in the input.
     * Subclasses may override to select a different result set.
     *
     * @param inputObjects the facts that were asserted
     * @param properties the session properties (unused by the default filter)
     * @return the filter passed to {@code executeRules}
     */
    protected ObjectFilter createObjectFilter(
        List<?> inputObjects,
        Map<?, ?> properties) {
        return new OutputObjectsOnlyObjectFilter(inputObjects);
    }
    // Private ---------------------------------------------------------------
    /**
     * Unconditionally release the given <code>RuleSession</code>.
     * <p>
     * Equivalent to {@link javax.rules.RuleSession#release()},
     * except any exceptions will be ignored (logged at warn level).
     * This is typically used in finally blocks.
     *
     * @param ruleSession the <code>RuleSession</code> to be released,
     *        may be null or already released.
     */
    private void release(RuleSession ruleSession) {
        if (ruleSession != null) {
            try {
                ruleSession.release();
            } catch (InvalidRuleSessionException e) {
                String s = "Error while releasing rule session"; //$NON-NLS-1$
                log.warn(s, e);
            } catch (RemoteException e) {
                String s = "Error while releasing rule session"; //$NON-NLS-1$
                log.warn(s, e);
            }
        }
    }
    // Inner classes ---------------------------------------------------------
}
|
initial revision
|
rules-service/rules-service-impl/src/main/java/net/sourceforge/rules/service/StatelessDecisionServiceBean.java
|
initial revision
|
|
Java
|
apache-2.0
|
fa757810d0cd285eb3ed76b3eedfef01b9cc9d71
| 0
|
edx/edx-app-android,edx/edx-app-android,edx/edx-app-android,edx/edx-app-android,edx/edx-app-android,edx/edx-app-android
|
package org.edx.mobile.util.links;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.FragmentActivity;
import android.text.TextUtils;
import android.view.View;
import android.widget.Toast;
import com.google.inject.Inject;
import org.edx.mobile.R;
import org.edx.mobile.base.BaseFragmentActivity;
import org.edx.mobile.core.IEdxEnvironment;
import org.edx.mobile.course.CourseAPI;
import org.edx.mobile.course.CourseService;
import org.edx.mobile.http.HttpStatus;
import org.edx.mobile.http.HttpStatusException;
import org.edx.mobile.logger.Logger;
import org.edx.mobile.model.api.EnrolledCoursesResponse;
import org.edx.mobile.util.ResourceUtil;
import org.edx.mobile.view.common.TaskProgressCallback;
import org.edx.mobile.view.custom.URLInterceptorWebViewClient;
import org.edx.mobile.view.dialog.EnrollmentFailureDialogFragment;
import org.edx.mobile.view.dialog.IDialogCallback;
import java.util.HashMap;
import java.util.Map;
import okhttp3.ResponseBody;
import roboguice.RoboGuice;
/**
* A ready to use implementation of {@link org.edx.mobile.view.custom.URLInterceptorWebViewClient.ActionListener}
* for all the classes that need to handle a WebView's recognized links as defined in
* {@link WebViewLink} class.
*/
public class DefaultActionListener implements URLInterceptorWebViewClient.ActionListener {
private final Logger logger = new Logger(URLInterceptorWebViewClient.class);
/**
* Provides callbacks to know about the status of enrollment in a course.
*/
public interface EnrollCallback {
void onResponse(@NonNull final EnrolledCoursesResponse course);
void onFailure(@NonNull Throwable error);
void onUserNotLoggedIn(@NonNull String courseId, boolean emailOptIn);
}
@Inject
private IEdxEnvironment environment;
@Inject
private CourseService courseService;
@Inject
private CourseAPI courseApi;
private FragmentActivity activity;
private View progressWheel;
private EnrollCallback enrollCallback;
private boolean isTaskInProgress = false;
public DefaultActionListener(@NonNull FragmentActivity activity, @NonNull View progressWheel,
@NonNull EnrollCallback enrollCallback) {
this.activity = activity;
this.progressWheel = progressWheel;
this.enrollCallback = enrollCallback;
RoboGuice.injectMembers(activity, this);
}
@Override
public void onLinkRecognized(@NonNull WebViewLink helper) {
switch (helper.authority) {
case ENROLLED_PROGRAM_INFO: {
final CharSequence url = ResourceUtil.getFormattedString(
environment.getConfig().getProgramConfig().getDetailUrlTemplate(),
WebViewLink.Param.PATH_ID,
helper.params.get(WebViewLink.Param.PATH_ID));
environment.getRouter().showAuthenticatedWebviewActivity(activity, url.toString(),
activity.getString(R.string.label_my_programs));
break;
}
case ENROLLED_COURSE_INFO: {
final String courseId = helper.params.get(WebViewLink.Param.COURSE_ID);
new Handler().post(new Runnable() {
@Override
public void run() {
courseApi.getEnrolledCourses().enqueue(new CourseAPI.GetCourseByIdCallback(
activity, courseId,
new TaskProgressCallback.ProgressViewController(progressWheel)) {
@Override
protected void onResponse(@NonNull final EnrolledCoursesResponse course) {
environment.getRouter().showCourseDashboardTabs(activity, course, false);
}
@Override
protected void onFailure(@NonNull final Throwable error) {
Toast.makeText(activity, R.string.cannot_show_dashboard, Toast.LENGTH_SHORT).show();
}
});
}
});
break;
}
case COURSE_INFO: {
final String pathId = helper.params.get(WebViewLink.Param.PATH_ID);
if (!TextUtils.isEmpty(pathId)) {
logger.debug("PathId" + pathId);
environment.getRouter().showCourseInfo(activity, pathId);
}
break;
}
case PROGRAM_INFO: {
final String pathId = helper.params.get(WebViewLink.Param.PATH_ID);
if (!TextUtils.isEmpty(pathId)) {
logger.debug("PathId" + pathId);
// Program info coming soon
environment.getRouter().showProgramInfo(activity, pathId);
}
break;
}
case ENROLL: {
final String courseId = helper.params.get(WebViewLink.Param.COURSE_ID);
final String emailOptIn = helper.params.get(WebViewLink.Param.EMAIL_OPT);
onClickEnroll(courseId, Boolean.getBoolean(emailOptIn));
break;
}
}
}
/**
 * Enrolls the current user in the given course, guarding against duplicate
 * in-flight requests and deferring to the callback when no user is logged in.
 *
 * @param courseId   id of the course to enroll in
 * @param emailOptIn whether the user opted in to receiving course emails
 */
public void onClickEnroll(@NonNull final String courseId, final boolean emailOptIn) {
    if (isTaskInProgress) {
        // avoid duplicate actions
        logger.debug("already enroll task is in progress, so skipping Enroll action");
        return;
    }
    if (environment.getLoginPrefs().getUsername() == null) {
        // Not logged in: hand the pending enrollment parameters back to the caller.
        enrollCallback.onUserNotLoggedIn(courseId, emailOptIn);
        return;
    }
    isTaskInProgress = true;
    environment.getAnalyticsRegistry().trackEnrollClicked(courseId, emailOptIn);
    logger.debug("CourseId - " + courseId);
    logger.debug("Email option - " + emailOptIn);
    courseService.enrollInACourse(new CourseService.EnrollBody(courseId, emailOptIn))
            .enqueue(new CourseService.EnrollCallback(
                    activity,
                    new TaskProgressCallback.ProgressViewController(progressWheel)) {
                @Override
                protected void onResponse(@NonNull final ResponseBody responseBody) {
                    super.onResponse(responseBody);
                    logger.debug("Enrollment successful: " + courseId);
                    Toast.makeText(activity, activity.getString(R.string.you_are_now_enrolled), Toast.LENGTH_SHORT).show();
                    environment.getAnalyticsRegistry().trackEnrolmentSuccess(courseId, emailOptIn);
                    // NOTE(review): isTaskInProgress is never reset on this success path;
                    // presumably acceptable because navigation leaves this screen — confirm.
                    new Handler().post(new Runnable() {
                        @Override
                        public void run() {
                            // Fetch the freshly enrolled course so the dashboard can be opened.
                            courseApi.getEnrolledCourses().enqueue(new CourseAPI.GetCourseByIdCallback(
                                    activity, courseId,
                                    new TaskProgressCallback.ProgressViewController(progressWheel)) {
                                @Override
                                protected void onResponse(@NonNull final EnrolledCoursesResponse course) {
                                    enrollCallback.onResponse(course);
                                    environment.getRouter().showMainDashboard(activity);
                                    environment.getRouter().showCourseDashboardTabs(activity, course, false);
                                }

                                @Override
                                protected void onFailure(@NonNull final Throwable error) {
                                    logger.warn("Error during enroll api call\n" + error);
                                    isTaskInProgress = false;
                                    enrollCallback.onFailure(error);
                                    Toast.makeText(activity, R.string.cannot_show_dashboard, Toast.LENGTH_SHORT).show();
                                }
                            });
                        }
                    });
                }

                @Override
                protected void onFailure(@NonNull Throwable error) {
                    logger.warn("Error during enroll api call\n" + error);
                    isTaskInProgress = false;
                    enrollCallback.onFailure(error);
                    if (activity instanceof BaseFragmentActivity) {
                        final BaseFragmentActivity baseFragmentActivity = (BaseFragmentActivity) activity;
                        // HTTP 400 means the server rejected the enrollment outright — show an
                        // alert instead of offering a retry.
                        if (error instanceof HttpStatusException && ((HttpStatusException) error).getStatusCode() == HttpStatus.BAD_REQUEST) {
                            final HashMap<String, CharSequence> params = new HashMap<>();
                            params.put("platform_name", environment.getConfig().getPlatformName());
                            final CharSequence message = ResourceUtil.getFormattedString(activity.getResources(), R.string.enrollment_error_message, params);
                            baseFragmentActivity.showAlertDialog(activity.getString(R.string.enrollment_error_title), message.toString());
                        } else {
                            showEnrollErrorMessage(baseFragmentActivity, courseId, emailOptIn);
                        }
                    }
                }
            });
}
/**
 * Shows a non-cancelable retry/cancel dialog after a failed enrollment attempt.
 * Retrying re-invokes {@code onClickEnroll} with the same parameters.
 *
 * @param baseFragmentActivity host activity (must be started to show a dialog)
 * @param courseId             course that failed to enroll
 * @param emailOptIn           email opt-in flag to reuse on retry
 */
private void showEnrollErrorMessage(@NonNull BaseFragmentActivity baseFragmentActivity,
                                    final String courseId, final boolean emailOptIn) {
    if (baseFragmentActivity.isActivityStarted()) {
        // Diamond operator instead of the redundant explicit type arguments.
        final Map<String, String> dialogMap = new HashMap<>();
        dialogMap.put("message_1", activity.getString(R.string.enrollment_failure));
        dialogMap.put("yes_button", activity.getString(R.string.try_again));
        dialogMap.put("no_button", activity.getString(R.string.label_cancel));
        final EnrollmentFailureDialogFragment failureDialogFragment = EnrollmentFailureDialogFragment
                .newInstance(dialogMap, new IDialogCallback() {
                    @Override
                    public void onPositiveClicked() {
                        onClickEnroll(courseId, emailOptIn);
                    }

                    @Override
                    public void onNegativeClicked() {
                    }
                });
        failureDialogFragment.setStyle(DialogFragment.STYLE_NO_TITLE, 0);
        // Configure before show() so the dialog is never transiently cancelable.
        failureDialogFragment.setCancelable(false);
        failureDialogFragment.show(activity.getSupportFragmentManager(), "dialog");
    }
}
}
|
OpenEdXMobile/src/main/java/org/edx/mobile/util/links/DefaultActionListener.java
|
package org.edx.mobile.util.links;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.FragmentActivity;
import android.text.TextUtils;
import android.view.View;
import android.widget.Toast;
import com.google.inject.Inject;
import org.edx.mobile.R;
import org.edx.mobile.base.BaseFragmentActivity;
import org.edx.mobile.core.IEdxEnvironment;
import org.edx.mobile.course.CourseAPI;
import org.edx.mobile.course.CourseService;
import org.edx.mobile.http.HttpStatus;
import org.edx.mobile.http.HttpStatusException;
import org.edx.mobile.logger.Logger;
import org.edx.mobile.model.api.EnrolledCoursesResponse;
import org.edx.mobile.util.ResourceUtil;
import org.edx.mobile.view.common.TaskProgressCallback;
import org.edx.mobile.view.custom.URLInterceptorWebViewClient;
import org.edx.mobile.view.dialog.EnrollmentFailureDialogFragment;
import org.edx.mobile.view.dialog.IDialogCallback;
import java.util.HashMap;
import java.util.Map;
import okhttp3.ResponseBody;
import roboguice.RoboGuice;
/**
* A ready to use implementation of {@link org.edx.mobile.view.custom.URLInterceptorWebViewClient.ActionListener}
* for all the classes that need to handle a WebView's recognized links as defined in
* {@link WebViewLink} class.
*/
public class DefaultActionListener implements URLInterceptorWebViewClient.ActionListener {
    private final Logger logger = new Logger(URLInterceptorWebViewClient.class);

    /**
     * Provides callbacks to know about the status of enrollment in a course.
     */
    public interface EnrollCallback {
        void onResponse(@NonNull final EnrolledCoursesResponse course);

        void onFailure(@NonNull Throwable error);

        void onUserNotLoggedIn(@NonNull String courseId, boolean emailOptIn);
    }

    @Inject
    private IEdxEnvironment environment;

    @Inject
    private CourseService courseService;

    @Inject
    private CourseAPI courseApi;

    private FragmentActivity activity;
    private View progressWheel;
    private EnrollCallback enrollCallback;
    // Guards against duplicate enroll requests while one is already in flight.
    private boolean isTaskInProgress = false;

    /**
     * @param activity       host activity used for navigation and string resources
     * @param progressWheel  view shown while network calls are in progress
     * @param enrollCallback receiver of enrollment status callbacks
     */
    public DefaultActionListener(@NonNull FragmentActivity activity, @NonNull View progressWheel,
                                 @NonNull EnrollCallback enrollCallback) {
        this.activity = activity;
        this.progressWheel = progressWheel;
        this.enrollCallback = enrollCallback;
        RoboGuice.injectMembers(activity, this);
    }

    @Override
    public void onLinkRecognized(@NonNull WebViewLink helper) {
        // Dispatches each deep-link recognized inside the WebView to the matching native screen.
        switch (helper.authority) {
            case ENROLLED_PROGRAM_INFO: {
                // Build the program info URL from the discovery config template and the path id.
                final CharSequence url = ResourceUtil.getFormattedString(
                        environment.getConfig().getDiscoveryConfig().getProgramDiscoveryConfig().getInfoUrlTemplate(),
                        WebViewLink.Param.PATH_ID,
                        helper.params.get(WebViewLink.Param.PATH_ID));
                environment.getRouter().showAuthenticatedWebviewActivity(activity, url.toString(),
                        activity.getString(R.string.label_my_programs));
                break;
            }
            case ENROLLED_COURSE_INFO: {
                final String courseId = helper.params.get(WebViewLink.Param.COURSE_ID);
                // Post so the WebView callback returns before the network call is enqueued.
                new Handler().post(new Runnable() {
                    @Override
                    public void run() {
                        courseApi.getEnrolledCourses().enqueue(new CourseAPI.GetCourseByIdCallback(
                                activity, courseId,
                                new TaskProgressCallback.ProgressViewController(progressWheel)) {
                            @Override
                            protected void onResponse(@NonNull final EnrolledCoursesResponse course) {
                                environment.getRouter().showCourseDashboardTabs(activity, course, false);
                            }

                            @Override
                            protected void onFailure(@NonNull final Throwable error) {
                                Toast.makeText(activity, R.string.cannot_show_dashboard, Toast.LENGTH_SHORT).show();
                            }
                        });
                    }
                });
                break;
            }
            case COURSE_INFO: {
                final String pathId = helper.params.get(WebViewLink.Param.PATH_ID);
                if (!TextUtils.isEmpty(pathId)) {
                    logger.debug("PathId" + pathId);
                    environment.getRouter().showCourseInfo(activity, pathId);
                }
                break;
            }
            case PROGRAM_INFO: {
                final String pathId = helper.params.get(WebViewLink.Param.PATH_ID);
                if (!TextUtils.isEmpty(pathId)) {
                    logger.debug("PathId" + pathId);
                    environment.getRouter().showProgramInfo(activity, pathId);
                }
                break;
            }
            case ENROLL: {
                final String courseId = helper.params.get(WebViewLink.Param.COURSE_ID);
                final String emailOptIn = helper.params.get(WebViewLink.Param.EMAIL_OPT);
                // FIX: Boolean.getBoolean(name) reads a JVM *system property* called `name`
                // and would effectively always be false here; parseBoolean parses the
                // link parameter's own value ("true"/"false") as intended.
                onClickEnroll(courseId, Boolean.parseBoolean(emailOptIn));
                break;
            }
        }
    }

    /**
     * Enrolls the current user in the given course, guarding against duplicate
     * in-flight requests and deferring to the callback when no user is logged in.
     *
     * @param courseId   id of the course to enroll in
     * @param emailOptIn whether the user opted in to receiving course emails
     */
    public void onClickEnroll(@NonNull final String courseId, final boolean emailOptIn) {
        if (isTaskInProgress) {
            // avoid duplicate actions
            logger.debug("already enroll task is in progress, so skipping Enroll action");
            return;
        }
        if (environment.getLoginPrefs().getUsername() == null) {
            // Not logged in: hand the pending enrollment parameters back to the caller.
            enrollCallback.onUserNotLoggedIn(courseId, emailOptIn);
            return;
        }
        isTaskInProgress = true;
        environment.getAnalyticsRegistry().trackEnrollClicked(courseId, emailOptIn);
        logger.debug("CourseId - " + courseId);
        logger.debug("Email option - " + emailOptIn);
        courseService.enrollInACourse(new CourseService.EnrollBody(courseId, emailOptIn))
                .enqueue(new CourseService.EnrollCallback(
                        activity,
                        new TaskProgressCallback.ProgressViewController(progressWheel)) {
                    @Override
                    protected void onResponse(@NonNull final ResponseBody responseBody) {
                        super.onResponse(responseBody);
                        logger.debug("Enrollment successful: " + courseId);
                        Toast.makeText(activity, activity.getString(R.string.you_are_now_enrolled), Toast.LENGTH_SHORT).show();
                        environment.getAnalyticsRegistry().trackEnrolmentSuccess(courseId, emailOptIn);
                        new Handler().post(new Runnable() {
                            @Override
                            public void run() {
                                // Fetch the freshly enrolled course so the dashboard can be opened.
                                courseApi.getEnrolledCourses().enqueue(new CourseAPI.GetCourseByIdCallback(
                                        activity, courseId,
                                        new TaskProgressCallback.ProgressViewController(progressWheel)) {
                                    @Override
                                    protected void onResponse(@NonNull final EnrolledCoursesResponse course) {
                                        enrollCallback.onResponse(course);
                                        environment.getRouter().showMainDashboard(activity);
                                        environment.getRouter().showCourseDashboardTabs(activity, course, false);
                                    }

                                    @Override
                                    protected void onFailure(@NonNull final Throwable error) {
                                        logger.warn("Error during enroll api call\n" + error);
                                        isTaskInProgress = false;
                                        enrollCallback.onFailure(error);
                                        Toast.makeText(activity, R.string.cannot_show_dashboard, Toast.LENGTH_SHORT).show();
                                    }
                                });
                            }
                        });
                    }

                    @Override
                    protected void onFailure(@NonNull Throwable error) {
                        logger.warn("Error during enroll api call\n" + error);
                        isTaskInProgress = false;
                        enrollCallback.onFailure(error);
                        if (activity instanceof BaseFragmentActivity) {
                            final BaseFragmentActivity baseFragmentActivity = (BaseFragmentActivity) activity;
                            // HTTP 400 means the server rejected the enrollment outright — show an
                            // alert instead of offering a retry.
                            if (error instanceof HttpStatusException && ((HttpStatusException) error).getStatusCode() == HttpStatus.BAD_REQUEST) {
                                final HashMap<String, CharSequence> params = new HashMap<>();
                                params.put("platform_name", environment.getConfig().getPlatformName());
                                final CharSequence message = ResourceUtil.getFormattedString(activity.getResources(), R.string.enrollment_error_message, params);
                                baseFragmentActivity.showAlertDialog(activity.getString(R.string.enrollment_error_title), message.toString());
                            } else {
                                showEnrollErrorMessage(baseFragmentActivity, courseId, emailOptIn);
                            }
                        }
                    }
                });
    }

    /**
     * Shows a non-cancelable retry/cancel dialog after a failed enrollment attempt.
     * Retrying re-invokes {@link #onClickEnroll} with the same parameters.
     *
     * @param baseFragmentActivity host activity (must be started to show a dialog)
     * @param courseId             course that failed to enroll
     * @param emailOptIn           email opt-in flag to reuse on retry
     */
    private void showEnrollErrorMessage(@NonNull BaseFragmentActivity baseFragmentActivity,
                                        final String courseId, final boolean emailOptIn) {
        if (baseFragmentActivity.isActivityStarted()) {
            final Map<String, String> dialogMap = new HashMap<>();
            dialogMap.put("message_1", activity.getString(R.string.enrollment_failure));
            dialogMap.put("yes_button", activity.getString(R.string.try_again));
            dialogMap.put("no_button", activity.getString(R.string.label_cancel));
            final EnrollmentFailureDialogFragment failureDialogFragment = EnrollmentFailureDialogFragment
                    .newInstance(dialogMap, new IDialogCallback() {
                        @Override
                        public void onPositiveClicked() {
                            onClickEnroll(courseId, emailOptIn);
                        }

                        @Override
                        public void onNegativeClicked() {
                        }
                    });
            failureDialogFragment.setStyle(DialogFragment.STYLE_NO_TITLE, 0);
            // Configure before show() so the dialog is never transiently cancelable.
            failureDialogFragment.setCancelable(false);
            failureDialogFragment.show(activity.getSupportFragmentManager(), "dialog");
        }
    }
}
|
Fix program details screen loading issue
- LEARNER-7088
- It was happening due to a wrong URL being built from the config
|
OpenEdXMobile/src/main/java/org/edx/mobile/util/links/DefaultActionListener.java
|
Fix program details screen loading issue
|
|
Java
|
apache-2.0
|
b53d6057f96721530d4fe62cddd3430ee9662f20
| 0
|
grfeng/conductor,Netflix/conductor,Netflix/conductor,Netflix/conductor,grfeng/conductor,grfeng/conductor,Netflix/conductor,grfeng/conductor,grfeng/conductor,grfeng/conductor,Netflix/conductor
|
/*
* Copyright 2016 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.conductor.core.execution;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.common.metadata.tasks.PollData;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.Workflow.WorkflowStatus;
import com.netflix.conductor.core.WorkflowContext;
import com.netflix.conductor.core.config.Configuration;
import com.netflix.conductor.core.execution.ApplicationException.Code;
import com.netflix.conductor.core.execution.DeciderService.DeciderOutcome;
import com.netflix.conductor.core.execution.tasks.SubWorkflow;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.dao.ExecutionDAO;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.metrics.Monitors;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.CANCELED;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.FAILED;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.FAILED_WITH_TERMINAL_ERROR;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.IN_PROGRESS;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.SCHEDULED;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.SKIPPED;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.valueOf;
import static com.netflix.conductor.core.execution.ApplicationException.Code.CONFLICT;
import static com.netflix.conductor.core.execution.ApplicationException.Code.INVALID_INPUT;
import static com.netflix.conductor.core.execution.ApplicationException.Code.NOT_FOUND;
import static java.util.Comparator.comparingInt;
import static java.util.stream.Collectors.groupingBy;
import static java.util.stream.Collectors.maxBy;
import static java.util.stream.Collectors.toSet;
/**
* @author Viren Workflow services provider interface
*/
@Trace
public class WorkflowExecutor {
private static final Logger logger = LoggerFactory.getLogger(WorkflowExecutor.class);

// Data-access objects for workflow definitions, execution state, and task queues.
private final MetadataDAO metadataDAO;

private final ExecutionDAO executionDAO;

private final QueueDAO queueDAO;

// Decides which tasks to schedule next for a workflow.
private final DeciderService deciderService;

private final Configuration config;

private final ParametersUtils parametersUtils;

// Queue the sweeper uses to periodically re-evaluate running workflows.
public static final String deciderQueue = "_deciderQueue";

// Window in seconds within which a worker's last poll counts as "active".
private int activeWorkerLastPollnSecs;
/**
 * Creates the workflow executor with its collaborating services and DAOs.
 *
 * @param deciderService service that decides the next tasks to schedule
 * @param metadataDAO    access to workflow/task definitions
 * @param executionDAO   access to workflow/task execution state
 * @param queueDAO       access to task queues
 * @param parametersUtils helper for resolving task input parameters
 * @param config         system configuration source
 */
@Inject
public WorkflowExecutor(DeciderService deciderService, MetadataDAO metadataDAO, ExecutionDAO executionDAO,
                        QueueDAO queueDAO, ParametersUtils parametersUtils, Configuration config) {
    this.deciderService = deciderService;
    this.metadataDAO = metadataDAO;
    this.executionDAO = executionDAO;
    this.queueDAO = queueDAO;
    this.config = config;
    this.parametersUtils = parametersUtils;
    // Default active-worker poll window is 10 seconds unless overridden.
    activeWorkerLastPollnSecs = config.getIntProperty("tasks.active.worker.lastpoll", 10);
}
/** Starts a workflow with no triggering event. Returns the new workflow id. */
public String startWorkflow(String name, int version, String correlationId, Map<String, Object> input) {
    return startWorkflow(name, version, correlationId, input, null);
}

/** Starts a workflow triggered by the given event, with no parent workflow. */
public String startWorkflow(String name, int version, String correlationId, Map<String, Object> input, String event) {
    return startWorkflow(name, version, input, correlationId, null, null, event);
}

/** Starts a workflow whose input may live in external payload storage, with task-to-domain routing. */
public String startWorkflow(String name, int version, String correlationId, Map<String, Object> input, String externalInputPayloadStoragePath, String event, Map<String, String> taskToDomain) {
    return startWorkflow(name, version, input, externalInputPayloadStoragePath, correlationId, null, null, event, taskToDomain);
}

/** Starts a sub-workflow linked to a parent workflow and the parent's sub-workflow task. */
public String startWorkflow(String name, int version, Map<String, Object> input, String correlationId, String parentWorkflowId, String parentWorkflowTaskId, String event) {
    return startWorkflow(name, version, input, null, correlationId, parentWorkflowId, parentWorkflowTaskId, event, null);
}
// True when the poll happened within the last activeWorkerLastPollnSecs seconds.
private final Predicate<PollData> validateLastPolledTime = pd -> pd.getLastPollTime() > System.currentTimeMillis() - (activeWorkerLastPollnSecs * 1000);

// True when the task type is registered as a built-in system task (delegates to SystemTaskType).
private final Predicate<Task> isSystemTask = task -> SystemTaskType.is(task.getTaskType());

// True when the task has not yet reached a terminal status.
private final Predicate<Task> isNonTerminalTask = task -> !task.getStatus().isTerminal();
/**
 * Validates, persists, and kicks off a new workflow instance; this is the
 * terminal overload that all other {@code startWorkflow} variants delegate to.
 *
 * @param workflowName                    name of the registered workflow definition
 * @param workflowVersion                 version of the workflow definition
 * @param workflowInput                   workflow input map (may be null only if external storage path is set)
 * @param externalInputPayloadStoragePath location of the input payload in external storage, if any
 * @param correlationId                   caller-supplied correlation id
 * @param parentWorkflowId                id of the parent workflow when this is a sub-workflow
 * @param parentWorkflowTaskId            id of the parent's SUB_WORKFLOW task
 * @param event                           triggering event name, if event-driven
 * @param taskToDomain                    task-to-domain routing overrides
 * @return the generated id of the new workflow instance
 */
public String startWorkflow(String workflowName, int workflowVersion, Map<String, Object> workflowInput,
                            String externalInputPayloadStoragePath, String correlationId, String parentWorkflowId,
                            String parentWorkflowTaskId, String event, Map<String, String> taskToDomain) {
    // perform validations
    validateWorkflow(workflowName, workflowVersion, workflowInput, externalInputPayloadStoragePath);

    //A random UUID is assigned to the work flow instance
    String workflowId = IDGenerator.generate();

    // Persist the Workflow
    Workflow workflow = new Workflow();
    workflow.setWorkflowId(workflowId);
    workflow.setCorrelationId(correlationId);
    workflow.setWorkflowType(workflowName);
    workflow.setVersion(workflowVersion);
    workflow.setInput(workflowInput);
    workflow.setExternalInputPayloadStoragePath(externalInputPayloadStoragePath);
    workflow.setStatus(WorkflowStatus.RUNNING);
    workflow.setParentWorkflowId(parentWorkflowId);
    workflow.setParentWorkflowTaskId(parentWorkflowTaskId);
    workflow.setOwnerApp(WorkflowContext.get().getClientApp());
    workflow.setCreateTime(System.currentTimeMillis());
    workflow.setUpdatedBy(null);
    workflow.setUpdateTime(null);
    workflow.setEvent(event);
    workflow.setTaskToDomain(taskToDomain);
    executionDAO.createWorkflow(workflow);
    logger.info("A new instance of workflow {} created with workflow id {}", workflowName, workflowId);

    //then decide to see if anything needs to be done as part of the workflow
    decide(workflowId);

    return workflowId;
}
/**
 * Performs validations for starting a workflow: the definition must exist,
 * every SIMPLE task it references must have a task definition, and input must
 * be provided either inline or via an external storage path.
 *
 * @param workflowName        name of the workflow definition
 * @param workflowVersion     version of the workflow definition
 * @param workflowInput       inline workflow input (may be null if external path given)
 * @param externalStoragePath external storage location of the input payload
 * @throws ApplicationException if the validation fails
 */
private void validateWorkflow(String workflowName, int workflowVersion, Map<String, Object> workflowInput, String externalStoragePath) {
    try {
        //Check if the workflow definition is valid
        WorkflowDef workflowDefinition = metadataDAO.get(workflowName, workflowVersion);
        if (workflowDefinition == null) {
            logger.error("There is no workflow defined with name {} and version {}", workflowName, workflowVersion);
            throw new ApplicationException(Code.NOT_FOUND, "No such workflow defined. name=" + workflowName + ", version=" + workflowVersion);
        }

        // Only SIMPLE tasks need a registered task definition,
        //because everything else is a system defined task
        Set<String> missingTaskDefs = workflowDefinition.all().stream()
                .filter(task -> task.getType().equals(WorkflowTask.Type.SIMPLE.name()))
                .map(WorkflowTask::getName)
                .filter(task -> metadataDAO.getTaskDef(task) == null)
                .collect(toSet());

        if (!missingTaskDefs.isEmpty()) {
            logger.error("Cannot find the task definitions for the following tasks used in workflow: {}", missingTaskDefs);
            throw new ApplicationException(INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefs);
        }

        //Check if the input to the workflow is not null
        if (workflowInput == null && StringUtils.isBlank(externalStoragePath)) {
            logger.error("The input for the workflow {} cannot be NULL", workflowName);
            throw new ApplicationException(INVALID_INPUT, "NULL input passed when starting workflow");
        }
    } catch (Exception e) {
        // Record the failure metric before propagating to the caller.
        Monitors.recordWorkflowStartError(workflowName, WorkflowContext.get().getClientApp());
        throw e;
    }
}
/**
 * Clears the callback delay on every in-progress task of a running workflow so
 * they become immediately available for polling again.
 *
 * @param workflowId id of a non-terminal workflow
 * @return the same workflow id
 * @throws ApplicationException (CONFLICT) if the workflow is already terminal
 */
public String resetCallbacksForInProgressTasks(String workflowId) {
    Workflow workflow = executionDAO.getWorkflow(workflowId, true);
    if (workflow.getStatus().isTerminal()) {
        throw new ApplicationException(CONFLICT, "Workflow is completed.  status=" + workflow.getStatus());
    }

    // Get tasks that are in progress and have callbackAfterSeconds > 0
    // and set the callbackAfterSeconds to 0;
    for (Task t : workflow.getTasks()) {
        if (t.getStatus().equals(IN_PROGRESS) &&
                t.getCallbackAfterSeconds() > 0) {
            // Only persist the task change if the queue offset was actually reset.
            if (queueDAO.setOffsetTime(QueueUtils.getQueueName(t), t.getTaskId(), 0)) {
                t.setCallbackAfterSeconds(0);
                executionDAO.updateTask(t);
            }
        }
    }
    return workflowId;
}
/**
 * Re-runs a workflow, optionally from a specific task, with new inputs.
 *
 * @param request rerun request; {@code reRunFromWorkflowId} is required
 * @return the id of the workflow that was re-run
 * @throws ApplicationException (INVALID_INPUT) if the rerun-from task cannot be found
 */
public String rerun(RerunWorkflowRequest request) {
    Preconditions.checkNotNull(request.getReRunFromWorkflowId(), "reRunFromWorkflowId is missing");
    if (!rerunWF(request.getReRunFromWorkflowId(), request.getReRunFromTaskId(), request.getTaskInput(),
            request.getWorkflowInput(), request.getCorrelationId())) {
        throw new ApplicationException(INVALID_INPUT, "Task " + request.getReRunFromTaskId() + " not found");
    }
    return request.getReRunFromWorkflowId();
}
/**
 * Restarts a terminal workflow from scratch: removes all its tasks, resets its
 * state to RUNNING, and invokes the decider.
 *
 * @param workflowId id of a terminal workflow
 * @throws ApplicationException (CONFLICT) if the workflow is still running, or
 *         if it completed and its definition is marked non-restartable
 */
public void rewind(String workflowId) {
    Workflow workflow = executionDAO.getWorkflow(workflowId, true);
    if (!workflow.getStatus().isTerminal()) {
        throw new ApplicationException(CONFLICT, "Workflow is still running.  status=" + workflow.getStatus());
    }

    WorkflowDef workflowDef = metadataDAO.get(workflow.getWorkflowType(), workflow.getVersion());
    if (!workflowDef.isRestartable() && workflow.getStatus().equals(WorkflowStatus.COMPLETED)) { // Can only restart non completed workflows when the configuration is set to false
        throw new ApplicationException(CONFLICT, String.format("WorkflowId: %s is an instance of WorkflowDef: %s and version: %d and is non restartable",
                workflowId, workflowDef.getName(), workflowDef.getVersion()));
    }

    // Remove all the tasks...
    workflow.getTasks().forEach(t -> executionDAO.removeTask(t.getTaskId()));
    workflow.getTasks().clear();
    workflow.setReasonForIncompletion(null);
    workflow.setStartTime(System.currentTimeMillis());
    workflow.setEndTime(0);
    // Change the status to running
    workflow.setStatus(WorkflowStatus.RUNNING);
    executionDAO.updateWorkflow(workflow);
    decide(workflowId);
}
/**
 * Gets the last instance of each failed task and reschedules each.
 * Gets all cancelled tasks and reschedules all of them except JOIN (a cancelled
 * join has its status changed back to IN_PROGRESS instead).
 * Switches the workflow back to RUNNING status and calls the decider.
 *
 * @param workflowId id of a terminal workflow with at least one failed task
 * @throws ApplicationException (CONFLICT) if the workflow is still running,
 *         never started, or has no failed tasks to retry
 */
public void retry(String workflowId) {
    Workflow workflow = executionDAO.getWorkflow(workflowId, true);
    if (!workflow.getStatus().isTerminal()) {
        throw new ApplicationException(CONFLICT, "Workflow is still running.  status=" + workflow.getStatus());
    }
    if (workflow.getTasks().isEmpty()) {
        throw new ApplicationException(CONFLICT, "Workflow has not started yet");
    }

    List<Task> failedTasks = getFailedTasksToRetry(workflow);

    List<Task> cancelledTasks = workflow.getTasks().stream()
            .filter(x->CANCELED.equals(x.getStatus())).collect(Collectors.toList());

    if (failedTasks.isEmpty()) {
        throw new ApplicationException(CONFLICT,
                "There are no failed tasks! Use restart if you want to attempt entire workflow execution again.");
    }

    List<Task> rescheduledTasks = new ArrayList<>();
    failedTasks.forEach(failedTask -> {
        rescheduledTasks.add(taskToBeRescheduled(failedTask));
    });

    // Reschedule the cancelled task but if the join is cancelled set that to in progress
    cancelledTasks.forEach(cancelledTask -> {
        if (cancelledTask.getTaskType().equalsIgnoreCase(WorkflowTask.Type.JOIN.toString())) {
            cancelledTask.setStatus(IN_PROGRESS);
            executionDAO.updateTask(cancelledTask);
        } else {
            rescheduledTasks.add(taskToBeRescheduled(cancelledTask));
        }
    });

    // Schedule first, then flip the workflow back to RUNNING and persist everything.
    scheduleTask(workflow, rescheduledTasks);

    workflow.setStatus(WorkflowStatus.RUNNING);
    executionDAO.updateWorkflow(workflow);
    executionDAO.updateTasks(workflow.getTasks());

    decide(workflowId);
}
/**
 * Gets the failed tasks eligible for retry: for each task reference name, only
 * the latest failed attempt (highest sequence id) is returned.
 *
 * @param workflow workflow whose tasks are examined
 * @return list of latest failed tasks, one for each task reference name
 */
@VisibleForTesting
List<Task> getFailedTasksToRetry(Workflow workflow) {
    return workflow.getTasks().stream()
            .filter(x -> FAILED.equals(x.getStatus()))
            .collect(groupingBy(Task::getReferenceTaskName, maxBy(comparingInt(Task::getSeq))))
            .values().stream().filter(Optional::isPresent).map(Optional::get).collect(Collectors.toList());
}
/**
 * Reschedules a task by cloning it into a fresh SCHEDULED attempt; the original
 * task is marked as retried and persisted.
 *
 * @param task failed or cancelled task
 * @return new instance of a task with "SCHEDULED" status
 */
private Task taskToBeRescheduled(Task task) {
    Task taskToBeRetried = task.copy();
    taskToBeRetried.setTaskId(IDGenerator.generate());
    // Link the retry back to the attempt it replaces.
    taskToBeRetried.setRetriedTaskId(task.getTaskId());
    taskToBeRetried.setStatus(SCHEDULED);
    taskToBeRetried.setRetryCount(task.getRetryCount() + 1);

    // update the failed task in the DAO
    task.setRetried(true);
    executionDAO.updateTask(task);
    return taskToBeRetried;
}
/**
 * Finds the pending (non-terminal) task with the given reference name in a workflow.
 *
 * @param taskReferenceName reference name of the task to look up
 * @param workflowId        id of the workflow whose tasks are searched
 * @return the pending task, or null when none is running
 */
public Task getPendingTaskByWorkflow(String taskReferenceName, String workflowId) {
    // There can only be one task by a given reference name running at a time.
    for (Task workflowTask : executionDAO.getTasksForWorkflow(workflowId)) {
        if (!workflowTask.getStatus().isTerminal()
                && workflowTask.getReferenceTaskName().equals(taskReferenceName)) {
            return workflowTask;
        }
    }
    return null;
}
/**
 * Marks a workflow COMPLETED, persists its output and tasks, resumes a failed
 * parent (for sub-workflows), and removes it from the sweeper queue.
 * Idempotent when the workflow is already COMPLETED; throws when it is in any
 * other terminal state.
 *
 * @param wf in-memory workflow carrying the final output and tasks
 * @throws ApplicationException (CONFLICT) if the stored workflow is terminal but not COMPLETED
 */
@VisibleForTesting
void completeWorkflow(Workflow wf) {
    logger.debug("Completing workflow execution for {}", wf.getWorkflowId());
    Workflow workflow = executionDAO.getWorkflow(wf.getWorkflowId(), false);

    if (workflow.getStatus().equals(WorkflowStatus.COMPLETED)) {
        // Already completed: just make sure it is off the pending list.
        executionDAO.removeFromPendingWorkflow(workflow.getWorkflowType(), workflow.getWorkflowId());
        logger.info("Workflow has already been completed.  Current status={}, workflowId= {}", workflow.getStatus(), wf.getWorkflowId());
        return;
    }

    if (workflow.getStatus().isTerminal()) {
        String msg = "Workflow has already been completed.  Current status " + workflow.getStatus();
        throw new ApplicationException(CONFLICT, msg);
    }

    deciderService.updateWorkflowOutput(wf, null);

    workflow.setStatus(WorkflowStatus.COMPLETED);
    workflow.setOutput(wf.getOutput());
    workflow.setExternalOutputPayloadStoragePath(wf.getExternalOutputPayloadStoragePath());
    executionDAO.updateWorkflow(workflow);
    logger.debug("Completed workflow execution for {}", wf.getWorkflowId());
    executionDAO.updateTasks(wf.getTasks());

    // If the following task, for some reason fails, the sweep will take care of this again!
    if (workflow.getParentWorkflowId() != null) {
        Workflow parent = executionDAO.getWorkflow(workflow.getParentWorkflowId(), false);
        WorkflowDef parentDef = metadataDAO.get(parent.getWorkflowType(), parent.getVersion());
        logger.debug("Completed sub-workflow {}, deciding parent workflow {}", wf.getWorkflowId(), wf.getParentWorkflowId());

        Task parentWorkflowTask = executionDAO.getTask(workflow.getParentWorkflowTaskId());
        // If parent is FAILED and the sub workflow task in parent is FAILED, we want to resume them
        if (StringUtils.isBlank(parentDef.getFailureWorkflow()) && parent.getStatus() == WorkflowStatus.FAILED && parentWorkflowTask.getStatus() == FAILED) {
            parentWorkflowTask.setStatus(IN_PROGRESS);
            executionDAO.updateTask(parentWorkflowTask);
            parent.setStatus(WorkflowStatus.RUNNING);
            executionDAO.updateWorkflow(parent);
        }
        decide(parent.getWorkflowId());
    }
    Monitors.recordWorkflowCompletion(workflow.getWorkflowType(), workflow.getEndTime() - workflow.getStartTime(), wf.getOwnerApp());
    queueDAO.remove(deciderQueue, workflow.getWorkflowId());    //remove from the sweep queue
    logger.debug("Removed workflow {} from decider queue", wf.getWorkflowId());
}
/**
 * Terminates a workflow by id with the given reason and no failure workflow.
 *
 * @param workflowId id of the workflow to terminate
 * @param reason     human-readable reason recorded on the workflow
 */
public void terminateWorkflow(String workflowId, String reason) {
    Workflow workflow = executionDAO.getWorkflow(workflowId, true);
    workflow.setStatus(WorkflowStatus.TERMINATED);
    terminateWorkflow(workflow, reason, null);
}
/**
 * Terminates a workflow: cancels all non-terminal tasks, drains their queues,
 * re-decides the parent (for sub-workflows), optionally starts a failure
 * workflow, and removes this workflow from the sweeper and pending lists.
 *
 * @param workflow        workflow to terminate (status preserved if already terminal)
 * @param reason          human-readable reason recorded on the workflow
 * @param failureWorkflow name of a failure workflow to start, or null/blank for none
 */
public void terminateWorkflow(Workflow workflow, String reason, String failureWorkflow) {
    if (!workflow.getStatus().isTerminal()) {
        workflow.setStatus(WorkflowStatus.TERMINATED);
    }

    deciderService.updateWorkflowOutput(workflow, null);

    String workflowId = workflow.getWorkflowId();
    workflow.setReasonForIncompletion(reason);
    executionDAO.updateWorkflow(workflow);

    List<Task> tasks = workflow.getTasks();
    for (Task task : tasks) {
        if (!task.getStatus().isTerminal()) {
            // Cancel the ones which are not completed yet....
            task.setStatus(CANCELED);
            if (isSystemTask.test(task)) {
                // Give the system task implementation a chance to clean up.
                WorkflowSystemTask workflowSystemTask = WorkflowSystemTask.get(task.getTaskType());
                workflowSystemTask.cancel(workflow, task, this);
            }
            executionDAO.updateTask(task);
        }
        // And remove from the task queue if they were there
        queueDAO.remove(QueueUtils.getQueueName(task), task.getTaskId());
    }

    // If the following lines, for some reason fails, the sweep will take
    // care of this again!
    if (workflow.getParentWorkflowId() != null) {
        Workflow parent = executionDAO.getWorkflow(workflow.getParentWorkflowId(), false);
        decide(parent.getWorkflowId());
    }

    if (!StringUtils.isBlank(failureWorkflow)) {
        // The failure workflow receives the terminated workflow's input plus context.
        Map<String, Object> input = new HashMap<>(workflow.getInput());
        input.put("workflowId", workflowId);
        input.put("reason", reason);
        input.put("failureStatus", workflow.getStatus().toString());

        try {
            WorkflowDef latestFailureWorkflow = metadataDAO.getLatest(failureWorkflow);
            String failureWFId = startWorkflow(failureWorkflow, latestFailureWorkflow.getVersion(), workflowId, input);
            workflow.getOutput().put("conductor.failure_workflow", failureWFId);
        } catch (Exception e) {
            // Best-effort: record the failure-to-start in the output rather than aborting termination.
            logger.error("Failed to start error workflow", e);
            workflow.getOutput().put("conductor.failure_workflow", "Error workflow " + failureWorkflow + " failed to start.  reason: " + e.getMessage());
            Monitors.recordWorkflowStartError(failureWorkflow, WorkflowContext.get().getClientApp());
        }
    }

    queueDAO.remove(deciderQueue, workflow.getWorkflowId());    //remove from the sweep queue
    executionDAO.removeFromPendingWorkflow(workflow.getWorkflowType(), workflow.getWorkflowId());

    // Send to atlas
    Monitors.recordWorkflowTermination(workflow.getWorkflowType(), workflow.getStatus(), workflow.getOwnerApp());
}
/**
 * Applies a worker-reported {@link TaskResult} to the persisted task, keeps the
 * task queue consistent with the new status, and then re-evaluates the owning
 * workflow via {@link #decide(String)}.
 *
 * @param taskResult the result reported by the worker; must not be null
 * @throws ApplicationException with code INVALID_INPUT when taskResult is null
 */
public void updateTask(TaskResult taskResult) {
if (taskResult == null) {
logger.info("null task given for update");
throw new ApplicationException(Code.INVALID_INPUT, "Task object is null");
}
String workflowId = taskResult.getWorkflowInstanceId();
Workflow workflowInstance = executionDAO.getWorkflow(workflowId);
Task task = executionDAO.getTask(taskResult.getTaskId());
logger.debug("Task: {} belonging to Workflow {} being updated", task, workflowInstance);
String taskQueueName = QueueUtils.getQueueName(task);
// Case 1: the workflow already reached a terminal state. Persist the worker's
// output for the record and drop the task from its queue, but do not drive the
// workflow any further.
if (workflowInstance.getStatus().isTerminal()) {
// Workflow is in terminal state
queueDAO.remove(taskQueueName, taskResult.getTaskId());
logger.debug("Workflow: {} is in terminal state Task: {} removed from Queue: {} during update task", workflowInstance, task, taskQueueName);
if (!task.getStatus().isTerminal()) {
task.setStatus(COMPLETED);
}
task.setOutputData(taskResult.getOutputData());
task.setReasonForIncompletion(taskResult.getReasonForIncompletion());
task.setWorkerId(taskResult.getWorkerId());
executionDAO.updateTask(task);
String msg = String.format("Workflow %s is already completed as %s, task=%s, reason=%s",
workflowInstance.getWorkflowId(), workflowInstance.getStatus(), task.getTaskType(), workflowInstance.getReasonForIncompletion());
logger.info(msg);
Monitors.recordUpdateConflict(task.getTaskType(), workflowInstance.getWorkflowType(), workflowInstance.getStatus());
return;
}
// Case 2: the task itself is already terminal (e.g. a duplicate or late update).
// Nothing to apply; just make sure it is no longer queued.
if (task.getStatus().isTerminal()) {
// Task was already updated....
queueDAO.remove(taskQueueName, taskResult.getTaskId());
logger.debug("Task: {} is in terminal state and is removed from the queue {} ", task, taskQueueName);
String msg = String.format("Task is already completed as %s@%d, workflow status=%s, workflowId=%s, taskId=%s",
task.getStatus(), task.getEndTime(), workflowInstance.getStatus(), workflowInstance.getWorkflowId(), task.getTaskId());
logger.info(msg);
Monitors.recordUpdateConflict(task.getTaskType(), workflowInstance.getWorkflowType(), task.getStatus());
return;
}
// Normal path: copy the worker-reported fields onto the stored task.
// TaskResult.Status is mapped to Task.Status by enum constant name.
task.setStatus(valueOf(taskResult.getStatus().name()));
task.setOutputData(taskResult.getOutputData());
task.setExternalOutputPayloadStoragePath(taskResult.getExternalOutputPayloadStoragePath());
task.setReasonForIncompletion(taskResult.getReasonForIncompletion());
task.setWorkerId(taskResult.getWorkerId());
task.setCallbackAfterSeconds(taskResult.getCallbackAfterSeconds());
if (task.getStatus().isTerminal()) {
task.setEndTime(System.currentTimeMillis());
}
executionDAO.updateTask(task);
//If the task has failed update the failed task reference name in the workflow.
//This gives the ability to look at workflow and see what tasks have failed at a high level.
if (FAILED.equals(task.getStatus()) || FAILED_WITH_TERMINAL_ERROR.equals(task.getStatus())) {
workflowInstance.getFailedReferenceTaskNames().add(task.getReferenceTaskName());
executionDAO.updateWorkflow(workflowInstance);
logger.debug("Task: {} has a {} status and the Workflow has been updated with failed task reference", task, task.getStatus());
}
taskResult.getLogs().forEach(taskExecLog -> taskExecLog.setTaskId(task.getTaskId()));
executionDAO.addTaskExecLog(taskResult.getLogs());
// Keep the queue in sync with the new status: terminal tasks leave the queue,
// IN_PROGRESS tasks are re-queued with the requested callback delay.
switch (task.getStatus()) {
case COMPLETED:
case CANCELED:
case FAILED:
case FAILED_WITH_TERMINAL_ERROR:
queueDAO.remove(taskQueueName, taskResult.getTaskId());
logger.debug("Task: {} removed from taskQueue: {} since the task status is {}", task, taskQueueName, task.getStatus().name());
break;
case IN_PROGRESS:
// put it back in queue based on callbackAfterSeconds
long callBack = taskResult.getCallbackAfterSeconds();
queueDAO.remove(taskQueueName, task.getTaskId());
logger.debug("Task: {} removed from taskQueue: {} since the task status is {}", task, taskQueueName, task.getStatus().name());
queueDAO.push(taskQueueName, task.getTaskId(), callBack); // offset is in seconds (callbackAfterSeconds), not milliseconds
logger.debug("Task: {} pushed back to taskQueue: {} since the task status is {} with callbackAfterSeconds: {}", task, taskQueueName, task.getStatus().name(), callBack);
break;
default:
break;
}
// Re-evaluate the workflow now that this task changed.
decide(workflowId);
if (task.getStatus().isTerminal()) {
long duration = getTaskDuration(0, task);
long lastDuration = task.getEndTime() - task.getStartTime();
Monitors.recordTaskExecutionTime(task.getTaskDefName(), duration, true, task.getStatus());
Monitors.recordTaskExecutionTime(task.getTaskDefName(), lastDuration, false, task.getStatus());
}
}
/**
 * Returns a page of tasks of the given type from the execution store.
 *
 * @param taskType task (definition) name to fetch
 * @param startKey paging cursor; implementation-specific
 * @param count    maximum number of tasks to return
 * @return the matching tasks
 */
public List<Task> getTasks(String taskType, String startKey, int count) {
    final List<Task> page = executionDAO.getTasks(taskType, startKey, count);
    return page;
}
/**
 * Returns all workflows of the given type that are still pending (non-terminal).
 *
 * @param workflowName workflow type name
 * @return running workflow instances of that type
 */
public List<Workflow> getRunningWorkflows(String workflowName) {
    final List<Workflow> pending = executionDAO.getPendingWorkflowsByType(workflowName);
    return pending;
}
/**
 * Returns the ids of workflows of the given name created within the time window,
 * optionally filtered by version.
 *
 * @param name      workflow type name
 * @param version   version to match; {@code null} matches every version
 *                  (previously a null version caused an auto-unboxing NPE in
 *                  {@code workflow.getVersion() == version})
 * @param startTime window start (epoch millis)
 * @param endTime   window end (epoch millis)
 * @return ids of the matching workflows
 */
public List<String> getWorkflows(String name, Integer version, Long startTime, Long endTime) {
    List<Workflow> workflowsByType = executionDAO.getWorkflowsByType(name, startTime, endTime);
    return workflowsByType.stream()
            // Guard before unboxing: null version means "any version".
            .filter(workflow -> version == null || workflow.getVersion() == version)
            .map(Workflow::getWorkflowId)
            .collect(Collectors.toList());
}
/**
 * Returns the ids of all currently running workflows of the given type.
 *
 * @param workflowName workflow type name
 * @return ids of running workflow instances
 */
public List<String> getRunningWorkflowIds(String workflowName) {
    final List<String> runningIds = executionDAO.getRunningWorkflowIds(workflowName);
    return runningIds;
}
/**
 * Runs the decider over the workflow: completes it, or schedules/updates tasks
 * per the decider outcome. Synchronous (non-async) system tasks are executed
 * inline, and if anything changed state decide() recurses once more to pick up
 * the consequences immediately.
 *
 * @param workflowId ID of the workflow to evaluate the state for
 * @return true if the workflow completed or was terminated during this call, false otherwise.
 */
public boolean decide(String workflowId) {
// If it is a new workflow, the tasks will be still empty even though include tasks is true
Workflow workflow = executionDAO.getWorkflow(workflowId, true);
WorkflowDef workflowDef = metadataDAO.get(workflow.getWorkflowType(), workflow.getVersion());
try {
DeciderOutcome outcome = deciderService.decide(workflow, workflowDef);
if (outcome.isComplete) {
completeWorkflow(workflow);
return true;
}
List<Task> tasksToBeScheduled = outcome.tasksToBeScheduled;
setTaskDomains(tasksToBeScheduled, workflow);
List<Task> tasksToBeUpdated = outcome.tasksToBeUpdated;
List<Task> tasksToBeRequeued = outcome.tasksToBeRequeued;
boolean stateChanged = false;
if (!tasksToBeRequeued.isEmpty()) {
addTaskToQueue(tasksToBeRequeued);
}
workflow.getTasks().addAll(tasksToBeScheduled);
// Execute synchronous system tasks inline; async system tasks and worker tasks
// are handled by scheduleTask() / the queues below.
for (Task task : tasksToBeScheduled) {
if (isSystemTask.and(isNonTerminalTask).test(task)) {
WorkflowSystemTask workflowSystemTask = WorkflowSystemTask.get(task.getTaskType());
if (!workflowSystemTask.isAsync() && workflowSystemTask.execute(workflow, task, this)) {
tasksToBeUpdated.add(task);
stateChanged = true;
}
}
}
stateChanged = scheduleTask(workflow, tasksToBeScheduled) || stateChanged;
if (!outcome.tasksToBeUpdated.isEmpty() || !outcome.tasksToBeScheduled.isEmpty()) {
executionDAO.updateTasks(tasksToBeUpdated);
executionDAO.updateWorkflow(workflow);
// Re-arm the sweeper so this workflow is re-decided after the sweep interval.
queueDAO.push(deciderQueue, workflow.getWorkflowId(), config.getSweepFrequency());
}
if (stateChanged) {
decide(workflowId);
}
} catch (TerminateWorkflowException twe) {
logger.info("Execution terminated of workflow: {} of type: {}", workflowId, workflowDef.getName(), twe);
terminate(workflowDef, workflow, twe);
return true;
} catch (RuntimeException e) {
logger.error("Error deciding workflow: {}", workflowId, e);
throw e;
}
return false;
}
/**
 * Pauses a running workflow. A no-op when the workflow is already paused.
 *
 * @param workflowId id of the workflow to pause
 * @throws ApplicationException with code CONFLICT when the workflow is already terminal
 */
public void pauseWorkflow(String workflowId) {
    Workflow workflow = executionDAO.getWorkflow(workflowId, false);
    if (workflow.getStatus().isTerminal()) {
        throw new ApplicationException(CONFLICT, "Workflow id " + workflowId + " has ended, status cannot be updated.");
    }
    if (workflow.getStatus().equals(WorkflowStatus.PAUSED)) {
        // Already paused — nothing to persist.
        return;
    }
    workflow.setStatus(WorkflowStatus.PAUSED);
    executionDAO.updateWorkflow(workflow);
}
/**
 * Resumes a PAUSED workflow and immediately re-evaluates it.
 *
 * @param workflowId id of the workflow to resume
 * @throws IllegalStateException when the workflow is not currently PAUSED
 */
public void resumeWorkflow(String workflowId) {
    Workflow workflow = executionDAO.getWorkflow(workflowId, false);
    if (!workflow.getStatus().equals(WorkflowStatus.PAUSED)) {
        String errorMsg = "The workflow " + workflowId + " is not PAUSED so cannot resume";
        throw new IllegalStateException(errorMsg);
    }
    workflow.setStatus(WorkflowStatus.RUNNING);
    executionDAO.updateWorkflow(workflow);
    // Kick the decider so any tasks held up by the pause are scheduled again.
    decide(workflowId);
}
/**
 * Marks a not-yet-executed task of a running workflow as SKIPPED and re-runs the
 * decider so execution can proceed past it.
 *
 * @param workflowId        id of the running workflow
 * @param taskReferenceName reference name of the task (per the workflow definition) to skip
 * @param skipTaskRequest   optional input/output to record on the skipped task; may be null
 * @throws IllegalStateException if the workflow is not RUNNING, the reference name is not in
 *                               the definition, or the task has already been processed
 */
public void skipTaskFromWorkflow(String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) {
Workflow wf = executionDAO.getWorkflow(workflowId, true);
// If the wf is not running then cannot skip any task
if (!wf.getStatus().equals(WorkflowStatus.RUNNING)) {
String errorMsg = String.format("The workflow %s is not running so the task referenced by %s cannot be skipped", workflowId, taskReferenceName);
throw new IllegalStateException(errorMsg);
}
// Check if the reference name is as per the workflowdef
WorkflowDef wfd = metadataDAO.get(wf.getWorkflowType(), wf.getVersion());
WorkflowTask wft = wfd.getTaskByRefName(taskReferenceName);
if (wft == null) {
String errorMsg = String.format("The task referenced by %s does not exist in the WorkflowDefinition %s", taskReferenceName, wf.getWorkflowType());
throw new IllegalStateException(errorMsg);
}
// If the task is already started the again it cannot be skipped
wf.getTasks().forEach(task -> {
if (task.getReferenceTaskName().equals(taskReferenceName)) {
String errorMsg = String.format("The task referenced %s has already been processed, cannot be skipped", taskReferenceName);
throw new IllegalStateException(errorMsg);
}
});
// Now create a "SKIPPED" task for this workflow
Task theTask = new Task();
theTask.setTaskId(IDGenerator.generate());
theTask.setReferenceTaskName(taskReferenceName);
theTask.setWorkflowInstanceId(workflowId);
theTask.setStatus(SKIPPED);
theTask.setTaskType(wft.getName());
theTask.setCorrelationId(wf.getCorrelationId());
if (skipTaskRequest != null) {
theTask.setInputData(skipTaskRequest.getTaskInput());
theTask.setOutputData(skipTaskRequest.getTaskOutput());
}
executionDAO.createTasks(Collections.singletonList(theTask));
// Re-decide so downstream tasks of the skipped one are scheduled.
decide(workflowId);
}
/**
 * Fetches a workflow by id, optionally including its tasks.
 *
 * @param workflowId   workflow id
 * @param includeTasks whether to also load the workflow's tasks
 * @return the workflow instance
 */
public Workflow getWorkflow(String workflowId, boolean includeTasks) {
    final Workflow workflow = executionDAO.getWorkflow(workflowId, includeTasks);
    return workflow;
}
/**
 * (Re-)enqueues a single task on its task queue, honoring any callback delay.
 * The existing queue entry (if any) is removed first so the task is queued at
 * most once.
 */
private void addTaskToQueue(Task task) {
    String taskQueueName = QueueUtils.getQueueName(task);
    queueDAO.remove(taskQueueName, task.getTaskId());
    // Non-positive callback means "available immediately" (offset 0).
    long offsetSeconds = task.getCallbackAfterSeconds() > 0 ? task.getCallbackAfterSeconds() : 0;
    queueDAO.push(taskQueueName, task.getTaskId(), offsetSeconds);
    logger.debug("Added task {} to queue {} with call back seconds {}", task, taskQueueName, task.getCallbackAfterSeconds());
}
/**
 * Executes one poll of an asynchronous system task (invoked by the system task
 * worker coordinator). Skips work if the task or its workflow is already
 * terminal; otherwise extends the queue un-ack timeout, runs start()/execute()
 * depending on the task status, and feeds the result back through updateTask().
 *
 * @param systemTask  the system task implementation to run
 * @param taskId      id of the task instance being executed
 * @param unackTimeout seconds to defer the next poll when the task stays non-terminal
 */
public void executeSystemTask(WorkflowSystemTask systemTask, String taskId, int unackTimeout) {
try {
Task task = executionDAO.getTask(taskId);
logger.info("Task: {} fetched from execution DAO for taskId: {}", task, taskId);
if (task.getStatus().isTerminal()) {
//Tune the SystemTaskWorkerCoordinator's queues - if the queue size is very big this can happen!
logger.info("Task {}/{} was already completed.", task.getTaskType(), task.getTaskId());
queueDAO.remove(QueueUtils.getQueueName(task), task.getTaskId());
return;
}
String workflowId = task.getWorkflowInstanceId();
Workflow workflow = executionDAO.getWorkflow(workflowId, true);
// First poll for this task: stamp the start time and record queue wait.
if (task.getStartTime() == 0) {
task.setStartTime(System.currentTimeMillis());
Monitors.recordQueueWaitTime(task.getTaskDefName(), task.getQueueWaitTime());
}
// The owning workflow finished while this task was queued — cancel it.
if (workflow.getStatus().isTerminal()) {
logger.warn("Workflow {} has been completed for {}/{}", workflow.getWorkflowId(), systemTask.getName(), task.getTaskId());
if (!task.getStatus().isTerminal()) {
task.setStatus(CANCELED);
}
executionDAO.updateTask(task);
queueDAO.remove(QueueUtils.getQueueName(task), task.getTaskId());
return;
}
// Concurrency and rate limits apply only before the task first starts.
if (task.getStatus().equals(SCHEDULED)) {
if (executionDAO.exceedsInProgressLimit(task)) {
//to do add a metric to record this
logger.warn("Concurrent Execution limited for {}:{}", taskId, task.getTaskDefName());
return;
}
if (task.getRateLimitPerFrequency() > 0 && executionDAO.exceedsRateLimitPerFrequency(task)) {
logger.warn("RateLimit Execution limited for {}:{}, limit:{}", taskId, task.getTaskDefName(), task.getRateLimitPerFrequency());
return;
}
}
logger.info("Executing {}/{}-{}", task.getTaskType(), task.getTaskId(), task.getStatus());
// Extend the un-ack window so the queue does not redeliver while we run.
queueDAO.setUnackTimeout(QueueUtils.getQueueName(task), task.getTaskId(), systemTask.getRetryTimeInSecond() * 1000);
task.setPollCount(task.getPollCount() + 1);
executionDAO.updateTask(task);
switch (task.getStatus()) {
case SCHEDULED:
systemTask.start(workflow, task, this);
break;
case IN_PROGRESS:
systemTask.execute(workflow, task, this);
break;
default:
break;
}
if (!task.getStatus().isTerminal()) {
task.setCallbackAfterSeconds(unackTimeout);
}
// Persist the outcome and drive the workflow via the regular update path.
updateTask(new TaskResult(task));
logger.info("Done Executing {}/{}-{} op={}", task.getTaskType(), task.getTaskId(), task.getStatus(), task.getOutputData().toString());
} catch (Exception e) {
logger.error("Error executing system task - {}, with id: {}", systemTask, taskId, e);
}
}
/**
 * Assigns a worker domain to each non-system task, based on the workflow's
 * taskToDomain mapping. A "*" key applies one ordered domain list to every
 * task type; otherwise each task type is looked up individually. The chosen
 * domain is the first one with recently active workers (see getActiveDomain).
 *
 * @param tasks tasks about to be scheduled; mutated in place
 * @param wf    workflow carrying the taskToDomain mapping (may be null — then no-op)
 */
private void setTaskDomains(List<Task> tasks, Workflow wf) {
Map<String, String> taskToDomain = wf.getTaskToDomain();
if (taskToDomain != null) {
// Check if all tasks have the same domain "*"
String domainstr = taskToDomain.get("*");
if (domainstr != null) {
String[] domains = domainstr.split(",");
tasks.forEach(task -> {
// Filter out SystemTask
if (!WorkflowTask.Type.isSystemTask(task.getTaskType())) {
// Check which domain worker is polling
// Set the task domain
task.setDomain(getActiveDomain(task.getTaskType(), domains));
}
});
} else {
// Per-task-type mapping: only tasks with an explicit entry get a domain.
tasks.forEach(task -> {
if (!WorkflowTask.Type.isSystemTask(task.getTaskType())) {
String taskDomainstr = taskToDomain.get(task.getTaskType());
if (taskDomainstr != null) {
task.setDomain(getActiveDomain(task.getTaskType(), taskDomainstr.split(",")));
}
}
});
}
}
}
/**
 * Picks the first domain (in the given preference order) whose workers have
 * polled recently enough to be considered active, or null if none have.
 *
 * @param taskType task type whose poll data is consulted
 * @param domains  ordered candidate domains
 * @return the active domain, or null when no candidate qualifies
 */
private String getActiveDomain(String taskType, String[] domains) {
    for (String candidate : domains) {
        PollData pollData = executionDAO.getPollData(taskType, candidate.trim());
        if (pollData != null && validateLastPolledTime.test(pollData)) {
            return pollData.getDomain();
        }
    }
    return null;
}
/**
 * Computes the total execution time of a task including all of its earlier
 * retry attempts, by walking the retriedTaskId chain.
 *
 * @param s    duration accumulated so far (pass 0 at the top-level call)
 * @param task current task in the retry chain
 * @return sum of (endTime - startTime) over this task and every prior attempt
 */
private long getTaskDuration(long s, Task task) {
    long duration = task.getEndTime() - task.getStartTime();
    s += duration;
    if (task.getRetriedTaskId() == null) {
        return s;
    }
    // Pass the accumulator down instead of adding it again on the way back up:
    // the previous "return s + getTaskDuration(s, ...)" double-counted every
    // duration accumulated before the recursive call.
    return getTaskDuration(s, executionDAO.getTask(task.getRetriedTaskId()));
}
/**
 * Persists newly decided tasks and dispatches them: synchronous system tasks
 * are started inline, everything else (worker tasks and async system tasks) is
 * pushed onto its task queue.
 *
 * @param workflow the owning workflow (used for seq numbering and system task context)
 * @param tasks    tasks to create; may be null or empty (no-op)
 * @return true if at least one synchronous system task was started inline
 * @throws ApplicationException NOT_FOUND when a system task type has no registered implementation
 */
@VisibleForTesting
boolean scheduleTask(Workflow workflow, List<Task> tasks) {
if (tasks == null || tasks.isEmpty()) {
return false;
}
// Get the highest seq number
int count = workflow.getTasks().stream()
.mapToInt(Task::getSeq)
.max()
.orElse(0);
// Assign sequence numbers only to tasks that don't have one yet.
for (Task task : tasks) {
if (task.getSeq() == 0) { // Set only if the seq was not set
task.setSeq(++count);
}
}
// Save the tasks in the DAO
List<Task> created = executionDAO.createTasks(tasks);
List<Task> createdSystemTasks = created.stream()
.filter(isSystemTask)
.collect(Collectors.toList());
List<Task> tasksToBeQueued = created.stream()
.filter(isSystemTask.negate())
.collect(Collectors.toList());
boolean startedSystemTasks = false;
// Traverse through all the system tasks, start the sync tasks, in case of async queue the tasks
for (Task task : createdSystemTasks) {
WorkflowSystemTask workflowSystemTask = WorkflowSystemTask.get(task.getTaskType());
if (workflowSystemTask == null) {
throw new ApplicationException(NOT_FOUND, "No system task found by name " + task.getTaskType());
}
task.setStartTime(System.currentTimeMillis());
if (!workflowSystemTask.isAsync()) {
workflowSystemTask.start(workflow, task, this);
startedSystemTasks = true;
executionDAO.updateTask(task);
} else {
// Async system tasks are queued like worker tasks.
tasksToBeQueued.add(task);
}
}
addTaskToQueue(tasksToBeQueued);
return startedSystemTasks;
}
/**
 * Enqueues each of the given tasks; see {@link #addTaskToQueue(Task)} for the
 * per-task queue semantics (de-duplication, callback offset).
 */
private void addTaskToQueue(final List<Task> tasks) {
    tasks.forEach(this::addTaskToQueue);
}
/**
 * Terminates a workflow as a result of a TerminateWorkflowException: applies the
 * exception's target status, resolves the failure workflow (possibly from a
 * "$workflow.input.<name>" style reference), persists the failing task if one is
 * attached, and delegates to terminateWorkflow().
 *
 * @param def      definition of the workflow being terminated
 * @param workflow the workflow instance
 * @param tw       exception carrying the target status, message and optional task
 */
private void terminate(final WorkflowDef def, final Workflow workflow, TerminateWorkflowException tw) {
if (!workflow.getStatus().isTerminal()) {
workflow.setStatus(tw.workflowStatus);
}
String failureWorkflow = def.getFailureWorkflow();
if (failureWorkflow != null) {
if (failureWorkflow.startsWith("$")) {
// Reference into the workflow input, e.g. "$workflow.input.<name>";
// the third dot-separated component is the input parameter name.
String[] paramPathComponents = failureWorkflow.split("\\.");
String name = paramPathComponents[2]; // name of the input parameter
failureWorkflow = (String) workflow.getInput().get(name);
}
}
if (tw.task != null) {
executionDAO.updateTask(tw.task);
}
terminateWorkflow(workflow, tw.getMessage(), failureWorkflow);
}
/**
 * Re-runs a workflow either from the beginning (taskId == null) or from a
 * specific task. Searches sub-workflows recursively for the task; when found,
 * removes every task after it, reschedules it (or marks a sub-workflow task
 * IN_PROGRESS), sets the workflow back to RUNNING and re-decides.
 *
 * @param taskId        task to re-run from; null re-runs the whole workflow
 * @param taskInput     replacement input for the re-run task; null keeps the old input
 * @param workflowInput replacement workflow input; null keeps the old input
 * @param correlationId replacement correlation id; null keeps the old one
 * @return true if the re-run was set up (always true when taskId is null),
 *         false when the task id was not found anywhere in the workflow tree
 */
private boolean rerunWF(String workflowId, String taskId, Map<String, Object> taskInput,
Map<String, Object> workflowInput, String correlationId) {
// Get the workflow
Workflow workflow = executionDAO.getWorkflow(workflowId);
// If the task Id is null it implies that the entire workflow has to be rerun
if (taskId == null) {
// remove all tasks
workflow.getTasks().forEach(task -> executionDAO.removeTask(task.getTaskId()));
// Set workflow as RUNNING
workflow.setStatus(WorkflowStatus.RUNNING);
if (correlationId != null) {
workflow.setCorrelationId(correlationId);
}
if (workflowInput != null) {
workflow.setInput(workflowInput);
}
executionDAO.updateWorkflow(workflow);
decide(workflowId);
return true;
}
// Now iterate through the tasks and find the "specific" task
Task rerunFromTask = null;
for (Task task : workflow.getTasks()) {
if (task.getTaskId().equals(taskId)) {
rerunFromTask = task;
break;
} else {
// If not found look into sub workflows
if (task.getTaskType().equalsIgnoreCase(SubWorkflow.NAME)) {
String subWorkflowId = task.getInputData().get(SubWorkflow.SUB_WORKFLOW_ID).toString();
// Recursive call sets up the re-run inside the sub-workflow; the parent's
// sub-workflow task is then resumed as IN_PROGRESS below.
if (rerunWF(subWorkflowId, taskId, taskInput, null, null)) {
rerunFromTask = task;
break;
}
}
}
}
if (rerunFromTask != null) {
// Remove all tasks after the "rerunFromTask"
for (Task task : workflow.getTasks()) {
if (task.getSeq() > rerunFromTask.getSeq()) {
executionDAO.removeTask(task.getTaskId());
}
}
if (rerunFromTask.getTaskType().equalsIgnoreCase(SubWorkflow.NAME)) {
// if task is sub workflow set task as IN_PROGRESS
rerunFromTask.setStatus(IN_PROGRESS);
} else {
// Set the task to rerun as SCHEDULED
rerunFromTask.setStatus(SCHEDULED);
if (taskInput != null) {
rerunFromTask.setInputData(taskInput);
}
addTaskToQueue(rerunFromTask);
}
rerunFromTask.setExecuted(false);
executionDAO.updateTask(rerunFromTask);
// and set workflow as RUNNING
workflow.setStatus(WorkflowStatus.RUNNING);
if (correlationId != null) {
workflow.setCorrelationId(correlationId);
}
if (workflowInput != null) {
workflow.setInput(workflowInput);
}
executionDAO.updateWorkflow(workflow);
decide(workflowId);
return true;
}
return false;
}
}
|
core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java
|
/*
* Copyright 2016 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.conductor.core.execution;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.common.metadata.tasks.PollData;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.Workflow.WorkflowStatus;
import com.netflix.conductor.core.WorkflowContext;
import com.netflix.conductor.core.config.Configuration;
import com.netflix.conductor.core.execution.ApplicationException.Code;
import com.netflix.conductor.core.execution.DeciderService.DeciderOutcome;
import com.netflix.conductor.core.execution.tasks.SubWorkflow;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.dao.ExecutionDAO;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.metrics.Monitors;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.CANCELED;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.FAILED;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.FAILED_WITH_TERMINAL_ERROR;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.IN_PROGRESS;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.SCHEDULED;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.SKIPPED;
import static com.netflix.conductor.common.metadata.tasks.Task.Status.valueOf;
import static com.netflix.conductor.core.execution.ApplicationException.Code.CONFLICT;
import static com.netflix.conductor.core.execution.ApplicationException.Code.INVALID_INPUT;
import static com.netflix.conductor.core.execution.ApplicationException.Code.NOT_FOUND;
import static java.util.Comparator.comparingInt;
import static java.util.stream.Collectors.groupingBy;
import static java.util.stream.Collectors.maxBy;
import static java.util.stream.Collectors.toSet;
/**
* @author Viren Workflow services provider interface
*/
@Trace
public class WorkflowExecutor {
private static final Logger logger = LoggerFactory.getLogger(WorkflowExecutor.class);
// Injected collaborators: metadata/execution persistence, queueing, and the decider.
private final MetadataDAO metadataDAO;
private final ExecutionDAO executionDAO;
private final QueueDAO queueDAO;
private final DeciderService deciderService;
private final Configuration config;
private final ParametersUtils parametersUtils;
// Queue polled by the sweeper to periodically re-run decide() on workflows.
public static final String deciderQueue = "_deciderQueue";
// Window (seconds) within which a worker poll counts as "active" for domain selection.
private int activeWorkerLastPollnSecs;
/**
 * Wires the executor with its persistence, queue and decision collaborators.
 * Reads "tasks.active.worker.lastpoll" (default 10s) — the freshness window
 * used when selecting an active worker domain.
 */
@Inject
public WorkflowExecutor(DeciderService deciderService, MetadataDAO metadataDAO, ExecutionDAO executionDAO,
QueueDAO queueDAO, ParametersUtils parametersUtils, Configuration config) {
this.deciderService = deciderService;
this.metadataDAO = metadataDAO;
this.executionDAO = executionDAO;
this.queueDAO = queueDAO;
this.config = config;
this.parametersUtils = parametersUtils;
activeWorkerLastPollnSecs = config.getIntProperty("tasks.active.worker.lastpoll", 10);
}
/** Starts a workflow without an event — delegates to the event-aware overload. */
public String startWorkflow(String name, int version, String correlationId, Map<String, Object> input) {
return startWorkflow(name, version, correlationId, input, null);
}
/** Starts a workflow without parent linkage — delegates to the full overload. */
public String startWorkflow(String name, int version, String correlationId, Map<String, Object> input, String event) {
return startWorkflow(name, version, input, correlationId, null, null, event);
}
/** Starts a workflow with an external input payload path and a task-to-domain mapping. */
public String startWorkflow(String name, int version, String correlationId, Map<String, Object> input, String externalInputPayloadStoragePath, String event, Map<String, String> taskToDomain) {
return startWorkflow(name, version, input, externalInputPayloadStoragePath, correlationId, null, null, event, taskToDomain);
}
/** Starts a (sub-)workflow linked to a parent workflow/task — delegates to the full overload. */
public String startWorkflow(String name, int version, Map<String, Object> input, String correlationId, String parentWorkflowId, String parentWorkflowTaskId, String event) {
return startWorkflow(name, version, input, null, correlationId, parentWorkflowId, parentWorkflowTaskId, event, null);
}
// True when the poll is recent enough (within activeWorkerLastPollnSecs) to treat the domain's workers as active.
private final Predicate<PollData> validateLastPolledTime = pd -> pd.getLastPollTime() > System.currentTimeMillis() - (activeWorkerLastPollnSecs * 1000);
// True for built-in system task types (per SystemTaskType).
private final Predicate<Task> isSystemTask = task -> SystemTaskType.is(task.getTaskType());
// True while the task has not yet reached a terminal status.
private final Predicate<Task> isNonTerminalTask = task -> !task.getStatus().isTerminal();
/**
 * Validates, persists and kicks off a new workflow instance; this is the overload
 * all other startWorkflow variants funnel into.
 *
 * @param workflowName    workflow definition name
 * @param workflowVersion workflow definition version
 * @param workflowInput   input map (may be null only if an external payload path is given)
 * @param externalInputPayloadStoragePath optional external storage path for large inputs
 * @param correlationId   caller-supplied correlation id
 * @param parentWorkflowId / parentWorkflowTaskId link to a parent when started as a sub-workflow
 * @param event           event that triggered the start, if any
 * @param taskToDomain    optional task-type-to-worker-domain routing map
 * @return the generated workflow id
 * @throws ApplicationException when validation fails (missing definition, missing task defs, null input)
 */
public String startWorkflow(String workflowName, int workflowVersion, Map<String, Object> workflowInput,
String externalInputPayloadStoragePath, String correlationId, String parentWorkflowId,
String parentWorkflowTaskId, String event, Map<String, String> taskToDomain) {
// perform validations
validateWorkflow(workflowName, workflowVersion, workflowInput, externalInputPayloadStoragePath);
//A random UUID is assigned to the work flow instance
String workflowId = IDGenerator.generate();
// Persist the Workflow
Workflow workflow = new Workflow();
workflow.setWorkflowId(workflowId);
workflow.setCorrelationId(correlationId);
workflow.setWorkflowType(workflowName);
workflow.setVersion(workflowVersion);
workflow.setInput(workflowInput);
workflow.setExternalInputPayloadStoragePath(externalInputPayloadStoragePath);
workflow.setStatus(WorkflowStatus.RUNNING);
workflow.setParentWorkflowId(parentWorkflowId);
workflow.setParentWorkflowTaskId(parentWorkflowTaskId);
workflow.setOwnerApp(WorkflowContext.get().getClientApp());
workflow.setCreateTime(System.currentTimeMillis());
workflow.setUpdatedBy(null);
workflow.setUpdateTime(null);
workflow.setEvent(event);
workflow.setTaskToDomain(taskToDomain);
executionDAO.createWorkflow(workflow);
logger.info("A new instance of workflow {} created with workflow id {}", workflowName, workflowId);
//then decide to see if anything needs to be done as part of the workflow
decide(workflowId);
return workflowId;
}
/**
 * Performs validations for starting a workflow: the definition must exist, every
 * SIMPLE task in it must have a task definition, and the input must be non-null
 * unless an external payload storage path is supplied. Records a start-error
 * metric before rethrowing on any failure.
 *
 * @param workflowName        workflow definition name
 * @param workflowVersion     workflow definition version
 * @param workflowInput       caller-supplied input map (may be null with external storage)
 * @param externalStoragePath external storage path for the input payload, if any
 * @throws ApplicationException if the validation fails
 */
private void validateWorkflow(String workflowName, int workflowVersion, Map<String, Object> workflowInput, String externalStoragePath) {
try {
//Check if the workflow definition is valid
WorkflowDef workflowDefinition = metadataDAO.get(workflowName, workflowVersion);
if (workflowDefinition == null) {
logger.error("There is no workflow defined with name {} and version {}", workflowName, workflowVersion);
throw new ApplicationException(Code.NOT_FOUND, "No such workflow defined. name=" + workflowName + ", version=" + workflowVersion);
}
//because everything else is a system defined task
Set<String> missingTaskDefs = workflowDefinition.all().stream()
.filter(task -> task.getType().equals(WorkflowTask.Type.SIMPLE.name()))
.map(WorkflowTask::getName)
.filter(task -> metadataDAO.getTaskDef(task) == null)
.collect(toSet());
if (!missingTaskDefs.isEmpty()) {
logger.error("Cannot find the task definitions for the following tasks used in workflow: {}", missingTaskDefs);
throw new ApplicationException(INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefs);
}
//Check if the input to the workflow is not null
if (workflowInput == null && StringUtils.isBlank(externalStoragePath)) {
logger.error("The input for the workflow {} cannot be NULL", workflowName);
throw new ApplicationException(INVALID_INPUT, "NULL input passed when starting workflow");
}
} catch (Exception e) {
// Count the failed start attempt, then propagate to the caller.
Monitors.recordWorkflowStartError(workflowName, WorkflowContext.get().getClientApp());
throw e;
}
}
/**
 * Clears the callback delay of every IN_PROGRESS task of a running workflow so
 * the tasks become immediately available to workers again.
 *
 * @param workflowId id of the workflow whose task callbacks are reset
 * @return the same workflowId, for caller convenience
 * @throws ApplicationException with code CONFLICT when the workflow is already terminal
 */
public String resetCallbacksForInProgressTasks(String workflowId) {
    Workflow workflow = executionDAO.getWorkflow(workflowId, true);
    if (workflow.getStatus().isTerminal()) {
        throw new ApplicationException(CONFLICT, "Workflow is completed. status=" + workflow.getStatus());
    }
    for (Task task : workflow.getTasks()) {
        boolean hasPendingCallback = task.getStatus().equals(IN_PROGRESS) && task.getCallbackAfterSeconds() > 0;
        if (!hasPendingCallback) {
            continue;
        }
        // Only persist the reset once the queue entry was successfully re-timed.
        if (queueDAO.setOffsetTime(QueueUtils.getQueueName(task), task.getTaskId(), 0)) {
            task.setCallbackAfterSeconds(0);
            executionDAO.updateTask(task);
        }
    }
    return workflowId;
}
/**
 * Re-runs a workflow (optionally from a specific task) as described by the request.
 *
 * @param request re-run parameters; reRunFromWorkflowId is mandatory
 * @return the id of the re-run workflow
 * @throws ApplicationException with code INVALID_INPUT when the referenced task is not found
 */
public String rerun(RerunWorkflowRequest request) {
    Preconditions.checkNotNull(request.getReRunFromWorkflowId(), "reRunFromWorkflowId is missing");
    boolean taskFound = rerunWF(request.getReRunFromWorkflowId(), request.getReRunFromTaskId(),
            request.getTaskInput(), request.getWorkflowInput(), request.getCorrelationId());
    if (!taskFound) {
        throw new ApplicationException(INVALID_INPUT, "Task " + request.getReRunFromTaskId() + " not found");
    }
    return request.getReRunFromWorkflowId();
}
/**
 * Restarts a terminal workflow from scratch: deletes all of its tasks, clears
 * the failure reason, resets the timestamps, flips it to RUNNING and re-decides.
 *
 * @param workflowId id of the workflow to restart
 * @throws ApplicationException CONFLICT when the workflow is still running, or when the
 *                              definition forbids restarting a COMPLETED instance
 */
public void rewind(String workflowId) {
Workflow workflow = executionDAO.getWorkflow(workflowId, true);
if (!workflow.getStatus().isTerminal()) {
throw new ApplicationException(CONFLICT, "Workflow is still running. status=" + workflow.getStatus());
}
WorkflowDef workflowDef = metadataDAO.get(workflow.getWorkflowType(), workflow.getVersion());
if (!workflowDef.isRestartable() && workflow.getStatus().equals(WorkflowStatus.COMPLETED)) { // Can only restart non completed workflows when the configuration is set to false
throw new ApplicationException(CONFLICT, String.format("WorkflowId: %s is an instance of WorkflowDef: %s and version: %d and is non restartable",
workflowId, workflowDef.getName(), workflowDef.getVersion()));
}
// Remove all the tasks...
workflow.getTasks().forEach(t -> executionDAO.removeTask(t.getTaskId()));
workflow.getTasks().clear();
workflow.setReasonForIncompletion(null);
workflow.setStartTime(System.currentTimeMillis());
workflow.setEndTime(0);
// Change the status to running
workflow.setStatus(WorkflowStatus.RUNNING);
executionDAO.updateWorkflow(workflow);
// Schedule the first task(s) again.
decide(workflowId);
}
/**
 * Gets the last instance of each failed task and reschedules each.
 * Gets all cancelled tasks and reschedules all of them except JOIN (a cancelled
 * JOIN is set back to IN_PROGRESS instead).
 * Switches the workflow back to RUNNING status and calls the decider.
 *
 * @param workflowId id of the terminal workflow to retry
 * @throws ApplicationException CONFLICT when the workflow is still running, has no tasks,
 *                              or has no failed tasks to retry
 */
public void retry(String workflowId) {
Workflow workflow = executionDAO.getWorkflow(workflowId, true);
if (!workflow.getStatus().isTerminal()) {
throw new ApplicationException(CONFLICT, "Workflow is still running. status=" + workflow.getStatus());
}
if (workflow.getTasks().isEmpty()) {
throw new ApplicationException(CONFLICT, "Workflow has not started yet");
}
List<Task> failedTasks = getFailedTasksToRetry(workflow);
List<Task> cancelledTasks = workflow.getTasks().stream()
.filter(x->CANCELED.equals(x.getStatus())).collect(Collectors.toList());
if (failedTasks.isEmpty()) {
throw new ApplicationException(CONFLICT,
"There are no failed tasks! Use restart if you want to attempt entire workflow execution again.");
}
List<Task> rescheduledTasks = new ArrayList<>();
// Each failed task gets a fresh SCHEDULED copy (the original is marked retried).
failedTasks.forEach(failedTask -> {
rescheduledTasks.add(taskToBeRescheduled(failedTask));
});
// Reschedule the cancelled task but if the join is cancelled set that to in progress
cancelledTasks.forEach(cancelledTask -> {
if (cancelledTask.getTaskType().equalsIgnoreCase(WorkflowTask.Type.JOIN.toString())) {
cancelledTask.setStatus(IN_PROGRESS);
executionDAO.updateTask(cancelledTask);
} else {
rescheduledTasks.add(taskToBeRescheduled(cancelledTask));
}
});
scheduleTask(workflow, rescheduledTasks);
workflow.setStatus(WorkflowStatus.RUNNING);
executionDAO.updateWorkflow(workflow);
executionDAO.updateTasks(workflow.getTasks());
decide(workflowId);
}
/**
 * Returns the latest (highest seq) FAILED attempt for each task reference name
 * in the workflow.
 *
 * @param workflow workflow whose tasks are inspected
 * @return one task per reference name — the most recent failed attempt
 */
@VisibleForTesting
List<Task> getFailedTasksToRetry(Workflow workflow) {
    Map<String, Task> latestFailedByRefName = new HashMap<>();
    for (Task task : workflow.getTasks()) {
        if (!FAILED.equals(task.getStatus())) {
            continue;
        }
        // Keep only the attempt with the highest sequence number per reference name.
        Task current = latestFailedByRefName.get(task.getReferenceTaskName());
        if (current == null || task.getSeq() > current.getSeq()) {
            latestFailedByRefName.put(task.getReferenceTaskName(), task);
        }
    }
    return new ArrayList<>(latestFailedByRefName.values());
}
/**
 * Builds a fresh SCHEDULED copy of a failed or cancelled task (linked back via
 * retriedTaskId) and marks the original task as retried in the DAO.
 *
 * @param task failed or cancelled task
 * @return new instance of a task with "SCHEDULED" status
 */
private Task taskToBeRescheduled(Task task) {
    Task retryAttempt = task.copy();
    retryAttempt.setTaskId(IDGenerator.generate());
    retryAttempt.setRetriedTaskId(task.getTaskId());
    retryAttempt.setStatus(SCHEDULED);
    retryAttempt.setRetryCount(task.getRetryCount() + 1);
    // Persist the retried flag on the original so it is not picked up again.
    task.setRetried(true);
    executionDAO.updateTask(task);
    return retryAttempt;
}
/**
 * Looks up the single non-terminal task with the given reference name in a workflow.
 * Only one task per reference name can be running at any time, so the first match wins.
 *
 * @param taskReferenceName reference name of the task to look for
 * @param workflowId id of the workflow whose tasks are searched
 * @return the pending task, or {@code null} when no non-terminal task matches
 */
public Task getPendingTaskByWorkflow(String taskReferenceName, String workflowId) {
    for (Task task : executionDAO.getTasksForWorkflow(workflowId)) {
        if (isNonTerminalTask.test(task) && task.getReferenceTaskName().equals(taskReferenceName)) {
            return task;
        }
    }
    return null;
}
/**
 * Marks the workflow COMPLETED, persists its output and tasks, resumes a failed
 * parent sub-workflow task where applicable, and removes the workflow from the
 * sweep (decider) queue. Idempotent when the stored workflow is already COMPLETED.
 *
 * @param wf workflow (carrying output and tasks) to complete
 * @throws ApplicationException with CONFLICT when the stored workflow is already
 *         in a terminal state other than COMPLETED
 */
@VisibleForTesting
void completeWorkflow(Workflow wf) {
    logger.debug("Completing workflow execution for {}", wf.getWorkflowId());
    // Re-read the persisted workflow to check its current status before mutating it.
    Workflow workflow = executionDAO.getWorkflow(wf.getWorkflowId(), false);
    if (workflow.getStatus().equals(WorkflowStatus.COMPLETED)) {
        // Idempotent path: already completed — just make sure it is off the pending list.
        executionDAO.removeFromPendingWorkflow(workflow.getWorkflowType(), workflow.getWorkflowId());
        logger.info("Workflow has already been completed. Current status={}, workflowId= {}", workflow.getStatus(), wf.getWorkflowId());
        return;
    }
    if (workflow.getStatus().isTerminal()) {
        // Terminal but not COMPLETED (e.g. FAILED/TERMINATED) — cannot complete it now.
        String msg = "Workflow has already been completed. Current status " + workflow.getStatus();
        throw new ApplicationException(CONFLICT, msg);
    }
    deciderService.updateWorkflowOutput(wf, null);
    workflow.setStatus(WorkflowStatus.COMPLETED);
    workflow.setOutput(wf.getOutput());
    workflow.setExternalOutputPayloadStoragePath(wf.getExternalOutputPayloadStoragePath());
    executionDAO.updateWorkflow(workflow);
    logger.debug("Completed workflow execution for {}", wf.getWorkflowId());
    executionDAO.updateTasks(wf.getTasks());
    // If the following task, for some reason fails, the sweep will take care of this again!
    if (workflow.getParentWorkflowId() != null) {
        Workflow parent = executionDAO.getWorkflow(workflow.getParentWorkflowId(), false);
        WorkflowDef parentDef = metadataDAO.get(parent.getWorkflowType(), parent.getVersion());
        logger.debug("Completed sub-workflow {}, deciding parent workflow {}", wf.getWorkflowId(), wf.getParentWorkflowId());
        Task parentWorkflowTask = executionDAO.getTask(workflow.getParentWorkflowTaskId());
        // If parent is FAILED and the sub workflow task in parent is FAILED, we want to resume them
        if (StringUtils.isBlank(parentDef.getFailureWorkflow()) && parent.getStatus() == WorkflowStatus.FAILED && parentWorkflowTask.getStatus() == FAILED) {
            parentWorkflowTask.setStatus(IN_PROGRESS);
            executionDAO.updateTask(parentWorkflowTask);
            parent.setStatus(WorkflowStatus.RUNNING);
            executionDAO.updateWorkflow(parent);
        }
        // Re-evaluate the parent now that this sub-workflow has completed.
        decide(parent.getWorkflowId());
    }
    Monitors.recordWorkflowCompletion(workflow.getWorkflowType(), workflow.getEndTime() - workflow.getStartTime(), wf.getOwnerApp());
    queueDAO.remove(deciderQueue, workflow.getWorkflowId()); //remove from the sweep queue
    logger.debug("Removed workflow {} from decider queue", wf.getWorkflowId());
}
/**
 * Terminates a workflow by id with the given reason and no failure workflow.
 *
 * @param workflowId id of the workflow to terminate
 * @param reason reason recorded on the workflow
 */
public void terminateWorkflow(String workflowId, String reason) {
    Workflow workflowToTerminate = executionDAO.getWorkflow(workflowId, true);
    workflowToTerminate.setStatus(WorkflowStatus.TERMINATED);
    terminateWorkflow(workflowToTerminate, reason, null);
}
/**
 * Terminates the given workflow: cancels all non-terminal tasks, drains their queue
 * entries, re-decides the parent workflow (if any), optionally starts a failure
 * workflow, and removes the workflow from the sweep queue and pending list.
 *
 * @param workflow workflow to terminate; status is set to TERMINATED unless already terminal
 * @param reason recorded as the workflow's reasonForIncompletion
 * @param failureWorkflow name of a workflow to start on failure; may be blank/null
 */
public void terminateWorkflow(Workflow workflow, String reason, String failureWorkflow) {
    if (!workflow.getStatus().isTerminal()) {
        workflow.setStatus(WorkflowStatus.TERMINATED);
    }
    deciderService.updateWorkflowOutput(workflow, null);
    String workflowId = workflow.getWorkflowId();
    workflow.setReasonForIncompletion(reason);
    executionDAO.updateWorkflow(workflow);
    List<Task> tasks = workflow.getTasks();
    for (Task task : tasks) {
        if (!task.getStatus().isTerminal()) {
            // Cancel the ones which are not completed yet....
            task.setStatus(CANCELED);
            if (isSystemTask.test(task)) {
                WorkflowSystemTask workflowSystemTask = WorkflowSystemTask.get(task.getTaskType());
                // Let system tasks run their cancellation hook before persisting.
                workflowSystemTask.cancel(workflow, task, this);
            }
            executionDAO.updateTask(task);
        }
        // And remove from the task queue if they were there
        queueDAO.remove(QueueUtils.getQueueName(task), task.getTaskId());
    }
    // If the following lines, for some reason fails, the sweep will take
    // care of this again!
    if (workflow.getParentWorkflowId() != null) {
        Workflow parent = executionDAO.getWorkflow(workflow.getParentWorkflowId(), false);
        decide(parent.getWorkflowId());
    }
    if (!StringUtils.isBlank(failureWorkflow)) {
        // Start the configured failure workflow with the failed workflow's input
        // plus failure context (workflowId, reason, failureStatus).
        Map<String, Object> input = new HashMap<>(workflow.getInput());
        input.put("workflowId", workflowId);
        input.put("reason", reason);
        input.put("failureStatus", workflow.getStatus().toString());
        try {
            WorkflowDef latestFailureWorkflow = metadataDAO.getLatest(failureWorkflow);
            String failureWFId = startWorkflow(failureWorkflow, latestFailureWorkflow.getVersion(), workflowId, input);
            workflow.getOutput().put("conductor.failure_workflow", failureWFId);
        } catch (Exception e) {
            // Best-effort: record the failure in the workflow output instead of propagating.
            logger.error("Failed to start error workflow", e);
            workflow.getOutput().put("conductor.failure_workflow", "Error workflow " + failureWorkflow + " failed to start. reason: " + e.getMessage());
            Monitors.recordWorkflowStartError(failureWorkflow, WorkflowContext.get().getClientApp());
        }
    }
    queueDAO.remove(deciderQueue, workflow.getWorkflowId()); //remove from the sweep queue
    executionDAO.removeFromPendingWorkflow(workflow.getWorkflowType(), workflow.getWorkflowId());
    // Send to atlas
    Monitors.recordWorkflowTermination(workflow.getWorkflowType(), workflow.getStatus(), workflow.getOwnerApp());
}
/**
 * Applies a worker-reported {@link TaskResult} to the stored task, keeps the task
 * queues consistent with the new status, and re-runs the decider for the owning
 * workflow. Updates against terminal workflows/tasks are logged and recorded as
 * conflicts instead of being applied.
 *
 * @param taskResult result reported by a worker; must not be null
 * @throws ApplicationException with INVALID_INPUT when {@code taskResult} is null
 */
public void updateTask(TaskResult taskResult) {
    if (taskResult == null) {
        logger.info("null task given for update");
        throw new ApplicationException(Code.INVALID_INPUT, "Task object is null");
    }
    String workflowId = taskResult.getWorkflowInstanceId();
    Workflow workflowInstance = executionDAO.getWorkflow(workflowId);
    Task task = executionDAO.getTask(taskResult.getTaskId());
    logger.debug("Task: {} belonging to Workflow {} being updated", task, workflowInstance);
    String taskQueueName = QueueUtils.getQueueName(task);
    if (workflowInstance.getStatus().isTerminal()) {
        // Workflow is in terminal state
        // Drop the queue entry and force-complete the task so it cannot linger,
        // then record the update conflict and bail out.
        queueDAO.remove(taskQueueName, taskResult.getTaskId());
        logger.debug("Workflow: {} is in terminal state Task: {} removed from Queue: {} during update task", workflowInstance, task, taskQueueName);
        if (!task.getStatus().isTerminal()) {
            task.setStatus(COMPLETED);
        }
        task.setOutputData(taskResult.getOutputData());
        task.setReasonForIncompletion(taskResult.getReasonForIncompletion());
        task.setWorkerId(taskResult.getWorkerId());
        executionDAO.updateTask(task);
        String msg = String.format("Workflow %s is already completed as %s, task=%s, reason=%s",
                workflowInstance.getWorkflowId(), workflowInstance.getStatus(), task.getTaskType(), workflowInstance.getReasonForIncompletion());
        logger.info(msg);
        Monitors.recordUpdateConflict(task.getTaskType(), workflowInstance.getWorkflowType(), workflowInstance.getStatus());
        return;
    }
    if (task.getStatus().isTerminal()) {
        // Task was already updated....
        // A late/duplicate worker update — just clean the queue and record the conflict.
        queueDAO.remove(taskQueueName, taskResult.getTaskId());
        logger.debug("Task: {} is in terminal state and is removed from the queue {} ", task, taskQueueName);
        String msg = String.format("Task is already completed as %s@%d, workflow status=%s, workflowId=%s, taskId=%s",
                task.getStatus(), task.getEndTime(), workflowInstance.getStatus(), workflowInstance.getWorkflowId(), task.getTaskId());
        logger.info(msg);
        Monitors.recordUpdateConflict(task.getTaskType(), workflowInstance.getWorkflowType(), task.getStatus());
        return;
    }
    // Copy the worker-reported fields onto the stored task.
    task.setStatus(valueOf(taskResult.getStatus().name()));
    task.setOutputData(taskResult.getOutputData());
    task.setExternalOutputPayloadStoragePath(taskResult.getExternalOutputPayloadStoragePath());
    task.setReasonForIncompletion(taskResult.getReasonForIncompletion());
    task.setWorkerId(taskResult.getWorkerId());
    task.setCallbackAfterSeconds(taskResult.getCallbackAfterSeconds());
    if (task.getStatus().isTerminal()) {
        task.setEndTime(System.currentTimeMillis());
    }
    executionDAO.updateTask(task);
    //If the task has failed update the failed task reference name in the workflow.
    //This gives the ability to look at workflow and see what tasks have failed at a high level.
    if (FAILED.equals(task.getStatus()) || FAILED_WITH_TERMINAL_ERROR.equals(task.getStatus())) {
        workflowInstance.getFailedReferenceTaskNames().add(task.getReferenceTaskName());
        executionDAO.updateWorkflow(workflowInstance);
        logger.debug("Task: {} has a {} status and the Workflow has been updated with failed task reference", task, task.getStatus());
    }
    taskResult.getLogs().forEach(taskExecLog -> taskExecLog.setTaskId(task.getTaskId()));
    executionDAO.addTaskExecLog(taskResult.getLogs());
    // Keep the task queue consistent with the new status.
    switch (task.getStatus()) {
        case COMPLETED:
        case CANCELED:
        case FAILED:
        case FAILED_WITH_TERMINAL_ERROR:
            queueDAO.remove(taskQueueName, taskResult.getTaskId());
            logger.debug("Task: {} removed from taskQueue: {} since the task status is {}", task, taskQueueName, task.getStatus().name());
            break;
        case IN_PROGRESS:
            // put it back in queue based on callbackAfterSeconds
            long callBack = taskResult.getCallbackAfterSeconds();
            queueDAO.remove(taskQueueName, task.getTaskId());
            logger.debug("Task: {} removed from taskQueue: {} since the task status is {}", task, taskQueueName, task.getStatus().name());
            queueDAO.push(taskQueueName, task.getTaskId(), callBack); // Milliseconds
            logger.debug("Task: {} pushed back to taskQueue: {} since the task status is {} with callbackAfterSeconds: {}", task, taskQueueName, task.getStatus().name(), callBack);
            break;
        default:
            break;
    }
    // Re-evaluate the workflow now that this task has changed.
    decide(workflowId);
    if (task.getStatus().isTerminal()) {
        long duration = getTaskDuration(0, task);
        long lastDuration = task.getEndTime() - task.getStartTime();
        Monitors.recordTaskExecutionTime(task.getTaskDefName(), duration, true, task.getStatus());
        Monitors.recordTaskExecutionTime(task.getTaskDefName(), lastDuration, false, task.getStatus());
    }
}
/**
 * Fetches a page of tasks of the given type from the execution store.
 *
 * @param taskType task type to query
 * @param startKey pagination start key
 * @param count maximum number of tasks to return
 * @return tasks returned by the execution DAO
 */
public List<Task> getTasks(String taskType, String startKey, int count) {
    // Straight pass-through to the execution DAO.
    List<Task> tasks = executionDAO.getTasks(taskType, startKey, count);
    return tasks;
}
/**
 * Returns all pending (running) workflows of the given type.
 *
 * @param workflowName workflow type name
 * @return pending workflows as reported by the execution DAO
 */
public List<Workflow> getRunningWorkflows(String workflowName) {
    // Delegates directly to the execution DAO's pending-workflow index.
    List<Workflow> pending = executionDAO.getPendingWorkflowsByType(workflowName);
    return pending;
}
/**
 * Returns the ids of workflows of the given name and version that started within
 * the given time window.
 *
 * @param name workflow type name
 * @param version workflow version to match (unboxed for comparison, as before)
 * @param startTime window start (epoch millis)
 * @param endTime window end (epoch millis)
 * @return matching workflow ids
 */
public List<String> getWorkflows(String name, Integer version, Long startTime, Long endTime) {
    List<String> workflowIds = new ArrayList<>();
    for (Workflow workflow : executionDAO.getWorkflowsByType(name, startTime, endTime)) {
        if (workflow.getVersion() == version) {
            workflowIds.add(workflow.getWorkflowId());
        }
    }
    return workflowIds;
}
/**
 * Returns the ids of all running workflows of the given type.
 *
 * @param workflowName workflow type name
 * @return running workflow ids from the execution DAO
 */
public List<String> getRunningWorkflowIds(String workflowName) {
    // Thin delegate; kept for API symmetry with getRunningWorkflows.
    return executionDAO.getRunningWorkflowIds(workflowName);
}
/**
 * Runs the decider over a workflow: schedules new tasks, executes synchronous
 * system tasks inline, requeues/updates tasks per the decider outcome, and
 * recursively re-decides when inline execution changed state.
 *
 * @param workflowId ID of the workflow to evaluate the state for
 * @return true if the workflow reached a terminal state, false otherwise.
 * @throws RuntimeException rethrown after logging when the decider fails unexpectedly
 */
public boolean decide(String workflowId) {
    // If it is a new workflow, the tasks will be still empty even though include tasks is true
    Workflow workflow = executionDAO.getWorkflow(workflowId, true);
    WorkflowDef workflowDef = metadataDAO.get(workflow.getWorkflowType(), workflow.getVersion());
    try {
        DeciderOutcome outcome = deciderService.decide(workflow, workflowDef);
        if (outcome.isComplete) {
            completeWorkflow(workflow);
            return true;
        }
        List<Task> tasksToBeScheduled = outcome.tasksToBeScheduled;
        setTaskDomains(tasksToBeScheduled, workflow);
        List<Task> tasksToBeUpdated = outcome.tasksToBeUpdated;
        List<Task> tasksToBeRequeued = outcome.tasksToBeRequeued;
        boolean stateChanged = false;
        if (!tasksToBeRequeued.isEmpty()) {
            addTaskToQueue(tasksToBeRequeued);
        }
        workflow.getTasks().addAll(tasksToBeScheduled);
        // Execute synchronous system tasks inline; any that complete here change
        // workflow state and require another decide pass.
        for (Task task : tasksToBeScheduled) {
            if (isSystemTask.and(isNonTerminalTask).test(task)) {
                WorkflowSystemTask workflowSystemTask = WorkflowSystemTask.get(task.getTaskType());
                if (!workflowSystemTask.isAsync() && workflowSystemTask.execute(workflow, task, this)) {
                    tasksToBeUpdated.add(task);
                    stateChanged = true;
                }
            }
        }
        stateChanged = scheduleTask(workflow, tasksToBeScheduled) || stateChanged;
        if (!outcome.tasksToBeUpdated.isEmpty() || !outcome.tasksToBeScheduled.isEmpty()) {
            executionDAO.updateTasks(tasksToBeUpdated);
            executionDAO.updateWorkflow(workflow);
            // Keep the workflow on the sweep queue so it is periodically re-evaluated.
            queueDAO.push(deciderQueue, workflow.getWorkflowId(), config.getSweepFrequency());
        }
        if (stateChanged) {
            // Inline execution advanced the workflow; evaluate again immediately.
            decide(workflowId);
        }
    } catch (TerminateWorkflowException twe) {
        logger.info("Execution terminated of workflow: {} of type: {}", workflowId, workflowDef.getName(), twe);
        terminate(workflowDef, workflow, twe);
        return true;
    } catch (RuntimeException e) {
        logger.error("Error deciding workflow: {}", workflowId, e);
        throw e;
    }
    return false;
}
/**
 * Pauses a running workflow. A no-op if the workflow is already paused.
 *
 * @param workflowId id of the workflow to pause
 * @throws ApplicationException with CONFLICT if the workflow has already ended
 */
public void pauseWorkflow(String workflowId) {
    Workflow workflow = executionDAO.getWorkflow(workflowId, false);
    if (workflow.getStatus().isTerminal()) {
        throw new ApplicationException(CONFLICT, "Workflow id " + workflowId + " has ended, status cannot be updated.");
    }
    if (workflow.getStatus().equals(WorkflowStatus.PAUSED)) {
        return; // Already paused, nothing to do.
    }
    workflow.setStatus(WorkflowStatus.PAUSED);
    executionDAO.updateWorkflow(workflow);
}
/**
 * Resumes a paused workflow: sets it back to RUNNING and re-runs the decider.
 *
 * @param workflowId id of the workflow to resume
 * @throws IllegalStateException if the workflow is not currently PAUSED
 */
public void resumeWorkflow(String workflowId) {
    Workflow workflow = executionDAO.getWorkflow(workflowId, false);
    if (!workflow.getStatus().equals(WorkflowStatus.PAUSED)) {
        // Fixed error message: the guard fires when the workflow is NOT paused,
        // but the previous text claimed it "is PAUSED".
        throw new IllegalStateException("The workflow " + workflowId + " is not PAUSED so cannot resume");
    }
    workflow.setStatus(WorkflowStatus.RUNNING);
    executionDAO.updateWorkflow(workflow);
    decide(workflowId);
}
/**
 * Skips a not-yet-started task in a running workflow by inserting a SKIPPED task
 * record for its reference name, then re-runs the decider.
 *
 * @param workflowId id of the running workflow
 * @param taskReferenceName reference name (from the workflow definition) of the task to skip
 * @param skipTaskRequest optional input/output payloads to attach to the skipped task
 * @throws IllegalStateException if the workflow is not running, the reference name is
 *         not in the workflow definition, or the task has already been processed
 */
public void skipTaskFromWorkflow(String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) {
    Workflow wf = executionDAO.getWorkflow(workflowId, true);
    // If the wf is not running then cannot skip any task
    if (!wf.getStatus().equals(WorkflowStatus.RUNNING)) {
        String errorMsg = String.format("The workflow %s is not running so the task referenced by %s cannot be skipped", workflowId, taskReferenceName);
        throw new IllegalStateException(errorMsg);
    }
    // Check if the reference name is as per the workflowdef
    WorkflowDef wfd = metadataDAO.get(wf.getWorkflowType(), wf.getVersion());
    WorkflowTask wft = wfd.getTaskByRefName(taskReferenceName);
    if (wft == null) {
        String errorMsg = String.format("The task referenced by %s does not exist in the WorkflowDefinition %s", taskReferenceName, wf.getWorkflowType());
        throw new IllegalStateException(errorMsg);
    }
    // If the task is already started the again it cannot be skipped
    wf.getTasks().forEach(task -> {
        if (task.getReferenceTaskName().equals(taskReferenceName)) {
            String errorMsg = String.format("The task referenced %s has already been processed, cannot be skipped", taskReferenceName);
            throw new IllegalStateException(errorMsg);
        }
    });
    // Now create a "SKIPPED" task for this workflow
    Task theTask = new Task();
    theTask.setTaskId(IDGenerator.generate());
    theTask.setReferenceTaskName(taskReferenceName);
    theTask.setWorkflowInstanceId(workflowId);
    theTask.setStatus(SKIPPED);
    theTask.setTaskType(wft.getName());
    theTask.setCorrelationId(wf.getCorrelationId());
    if (skipTaskRequest != null) {
        theTask.setInputData(skipTaskRequest.getTaskInput());
        theTask.setOutputData(skipTaskRequest.getTaskOutput());
    }
    executionDAO.createTasks(Collections.singletonList(theTask));
    // Re-evaluate so the decider can move past the skipped task.
    decide(workflowId);
}
/**
 * Loads a workflow by id, optionally including its tasks.
 *
 * @param workflowId id of the workflow
 * @param includeTasks whether to also load the workflow's tasks
 * @return the stored workflow
 */
public Workflow getWorkflow(String workflowId, boolean includeTasks) {
    // Direct read-through to the execution store.
    return executionDAO.getWorkflow(workflowId, includeTasks);
}
/**
 * Enqueues a task on its queue, honoring any callbackAfterSeconds delay.
 * An existing queue entry is removed first so the task is never enqueued twice.
 *
 * @param task task to (re-)enqueue
 */
private void addTaskToQueue(Task task) {
    String queueName = QueueUtils.getQueueName(task);
    queueDAO.remove(queueName, task.getTaskId());
    long offsetSeconds = task.getCallbackAfterSeconds() > 0 ? task.getCallbackAfterSeconds() : 0;
    queueDAO.push(queueName, task.getTaskId(), offsetSeconds);
    logger.debug("Added task {} to queue {} with call back seconds {}", task, queueName, task.getCallbackAfterSeconds());
}
/**
 * Executes an asynchronous system task that was polled from its queue: validates
 * the task/workflow are still live, enforces in-progress and rate limits, then
 * starts or continues the system task and reports the result via updateTask.
 * All exceptions are caught and logged — this is a queue-worker entry point.
 *
 * @param systemTask the system task implementation to run
 * @param taskId id of the task instance
 * @param unackTimeout seconds before the queue message becomes visible again;
 *        applied as callbackAfterSeconds when the task is still non-terminal
 */
public void executeSystemTask(WorkflowSystemTask systemTask, String taskId, int unackTimeout) {
    try {
        Task task = executionDAO.getTask(taskId);
        logger.info("Task: {} fetched from execution DAO for taskId: {}", task, taskId);
        if (task.getStatus().isTerminal()) {
            //Tune the SystemTaskWorkerCoordinator's queues - if the queue size is very big this can happen!
            logger.info("Task {}/{} was already completed.", task.getTaskType(), task.getTaskId());
            queueDAO.remove(QueueUtils.getQueueName(task), task.getTaskId());
            return;
        }
        String workflowId = task.getWorkflowInstanceId();
        Workflow workflow = executionDAO.getWorkflow(workflowId, true);
        if (task.getStartTime() == 0) {
            // First poll for this task: stamp the start time and record queue wait.
            task.setStartTime(System.currentTimeMillis());
            Monitors.recordQueueWaitTime(task.getTaskDefName(), task.getQueueWaitTime());
        }
        if (workflow.getStatus().isTerminal()) {
            // Owning workflow already ended — cancel the task and drop the queue entry.
            logger.warn("Workflow {} has been completed for {}/{}", workflow.getWorkflowId(), systemTask.getName(), task.getTaskId());
            if (!task.getStatus().isTerminal()) {
                task.setStatus(CANCELED);
            }
            executionDAO.updateTask(task);
            queueDAO.remove(QueueUtils.getQueueName(task), task.getTaskId());
            return;
        }
        if (task.getStatus().equals(SCHEDULED)) {
            if (executionDAO.exceedsInProgressLimit(task)) {
                //to do add a metric to record this
                logger.warn("Concurrent Execution limited for {}:{}", taskId, task.getTaskDefName());
                return;
            }
            if (task.getRateLimitPerFrequency() > 0 && executionDAO.exceedsRateLimitPerFrequency(task)) {
                logger.warn("RateLimit Execution limited for {}:{}, limit:{}", taskId, task.getTaskDefName(), task.getRateLimitPerFrequency());
                return;
            }
        }
        logger.info("Executing {}/{}-{}", task.getTaskType(), task.getTaskId(), task.getStatus());
        // Extend the queue unack window so the message is not redelivered mid-execution.
        queueDAO.setUnackTimeout(QueueUtils.getQueueName(task), task.getTaskId(), systemTask.getRetryTimeInSecond() * 1000);
        task.setPollCount(task.getPollCount() + 1);
        executionDAO.updateTask(task);
        switch (task.getStatus()) {
            case SCHEDULED:
                systemTask.start(workflow, task, this);
                break;
            case IN_PROGRESS:
                systemTask.execute(workflow, task, this);
                break;
            default:
                break;
        }
        if (!task.getStatus().isTerminal()) {
            task.setCallbackAfterSeconds(unackTimeout);
        }
        // Route the result through the normal update path (queues + decide).
        updateTask(new TaskResult(task));
        logger.info("Done Executing {}/{}-{} op={}", task.getTaskType(), task.getTaskId(), task.getStatus(), task.getOutputData().toString());
    } catch (Exception e) {
        logger.error("Error executing system task - {}, with id: {}", systemTask, taskId, e);
    }
}
/**
 * Assigns a domain to each non-system task based on the workflow's taskToDomain
 * mapping. A "*" entry applies the same comma-separated domain candidates to every
 * task; otherwise candidates are looked up per task type. System tasks never get
 * a domain.
 *
 * @param tasks tasks about to be scheduled
 * @param wf workflow carrying the taskToDomain mapping (may be null)
 */
private void setTaskDomains(List<Task> tasks, Workflow wf) {
    Map<String, String> taskToDomain = wf.getTaskToDomain();
    if (taskToDomain == null) {
        return;
    }
    String allTasksDomains = taskToDomain.get("*");
    if (allTasksDomains != null) {
        // One candidate list shared by every non-system task.
        String[] domainCandidates = allTasksDomains.split(",");
        for (Task task : tasks) {
            if (!WorkflowTask.Type.isSystemTask(task.getTaskType())) {
                // Pick whichever candidate domain has an active poller.
                task.setDomain(getActiveDomain(task.getTaskType(), domainCandidates));
            }
        }
    } else {
        // Per-task-type candidate lists.
        for (Task task : tasks) {
            if (WorkflowTask.Type.isSystemTask(task.getTaskType())) {
                continue;
            }
            String domainsForType = taskToDomain.get(task.getTaskType());
            if (domainsForType != null) {
                task.setDomain(getActiveDomain(task.getTaskType(), domainsForType.split(",")));
            }
        }
    }
}
/**
 * Picks the first domain in the given (ordered) list that has a recent worker poll.
 *
 * @param taskType task type whose poll data is consulted
 * @param domains ordered candidate domains; entries are trimmed before lookup
 * @return the active domain, or {@code null} when no candidate has recent poll data
 */
private String getActiveDomain(String taskType, String[] domains) {
    // The domain list has to be ordered; the first recently-polled domain wins.
    for (String candidate : domains) {
        PollData pollData = executionDAO.getPollData(taskType, candidate.trim());
        if (pollData != null && validateLastPolledTime.test(pollData)) {
            return pollData.getDomain();
        }
    }
    return null;
}
/**
 * Computes the cumulative execution duration of a task, walking back through its
 * retry chain via retriedTaskId. Called with {@code s = 0} for the latest attempt.
 *
 * @param s running total carried through the recursion
 * @param task task whose attempt duration is added
 * @return accumulated duration in milliseconds
 */
private long getTaskDuration(long s, Task task) {
    // Duration of this single attempt.
    long duration = task.getEndTime() - task.getStartTime();
    s += duration;
    if (task.getRetriedTaskId() == null) {
        return s;
    }
    // NOTE(review): the running total s is both passed into the recursion AND
    // added to its result, so earlier attempts appear to be counted more than
    // once along a retry chain — confirm whether this accumulation is intentional.
    return s + getTaskDuration(s, executionDAO.getTask(task.getRetriedTaskId()));
}
/**
 * Persists and dispatches newly-decided tasks: assigns sequence numbers, saves the
 * tasks, runs synchronous system tasks inline, and queues everything else
 * (async system tasks and worker tasks).
 *
 * @param workflow workflow the tasks belong to (used for the next sequence number)
 * @param tasks tasks to schedule; may be null or empty
 * @return true if any synchronous system task was started inline (workflow state changed)
 * @throws ApplicationException with NOT_FOUND when a system task type has no registered implementation
 */
@VisibleForTesting
boolean scheduleTask(Workflow workflow, List<Task> tasks) {
    if (tasks == null || tasks.isEmpty()) {
        return false;
    }
    // Get the highest seq number
    int count = workflow.getTasks().stream()
            .mapToInt(Task::getSeq)
            .max()
            .orElse(0);
    for (Task task : tasks) {
        if (task.getSeq() == 0) { // Set only if the seq was not set
            task.setSeq(++count);
        }
    }
    // Save the tasks in the DAO
    List<Task> created = executionDAO.createTasks(tasks);
    // Split created tasks: system tasks are handled here, the rest go to worker queues.
    List<Task> createdSystemTasks = created.stream()
            .filter(isSystemTask)
            .collect(Collectors.toList());
    List<Task> tasksToBeQueued = created.stream()
            .filter(isSystemTask.negate())
            .collect(Collectors.toList());
    boolean startedSystemTasks = false;
    // Traverse through all the system tasks, start the sync tasks, in case of async queue the tasks
    for (Task task : createdSystemTasks) {
        WorkflowSystemTask workflowSystemTask = WorkflowSystemTask.get(task.getTaskType());
        if (workflowSystemTask == null) {
            throw new ApplicationException(NOT_FOUND, "No system task found by name " + task.getTaskType());
        }
        task.setStartTime(System.currentTimeMillis());
        if (!workflowSystemTask.isAsync()) {
            // Synchronous system task: run it now and persist the result.
            workflowSystemTask.start(workflow, task, this);
            startedSystemTasks = true;
            executionDAO.updateTask(task);
        } else {
            // Async system task: picked up later by the system-task worker.
            tasksToBeQueued.add(task);
        }
    }
    addTaskToQueue(tasksToBeQueued);
    return startedSystemTasks;
}
/**
 * Enqueues each task in the list via {@link #addTaskToQueue(Task)}.
 *
 * @param tasks tasks to enqueue
 */
private void addTaskToQueue(final List<Task> tasks) {
    tasks.forEach(this::addTaskToQueue);
}
/**
 * Handles a TerminateWorkflowException from the decider: applies the exception's
 * target status, resolves a (possibly dynamic) failure workflow from the
 * definition, persists the offending task if present, and terminates the workflow.
 *
 * @param def workflow definition (source of the failure workflow name)
 * @param workflow workflow being terminated
 * @param tw the terminate exception carrying status, message and optional task
 */
private void terminate(final WorkflowDef def, final Workflow workflow, TerminateWorkflowException tw) {
    if (!workflow.getStatus().isTerminal()) {
        workflow.setStatus(tw.workflowStatus);
    }
    String failureWorkflow = def.getFailureWorkflow();
    if (failureWorkflow != null && failureWorkflow.startsWith("$")) {
        // Dynamic reference of the form "$workflow.input.<param>": the third
        // path component names the workflow-input parameter holding the name.
        String[] paramPathComponents = failureWorkflow.split("\\.");
        String name = paramPathComponents[2]; // name of the input parameter
        failureWorkflow = (String) workflow.getInput().get(name);
    }
    if (tw.task != null) {
        executionDAO.updateTask(tw.task);
    }
    terminateWorkflow(workflow, tw.getMessage(), failureWorkflow);
}
/**
 * Reruns a workflow. With a null taskId the entire workflow is rerun (all tasks
 * removed, workflow reset to RUNNING). Otherwise execution resumes from the given
 * task: later tasks are removed, the task is rescheduled (or set IN_PROGRESS if it
 * is a sub-workflow), and the workflow is re-decided. Recurses into sub-workflows
 * to locate the task.
 *
 * @param workflowId id of the workflow to rerun
 * @param taskId id of the task to rerun from; null reruns the whole workflow
 * @param taskInput replacement input for the rerun task; null keeps the existing input
 * @param workflowInput replacement workflow input; null keeps the existing input
 * @param correlationId replacement correlation id; null keeps the existing one
 * @return true if the rerun was applied (task found or full rerun), false otherwise
 */
private boolean rerunWF(String workflowId, String taskId, Map<String, Object> taskInput,
                        Map<String, Object> workflowInput, String correlationId) {
    // Get the workflow
    Workflow workflow = executionDAO.getWorkflow(workflowId);
    // If the task Id is null it implies that the entire workflow has to be rerun
    if (taskId == null) {
        // remove all tasks
        workflow.getTasks().forEach(task -> executionDAO.removeTask(task.getTaskId()));
        // Set workflow as RUNNING
        workflow.setStatus(WorkflowStatus.RUNNING);
        if (correlationId != null) {
            workflow.setCorrelationId(correlationId);
        }
        if (workflowInput != null) {
            workflow.setInput(workflowInput);
        }
        executionDAO.updateWorkflow(workflow);
        decide(workflowId);
        return true;
    }
    // Now iterate through the tasks and find the "specific" task
    Task rerunFromTask = null;
    for (Task task : workflow.getTasks()) {
        if (task.getTaskId().equals(taskId)) {
            rerunFromTask = task;
            break;
        } else {
            // If not found look into sub workflows
            if (task.getTaskType().equalsIgnoreCase(SubWorkflow.NAME)) {
                String subWorkflowId = task.getInputData().get(SubWorkflow.SUB_WORKFLOW_ID).toString();
                // Recurse: if the task lives in the sub-workflow, rerun it there and
                // resume this workflow from the sub-workflow task.
                if (rerunWF(subWorkflowId, taskId, taskInput, null, null)) {
                    rerunFromTask = task;
                    break;
                }
            }
        }
    }
    if (rerunFromTask != null) {
        // Remove all tasks after the "rerunFromTask"
        for (Task task : workflow.getTasks()) {
            if (task.getSeq() > rerunFromTask.getSeq()) {
                executionDAO.removeTask(task.getTaskId());
            }
        }
        if (rerunFromTask.getTaskType().equalsIgnoreCase(SubWorkflow.NAME)) {
            // if task is sub workflow set task as IN_PROGRESS
            rerunFromTask.setStatus(IN_PROGRESS);
        } else {
            // Set the task to rerun as SCHEDULED
            rerunFromTask.setStatus(SCHEDULED);
            if (taskInput != null) {
                rerunFromTask.setInputData(taskInput);
            }
            addTaskToQueue(rerunFromTask);
        }
        rerunFromTask.setExecuted(false);
        executionDAO.updateTask(rerunFromTask);
        // and set workflow as RUNNING
        workflow.setStatus(WorkflowStatus.RUNNING);
        if (correlationId != null) {
            workflow.setCorrelationId(correlationId);
        }
        if (workflowInput != null) {
            workflow.setInput(workflowInput);
        }
        executionDAO.updateWorkflow(workflow);
        decide(workflowId);
        return true;
    }
    return false;
}
}
|
fix error message
|
core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java
|
fix error message
|
|
Java
|
apache-2.0
|
22c35f4da0b944f9afaa3d7bafafe2b2755edec5
| 0
|
ernestp/consulo,caot/intellij-community,apixandru/intellij-community,petteyg/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,TangHao1987/intellij-community,ryano144/intellij-community,da1z/intellij-community,hurricup/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,fitermay/intellij-community,apixandru/intellij-community,adedayo/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,ryano144/intellij-community,fitermay/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,fitermay/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,da1z/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,consulo/consulo,fengbaicanhe/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,vladmm/intellij-community,fitermay/intellij-community,da1z/intellij-comm
unity,muntasirsyed/intellij-community,signed/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,semonte/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,wreckJ/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,joewalnes/idea-community,adedayo/intellij-community,amith01994/intellij-community,supersven/intellij-community,signed/intellij-community,akosyakov/intellij-community,ernestp/consulo,blademainer/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,ftomassetti/intellij-community,asedunov/intellij-community,apixandru/intellij-community,dslomov/intellij-community,ibinti/intellij-community,hurricup/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,asedunov/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,robovm/robovm-studio,blademainer/intellij-community,dslomov/intellij-community,amith01994/intellij-community,semonte/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,izonder/intellij-community,consulo/consulo,semonte/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,fitermay/intellij-community,gnuhub/intellij-community,gnuhub/intellij-community,akosyakov/intellij-community,ernestp/consulo,FHannes/intellij-community,samthor/intellij-community,youdonghai/intellij-community,ernestp/consulo,izonder/intellij-community,robovm/robovm-studio,fitermay/intellij-community,apixandru/i
ntellij-community,dslomov/intellij-community,joewalnes/idea-community,semonte/intellij-community,xfournet/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,orekyuu/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,da1z/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,kool79/intellij-community,caot/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,TangHao1987/intellij-community,slisson/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,allotria/intellij-community,allotria/intellij-community,holmes/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,kdwink/intellij-community,caot/intellij-community,gnuhub/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,supersven/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,fengbaicanhe/intellij-community,ryano144/int
ellij-community,fnouama/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,petteyg/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,asedunov/intellij-community,kool79/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,fnouama/intellij-community,Distrotech/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,joewalnes/idea-community,slisson/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,kool79/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,samthor/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,suncycheng/intellij-community,slisson/intellij-community,da1z/intellij-community,ivan-fedorov/intellij-community,consulo/consulo,fengbaicanhe/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,signed/intellij-community,retomerz/intellij-community,FHannes/intellij-community,holmes/intellij-community,samthor/intellij-community,xfournet/intellij-community,izonder/intellij-community,izonder/intellij-community,kdwink/intellij-community,jagguli/intellij-community,caot/intellij-community,asedunov/intellij-community,clumsy/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,mgl
ukhikh/intellij-community,xfournet/intellij-community,semonte/intellij-community,caot/intellij-community,youdonghai/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,xfournet/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,fnouama/intellij-community,petteyg/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,izonder/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,robovm/robovm-studio,vladmm/intellij-community,diorcety/intellij-community,holmes/intellij-community,SerCeMan/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,holmes/intellij-community,alphafoobar/intellij-community,semonte/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,slisson/intellij-community,samthor/intellij-community,allotria/intellij-community,Lekanich/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,holmes/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,slisson/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,kool79/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,akos
yakov/intellij-community,supersven/intellij-community,clumsy/intellij-community,fnouama/intellij-community,holmes/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,slisson/intellij-community,izonder/intellij-community,semonte/intellij-community,dslomov/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,akosyakov/intellij-community,fitermay/intellij-community,joewalnes/idea-community,fengbaicanhe/intellij-community,supersven/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,amith01994/intellij-community,Lekanich/intellij-community,ol-loginov/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,ahb0327/intellij-community,ahb0327/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,joewalnes/idea-community,ol-loginov/intellij-community,allotria/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,slisson/intellij-community,FHannes/intellij-community,retomerz/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,supersven/intellij-community,gnuhub/intellij-community,semonte/intellij-community,asedunov/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,ThiagoGarci
aAlves/intellij-community,ryano144/intellij-community,da1z/intellij-community,blademainer/intellij-community,joewalnes/idea-community,wreckJ/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,signed/intellij-community,adedayo/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,signed/intellij-community,suncycheng/intellij-community,caot/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,petteyg/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,kdwink/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,signed/intellij-community,da1z/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,caot/intellij-community,kool79/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,kool79/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,consulo/consulo,suncycheng/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,michaelgallacher/intellij-community,consulo/consulo,vladmm/intellij-community,Lekanich/intellij-community,kool79/intellij-community,vladmm/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,ol-loginov/intell
ij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,signed/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,vvv1559/intellij-community,joewalnes/idea-community,jagguli/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,supersven/intellij-community,akosyakov/intellij-community,ibinti/intellij-community,clumsy/intellij-community,slisson/intellij-community,blademainer/intellij-community,kool79/intellij-community,orekyuu/intellij-community,adedayo/intellij-community,apixandru/intellij-community,samthor/intellij-community,tmpgit/intellij-community,signed/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,fnouama/intellij-community,holmes/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,apixandru/intellij-community,petteyg/intellij-community,apixandru/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,ryano144/intellij-community,jagguli/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,semonte/intellij-community,dslomov/intellij-community,suncycheng
/intellij-community,robovm/robovm-studio,da1z/intellij-community,fitermay/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,xfournet/intellij-community,diorcety/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,ryano144/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,caot/intellij-community,joewalnes/idea-community,ryano144/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,mglukhikh/intellij-community,ahb0327/intellij-community,consulo/consulo,holmes/intellij-community,samthor/intellij-community,robovm/robovm-studio,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,MichaelNedzelsky/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,apixandru/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,retomerz/intellij-community,kdwink/intellij-community,ernestp/consulo,Lekanich/intellij-community,suncycheng/intellij-community,diorcety/intellij-community,clumsy/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,ryano144/intellij-community,holmes/intellij-community,wreckJ/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,MER-GROUP/intellij-community,orekyuu/intellij-community,slisson/intellij-community,tmpgit/intellij-community,samthor/intellij-community,da1z/intellij-community,tmpgit/intellij-community,holmes/intellij-community,nicolargo/intellij-community,nicolargo/intellij-community,nicolar
go/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,supersven/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,apixandru/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,nicolargo/intellij-community,izonder/intellij-community,robovm/robovm-studio,ftomassetti/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,slisson/intellij-community,adedayo/intellij-community,amith01994/intellij-community,kdwink/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,allotria/intellij-community,jagguli/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,dslomov/intellij-community,izonder/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,clumsy/intellij-community,fengbaicanhe/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,robovm/robovm-studio,ibinti/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,retomerz/intellij-community,petteyg/intellij-community,joewalnes/idea-community,vladmm/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,samthor/intellij-community,jagguli/intellij-community,allotria/intellij-community,apixandru/i
ntellij-community,apixandru/intellij-community,vladmm/intellij-community,fitermay/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,ernestp/consulo,allotria/intellij-community,ibinti/intellij-community,FHannes/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,caot/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,supersven/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,signed/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,slisson/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,semonte/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,adedayo/intellij-community,orekyuu/intellij-community,blademainer/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,nicolargo/intellij-community
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.wm.impl.status;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.ui.LafManager;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.progress.TaskInfo;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.ui.popup.IconButton;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.wm.impl.content.GraphicsConfig;
import com.intellij.ui.InplaceButton;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.ui.components.panels.Wrapper;
import com.intellij.util.ui.UIUtil;
import javax.swing.*;
import javax.swing.border.EmptyBorder;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
/**
 * Renders the state of a background {@link TaskInfo} inline in the UI: two text lines,
 * a progress bar and an inplace cancel button. Supports two layouts: compact
 * (single line) and non-compact (multi-line "card" with the process title, painted
 * with a rounded translucent background). Extends {@link ProgressIndicatorBase},
 * so state changes arrive via {@link #onProgressChange()} / {@link #onRunningChange()}.
 */
public class InlineProgressIndicator extends ProgressIndicatorBase implements Disposable {
  // Labels with a frozen preferred height so the layout does not jump when text changes.
  private final FixedHeightLabel myText = new FixedHeightLabel();
  private final FixedHeightLabel myText2 = new FixedHeightLabel();
  private MyProgressBar myProgress;
  private JPanel myComponent;
  private final InplaceButton myCancelButton;
  private final boolean myCompact;
  private TaskInfo myInfo;
  private final FixedHeightLabel myProcessName = new FixedHeightLabel();
  private boolean myDisposed;
  // Timestamp of the last moment the fraction was observed at zero; used by
  // updateProgressNow() to decide when to fall back to indeterminate mode.
  private long myLastTimeProgressWasAtZero;
  private boolean myLastTimeProgressWasZero;

  /**
   * @param compact     true for the single-line layout, false for the card layout
   * @param processInfo the task being displayed; supplies title, cancel tooltip and cancellability
   */
  public InlineProgressIndicator(boolean compact, TaskInfo processInfo) {
    myCompact = compact;
    myInfo = processInfo;
    myCancelButton = new InplaceButton(new IconButton(processInfo.getCancelTooltipText(),
                                                      IconLoader.getIcon("/process/stop.png"),
                                                      IconLoader.getIcon("/process/stopHovered.png")) {
    }, new ActionListener() {
      public void actionPerformed(final ActionEvent e) {
        cancelRequest();
      }
    }).setFillBg(true);
    myCancelButton.setVisible(myInfo.isCancellable());
    myCancelButton.setOpaque(true);
    myCancelButton.setToolTipText(processInfo.getCancelTooltipText());
    myProgress = new MyProgressBar(JProgressBar.HORIZONTAL, compact);
    myComponent = new MyComponent(compact, myProcessName);
    if (myCompact) {
      // Compact: [text | progress] [cancel] on a single line.
      myComponent.setOpaque(true);
      myComponent.setLayout(new BorderLayout(2, 0));
      final JPanel textAndProgress = new JPanel(new BorderLayout());
      myText.setHorizontalAlignment(JLabel.LEFT);
      textAndProgress.add(myText, BorderLayout.CENTER);
      // GridBag wrapper centers the bar vertically while letting it stretch horizontally.
      final NonOpaquePanel progressWrapper = new NonOpaquePanel(new GridBagLayout());
      final GridBagConstraints c = new GridBagConstraints();
      c.weightx = 1;
      c.weighty = 1;
      c.fill = GridBagConstraints.HORIZONTAL;
      progressWrapper.add(myProgress, c);
      textAndProgress.add(progressWrapper, BorderLayout.EAST);
      myComponent.add(textAndProgress, BorderLayout.CENTER);
      myComponent.add(myCancelButton, BorderLayout.EAST);
      myComponent.setToolTipText(processInfo.getTitle() + ". " + IdeBundle.message("progress.text.clickToViewProgressWindow"));
      myProgress.setActive(false);
    } else {
      // Non-compact: title on top, then text / progress bar / secondary text, cancel at the right.
      myComponent.setLayout(new BorderLayout());
      myProcessName.setText(processInfo.getTitle());
      myComponent.add(myProcessName, BorderLayout.NORTH);
      final Font font = myProcessName.getFont();
      // Shrink the title font, with a lower floor under the Aqua look-and-feel.
      final boolean aqua = LafManager.getInstance().isUnderAquaLookAndFeel();
      int size = font.getSize() - (aqua ? 4 : 2);
      if (size < (aqua ? 8 : 10)) {
        size = (aqua ? 8 : 10);
      }
      myProcessName.setFont(font.deriveFont(Font.PLAIN, size));
      myProcessName.setForeground(UIManager.getColor("Panel.background").brighter().brighter());
      myProcessName.setBorder(new EmptyBorder(2, 2, 2, 2));
      final NonOpaquePanel content = new NonOpaquePanel(new BorderLayout());
      content.setBorder(new EmptyBorder(2, 2, 2, 2));
      myComponent.add(content, BorderLayout.CENTER);
      final Wrapper cancelWrapper = new Wrapper(myCancelButton);
      cancelWrapper.setOpaque(false);
      cancelWrapper.setBorder(new EmptyBorder(0, 3, 0, 2));
      content.add(cancelWrapper, BorderLayout.EAST);
      content.add(myText, BorderLayout.NORTH);
      content.add(myProgress, BorderLayout.CENTER);
      content.add(myText2, BorderLayout.SOUTH);
      myComponent.setBorder(new EmptyBorder(2, 2, 2, 2));
      myProgress.setActive(false);
    }
    UIUtil.removeQuaquaVisualMarginsIn(myComponent);
    if (!myCompact) {
      // Pre-compute fixed label heights so empty text does not collapse the layout.
      myText.recomputeSize();
      myText2.recomputeSize();
      myProcessName.recomputeSize();
    }
  }

  /** Invoked by the cancel button and by close-clicks on the panel; subclasses may intercept. */
  protected void cancelRequest() {
    cancel();
  }

  // Running-state change currently triggers an empty update; subclasses override
  // queueRunningUpdate() to coalesce or redirect these notifications.
  private void updateRunning() {
    queueRunningUpdate(new Runnable() {
      public void run() {
      }
    });
  }

  /** Schedules a UI refresh of the progress state via {@link #queueProgressUpdate(Runnable)}. */
  protected void updateProgress() {
    queueProgressUpdate(new Runnable() {
      public void run() {
        if (isDisposed()) return;
        updateProgressNow();
        myComponent.repaint();
      }
    });
  }

  /**
   * Synchronously refreshes all widgets from the indicator state: toggles
   * determinate/indeterminate mode, updates texts, and renders the "Stopping" state.
   * NOTE(review): mutates Swing components directly, so it presumably must run on
   * the EDT — confirm against callers.
   */
  public void updateProgressNow() {
    if (myLastTimeProgressWasAtZero == 0 && getFraction() == 0) {
      myLastTimeProgressWasAtZero = System.currentTimeMillis();
    }
    final long delta = System.currentTimeMillis() - myLastTimeProgressWasAtZero;
    boolean forcedIndeterminite = false;
    boolean indeterminate = isIndeterminate();
    if (!indeterminate && getFraction() == 0) {
      // A determinate task stuck at zero for >2s is shown as indeterminate
      // (non-compact mode only) so the bar does not look frozen.
      if (delta > 2000 && !myCompact) {
        indeterminate = true;
        forcedIndeterminite = true;
      } else {
        forcedIndeterminite = false;
      }
    }
    final boolean visible = getFraction() > 0 || (indeterminate || forcedIndeterminite);
    updateVisibility(myProgress, visible);
    if (indeterminate || forcedIndeterminite) {
      myProgress.setIndeterminate(true);
    }
    else {
      myProgress.setIndeterminate(false);
      myProgress.setMinimum(0);
      myProgress.setMaximum(100);
    }
    if (getFraction() > 0) {
      // Map (0, 1] onto [1, 100] so any non-zero fraction is visibly above zero.
      myProgress.setValue((int)(getFraction() * 99 + 1));
    }
    myText.setText(getText() != null ? getText() : "");
    myText2.setText(getText2() != null ? getText2() : "");
    if (myCompact && myText.getText().length() == 0) {
      // Compact mode has no separate title label; fall back to the task title.
      myText.setText(myInfo.getTitle());
    }
    myCancelButton.setPainting(isCancelable());
    if (getFraction() == 0) {
      // Record when the fraction (re-)entered zero, for the stuck-detection above.
      if (!myLastTimeProgressWasZero) {
        myLastTimeProgressWasAtZero = System.currentTimeMillis();
        myLastTimeProgressWasZero = true;
      }
    } else {
      myLastTimeProgressWasZero = false;
    }
    // "Stopping": cancelled or no longer running, but not yet finished — gray everything out.
    final boolean isStopping = wasStarted() && (isCanceled() || !isRunning()) && !isFinished();
    if (isStopping) {
      if (myCompact) {
        myText.setText("Stopping - " + myText.getText());
      } else {
        myProcessName.setText("Stopping - " + myInfo.getTitle());
      }
      myText.setEnabled(false);
      myText2.setEnabled(false);
      myProgress.setEnabled(false);
      myCancelButton.setPainting(false);
    } else {
      myText.setEnabled(true);
      myText2.setEnabled(true);
      myProgress.setEnabled(true);
      myCancelButton.setPainting(true);
    }
  }

  /** Hook for subclasses; this base implementation never reports itself finished. */
  protected boolean isFinished() {
    return false;
  }

  /** Runs the update synchronously; subclasses may queue it (e.g. onto the EDT) instead. */
  protected void queueProgressUpdate(Runnable update) {
    update.run();
  }

  /** Runs the update synchronously; subclasses may merge or queue running-state updates. */
  protected void queueRunningUpdate(Runnable update) {
    update.run();
  }

  // Toggles the bar's "active" flag and revalidates both the bar and the container,
  // because an inactive compact bar reports a zero preferred size (layout changes).
  private void updateVisibility(MyProgressBar bar, boolean holdsValue) {
    if (holdsValue && !bar.isActive()) {
      bar.setActive(true);
      bar.revalidate();
      bar.repaint();
      myComponent.revalidate();
      myComponent.repaint();
    }
    else if (!holdsValue && bar.isActive()) {
      bar.setActive(false);
      bar.revalidate();
      bar.repaint();
      myComponent.revalidate();
      myComponent.repaint();
    }
  }

  protected void onProgressChange() {
    updateProgress();
  }

  protected void onRunningChange() {
    updateRunning();
  }

  /** @return the Swing component that renders this indicator */
  public JComponent getComponent() {
    return myComponent;
  }

  public boolean isCompact() {
    return myCompact;
  }

  public TaskInfo getInfo() {
    return myInfo;
  }

  /**
   * Label whose preferred height is frozen to that of a sample string ("XXX"),
   * so setting empty text does not change the surrounding layout.
   */
  private static class FixedHeightLabel extends JLabel {
    private Dimension myPrefSize;

    public FixedHeightLabel() {
    }

    // Measure once with placeholder text and cache the resulting height.
    public void recomputeSize() {
      final String old = getText();
      setText("XXX");
      myPrefSize = getPreferredSize();
      setText(old);
    }

    public Dimension getPreferredSize() {
      final Dimension size = super.getPreferredSize();
      if (myPrefSize != null) {
        size.height = myPrefSize.height;
      }
      return size;
    }
  }

  /**
   * Progress bar that can be deactivated: an inactive bar paints nothing, and
   * in compact mode also occupies no space.
   */
  private static class MyProgressBar extends JProgressBar {
    private boolean myActive = true;
    private final boolean myCompact;

    public MyProgressBar(final int orient, boolean compact) {
      super(orient);
      myCompact = compact;
      // Aqua look-and-feel hint for a smaller bar; ignored by other LaFs.
      putClientProperty("JComponent.sizeVariant", "mini");
    }

    public void paint(final Graphics g) {
      if (!myActive) return;
      super.paint(g);
    }

    public boolean isActive() {
      return myActive;
    }

    public Dimension getPreferredSize() {
      if (!myActive && myCompact) return new Dimension(0, 0);
      return super.getPreferredSize();
    }

    public void setActive(final boolean active) {
      myActive = active;
    }
  }

  /**
   * Root panel: forwards close-clicks to {@link #cancelRequest()} and, in
   * non-compact mode, paints a rounded translucent "card" background.
   */
  private class MyComponent extends JPanel {
    private final boolean myCompact;
    private final FixedHeightLabel myProcessName;

    private MyComponent(final boolean compact, final FixedHeightLabel processName) {
      myCompact = compact;
      myProcessName = processName;
      // A close-click anywhere inside the panel cancels the task.
      addMouseListener(new MouseAdapter() {
        public void mousePressed(final MouseEvent e) {
          if (UIUtil.isCloseClick(e) && getBounds().contains(e.getX(), e.getY())) {
            cancelRequest();
          }
        }
      });
    }

    protected void paintComponent(final Graphics g) {
      if (myCompact) {
        super.paintComponent(g);
        return;
      }
      final GraphicsConfig c = new GraphicsConfig(g);
      c.setAntialiasing(true);
      int arc = 8;
      // First clear the whole rounded area with the opaque panel background so the
      // translucent fill below does not blend with stale pixels (artifact fix).
      g.setColor(UIManager.getColor("Panel.background"));
      g.fillRoundRect(0, 0, getWidth() - 1, getHeight() - 1, arc, arc);
      // Translucent (alpha 230) darkened fill over the cleared area.
      Color bg = getBackground().darker().darker();
      bg = new Color(bg.getRed(), bg.getGreen(), bg.getBlue(), 230);
      g.setColor(bg);
      final Rectangle bounds = myProcessName.getBounds();
      final Rectangle label = SwingUtilities.convertRectangle(myProcessName.getParent(), bounds, this);
      g.fillRoundRect(0, 0, getWidth() - 1, getHeight() - 1, arc, arc);
      // Restore the plain background on the lower half and below the title label,
      // leaving only the title strip darkened.
      g.setColor(UIManager.getColor("Panel.background"));
      g.fillRoundRect(0, getHeight() / 2, getWidth() - 1, getHeight() / 2, arc, arc);
      g.fillRect(0, (int)label.getMaxY() + 1, getWidth() - 1, getHeight() / 2);
      g.setColor(bg);
      g.drawRoundRect(0, 0, getWidth() - 1, getHeight() - 1, arc, arc);
      c.restore();
    }
  }

  /** Detaches the UI and drops references; safe to call more than once. */
  public void dispose() {
    if (myDisposed) return;
    myDisposed = true;
    myComponent.removeAll();
    myComponent = null;
    if (myProgress != null) {
      UIUtil.disposeProgress(myProgress);
    }
    myProgress = null;
    myInfo = null;
  }

  private boolean isDisposed() {
    return myDisposed;
  }
}
|
platform/platform-impl/src/com/intellij/openapi/wm/impl/status/InlineProgressIndicator.java
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.wm.impl.status;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.ui.LafManager;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.progress.TaskInfo;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.ui.popup.IconButton;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.wm.impl.content.GraphicsConfig;
import com.intellij.ui.InplaceButton;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.ui.components.panels.Wrapper;
import com.intellij.util.ui.UIUtil;
import javax.swing.*;
import javax.swing.border.EmptyBorder;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
public class InlineProgressIndicator extends ProgressIndicatorBase implements Disposable {
private final FixedHeightLabel myText = new FixedHeightLabel();
private final FixedHeightLabel myText2 = new FixedHeightLabel();
private MyProgressBar myProgress;
private JPanel myComponent;
private final InplaceButton myCancelButton;
private final boolean myCompact;
private TaskInfo myInfo;
private final FixedHeightLabel myProcessName = new FixedHeightLabel();
private boolean myDisposed;
private long myLastTimeProgressWasAtZero;
private boolean myLastTimeProgressWasZero;
public InlineProgressIndicator(boolean compact, TaskInfo processInfo) {
myCompact = compact;
myInfo = processInfo;
myCancelButton = new InplaceButton(new IconButton(processInfo.getCancelTooltipText(),
IconLoader.getIcon("/process/stop.png"),
IconLoader.getIcon("/process/stopHovered.png")) {
}, new ActionListener() {
public void actionPerformed(final ActionEvent e) {
cancelRequest();
}
}).setFillBg(true);
myCancelButton.setVisible(myInfo.isCancellable());
myCancelButton.setOpaque(true);
myCancelButton.setToolTipText(processInfo.getCancelTooltipText());
myProgress = new MyProgressBar(JProgressBar.HORIZONTAL, compact);
myComponent = new MyComponent(compact, myProcessName);
if (myCompact) {
myComponent.setOpaque(true);
myComponent.setLayout(new BorderLayout(2, 0));
final JPanel textAndProgress = new JPanel(new BorderLayout());
myText.setHorizontalAlignment(JLabel.LEFT);
textAndProgress.add(myText, BorderLayout.CENTER);
final NonOpaquePanel progressWrapper = new NonOpaquePanel(new GridBagLayout());
final GridBagConstraints c = new GridBagConstraints();
c.weightx = 1;
c.weighty = 1;
c.fill = GridBagConstraints.HORIZONTAL;
progressWrapper.add(myProgress, c);
textAndProgress.add(progressWrapper, BorderLayout.EAST);
myComponent.add(textAndProgress, BorderLayout.CENTER);
myComponent.add(myCancelButton, BorderLayout.EAST);
myComponent.setToolTipText(processInfo.getTitle() + ". " + IdeBundle.message("progress.text.clickToViewProgressWindow"));
myProgress.setActive(false);
} else {
myComponent.setLayout(new BorderLayout());
myProcessName.setText(processInfo.getTitle());
myComponent.add(myProcessName, BorderLayout.NORTH);
final Font font = myProcessName.getFont();
final boolean aqua = LafManager.getInstance().isUnderAquaLookAndFeel();
int size = font.getSize() - (aqua ? 4 : 2);
if (size < (aqua ? 8 : 10)) {
size = (aqua ? 8 : 10);
}
myProcessName.setFont(font.deriveFont(Font.PLAIN, size));
myProcessName.setForeground(UIManager.getColor("Panel.background").brighter().brighter());
myProcessName.setBorder(new EmptyBorder(2, 2, 2, 2));
final NonOpaquePanel content = new NonOpaquePanel(new BorderLayout());
content.setBorder(new EmptyBorder(2, 2, 2, 2));
myComponent.add(content, BorderLayout.CENTER);
final Wrapper cancelWrapper = new Wrapper(myCancelButton);
cancelWrapper.setOpaque(false);
cancelWrapper.setBorder(new EmptyBorder(0, 3, 0, 2));
content.add(cancelWrapper, BorderLayout.EAST);
content.add(myText, BorderLayout.NORTH);
content.add(myProgress, BorderLayout.CENTER);
content.add(myText2, BorderLayout.SOUTH);
myComponent.setBorder(new EmptyBorder(2, 2, 2, 2));
myProgress.setActive(false);
}
UIUtil.removeQuaquaVisualMarginsIn(myComponent);
if (!myCompact) {
myText.recomputeSize();
myText2.recomputeSize();
myProcessName.recomputeSize();
}
}
protected void cancelRequest() {
cancel();
}
private void updateRunning() {
queueRunningUpdate(new Runnable() {
public void run() {
}
});
}
protected void updateProgress() {
queueProgressUpdate(new Runnable() {
public void run() {
if (isDisposed()) return;
updateProgressNow();
myComponent.repaint();
}
});
}
public void updateProgressNow() {
if (myLastTimeProgressWasAtZero == 0 && getFraction() == 0) {
myLastTimeProgressWasAtZero = System.currentTimeMillis();
}
final long delta = System.currentTimeMillis() - myLastTimeProgressWasAtZero;
boolean forcedIndeterminite = false;
boolean indeterminate = isIndeterminate();
if (!indeterminate && getFraction() == 0) {
if (delta > 2000 && !myCompact) {
indeterminate = true;
forcedIndeterminite = true;
} else {
forcedIndeterminite = false;
}
}
final boolean visible = getFraction() > 0 || (indeterminate || forcedIndeterminite);
updateVisibility(myProgress, visible);
if (indeterminate || forcedIndeterminite) {
myProgress.setIndeterminate(true);
}
else {
myProgress.setIndeterminate(false);
myProgress.setMinimum(0);
myProgress.setMaximum(100);
}
if (getFraction() > 0) {
myProgress.setValue((int)(getFraction() * 99 + 1));
}
myText.setText(getText() != null ? getText() : "");
myText2.setText(getText2() != null ? getText2() : "");
if (myCompact && myText.getText().length() == 0) {
myText.setText(myInfo.getTitle());
}
myCancelButton.setPainting(isCancelable());
if (getFraction() == 0) {
if (!myLastTimeProgressWasZero) {
myLastTimeProgressWasAtZero = System.currentTimeMillis();
myLastTimeProgressWasZero = true;
}
} else {
myLastTimeProgressWasZero = false;
}
final boolean isStopping = wasStarted() && (isCanceled() || !isRunning()) && !isFinished();
if (isStopping) {
if (myCompact) {
myText.setText("Stopping - " + myText.getText());
} else {
myProcessName.setText("Stopping - " + myInfo.getTitle());
}
myText.setEnabled(false);
myText2.setEnabled(false);
myProgress.setEnabled(false);
myCancelButton.setPainting(false);
} else {
myText.setEnabled(true);
myText2.setEnabled(true);
myProgress.setEnabled(true);
myCancelButton.setPainting(true);
}
}
protected boolean isFinished() {
return false;
}
protected void queueProgressUpdate(Runnable update) {
update.run();
}
protected void queueRunningUpdate(Runnable update) {
update.run();
}
private void updateVisibility(MyProgressBar bar, boolean holdsValue) {
if (holdsValue && !bar.isActive()) {
bar.setActive(true);
bar.revalidate();
bar.repaint();
myComponent.revalidate();
myComponent.repaint();
}
else if (!holdsValue && bar.isActive()) {
bar.setActive(false);
bar.revalidate();
bar.repaint();
myComponent.revalidate();
myComponent.repaint();
}
}
protected void onProgressChange() {
updateProgress();
}
protected void onRunningChange() {
updateRunning();
}
public JComponent getComponent() {
return myComponent;
}
public boolean isCompact() {
return myCompact;
}
public TaskInfo getInfo() {
return myInfo;
}
private static class FixedHeightLabel extends JLabel {
private Dimension myPrefSize;
public FixedHeightLabel() {
}
public void recomputeSize() {
final String old = getText();
setText("XXX");
myPrefSize = getPreferredSize();
setText(old);
}
public Dimension getPreferredSize() {
final Dimension size = super.getPreferredSize();
if (myPrefSize != null) {
size.height = myPrefSize.height;
}
return size;
}
}
private static class MyProgressBar extends JProgressBar {
private boolean myActive = true;
private final boolean myCompact;
public MyProgressBar(final int orient, boolean compact) {
super(orient);
myCompact = compact;
putClientProperty("JComponent.sizeVariant", "mini");
}
public void paint(final Graphics g) {
if (!myActive) return;
super.paint(g);
}
public boolean isActive() {
return myActive;
}
public Dimension getPreferredSize() {
if (!myActive && myCompact) return new Dimension(0, 0);
return super.getPreferredSize();
}
public void setActive(final boolean active) {
myActive = active;
}
}
private class MyComponent extends JPanel {
private final boolean myCompact;
private final FixedHeightLabel myProcessName;
private MyComponent(final boolean compact, final FixedHeightLabel processName) {
myCompact = compact;
myProcessName = processName;
addMouseListener(new MouseAdapter() {
public void mousePressed(final MouseEvent e) {
if (UIUtil.isCloseClick(e) && getBounds().contains(e.getX(), e.getY())) {
cancelRequest();
}
}
});
}
protected void paintComponent(final Graphics g) {
if (myCompact) {
super.paintComponent(g);
return;
}
final GraphicsConfig c = new GraphicsConfig(g);
c.setAntialiasing(true);
int arc = 8;
Color bg = getBackground().darker().darker();
bg = new Color(bg.getRed(), bg.getGreen(), bg.getBlue(), 230);
g.setColor(bg);
final Rectangle bounds = myProcessName.getBounds();
final Rectangle label = SwingUtilities.convertRectangle(myProcessName.getParent(), bounds, this);
g.fillRoundRect(0, 0, getWidth() - 1, getHeight() - 1, arc, arc);
g.setColor(UIManager.getColor("Panel.background"));
g.fillRoundRect(0, getHeight() / 2, getWidth() - 1, getHeight() / 2, arc, arc);
g.fillRect(0, (int)label.getMaxY() + 1, getWidth() - 1, getHeight() / 2);
g.setColor(bg);
g.drawRoundRect(0, 0, getWidth() - 1, getHeight() - 1, arc, arc);
c.restore();
}
}
/**
 * Releases UI resources held by this indicator. Idempotent: a second call is a
 * no-op. Clears the component tree, disposes the progress bar via UIUtil and
 * drops all references so they can be garbage collected.
 */
public void dispose() {
    if (myDisposed) return;
    myDisposed = true;
    myComponent.removeAll();
    myComponent = null;
    if (myProgress != null) {
        // UIUtil.disposeProgress presumably detaches L&F resources from the bar
        // — TODO confirm against UIUtil.
        UIUtil.disposeProgress(myProgress);
    }
    myProgress = null;
    myInfo = null;
}
/** @return true once {@link #dispose()} has run. */
private boolean isDisposed() {
    return myDisposed;
}
}
|
fix artifacts in background process popup
|
platform/platform-impl/src/com/intellij/openapi/wm/impl/status/InlineProgressIndicator.java
|
fix artifacts in background process popup
|
|
Java
|
apache-2.0
|
aabcd28ac5c65dddc131f201f181224b8481e65b
| 0
|
wmedvede/guvnor,porcelli-forks/guvnor,droolsjbpm/guvnor,kiereleaseuser/guvnor,adrielparedes/guvnor,kiereleaseuser/guvnor,wmedvede/guvnor,mbiarnes/guvnor,droolsjbpm/guvnor,hxf0801/guvnor,hxf0801/guvnor,droolsjbpm/guvnor,adrielparedes/guvnor,etirelli/guvnor,kiereleaseuser/guvnor,yurloc/guvnor,etirelli/guvnor,hxf0801/guvnor,mbiarnes/guvnor,adrielparedes/guvnor,wmedvede/guvnor,mswiderski/guvnor,nmirasch/guvnor,baldimir/guvnor,yurloc/guvnor,nmirasch/guvnor,porcelli-forks/guvnor,mbiarnes/guvnor,baldimir/guvnor,porcelli-forks/guvnor,nmirasch/guvnor,baldimir/guvnor,etirelli/guvnor
|
/*
* Copyright 2012 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.guvnor.projecteditor.client.forms;
import javax.enterprise.event.Observes;
import javax.enterprise.inject.New;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;
import org.jboss.errai.bus.client.api.RemoteCallback;
import org.jboss.errai.ioc.client.api.Caller;
import org.kie.guvnor.project.service.KModuleService;
import org.kie.guvnor.project.service.ProjectService;
import org.kie.guvnor.projecteditor.client.resources.i18n.ProjectEditorConstants;
import org.kie.workbench.common.services.shared.builder.BuildService;
import org.kie.workbench.common.services.shared.metadata.MetadataService;
import org.kie.workbench.common.services.shared.metadata.model.Metadata;
import org.kie.workbench.common.widgets.client.callbacks.HasBusyIndicatorDefaultErrorCallback;
import org.kie.workbench.common.widgets.client.popups.file.CommandWithCommitMessage;
import org.kie.workbench.common.widgets.client.popups.file.SaveOperationService;
import org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants;
import org.kie.workbench.common.widgets.configresource.client.widget.unbound.ImportsWidgetPresenter;
import org.uberfire.backend.vfs.Path;
import org.uberfire.client.annotations.WorkbenchMenu;
import org.uberfire.client.annotations.WorkbenchPartTitle;
import org.uberfire.client.annotations.WorkbenchPartView;
import org.uberfire.client.annotations.WorkbenchScreen;
import org.uberfire.client.context.WorkbenchContext;
import org.uberfire.client.mvp.Command;
import org.uberfire.client.workbench.widgets.events.PathChangeEvent;
import org.uberfire.client.workbench.widgets.menu.MenuFactory;
import org.uberfire.client.workbench.widgets.menu.Menus;
/**
 * Presenter for the "projectScreen" workbench screen. Hosts the POM editor, the
 * kmodule.xml editor and the project-imports page, lazily loads their metadata
 * tabs, and contributes the File->Save and Build menu entries.
 */
@WorkbenchScreen(identifier = "projectScreen")
public class ProjectScreenPresenter
        implements ProjectScreenView.Presenter {

    private final ImportsWidgetPresenter importsWidgetPresenter;
    private ProjectScreenView view;
    private POMEditorPanel pomPanel;
    private KModuleEditorPanel kModuleEditorPanel;
    private Caller<KModuleService> kModuleServiceCaller;
    private Caller<BuildService> buildServiceCaller;
    // Paths of the editable resources; resolved lazily from the active workbench path.
    private Path pathToPomXML;
    private Path pathToKModuleXML;
    private Path pathToProjectImports;
    private Caller<MetadataService> metadataService;
    // Metadata objects are fetched on first tab selection and cached until the
    // selected project changes (see showCurrentProjectInfoIfAny).
    private Metadata kmoduleMetadata;
    private Metadata projectImportsMetadata;
    private Metadata pomMetadata;
    private SaveOperationService saveOperationService;
    private Menus menus;
    private Caller<ProjectService> projectService;

    /**
     * No-arg constructor, presumably required by the CDI/Errai proxying machinery
     * — TODO confirm. FIX: the blank final field {@code importsWidgetPresenter}
     * must be definitely assigned on every constructor path, otherwise this class
     * does not compile; assign null here (this instance is never used directly).
     */
    public ProjectScreenPresenter() {
        this.importsWidgetPresenter = null;
    }

    /**
     * Full DI constructor: wires the view and sub-editors, shows the currently
     * selected project (if any) and builds the menu bar.
     */
    @Inject
    public ProjectScreenPresenter(@New ProjectScreenView view,
                                  @New POMEditorPanel pomPanel,
                                  @New KModuleEditorPanel kModuleEditorPanel,
                                  @New ImportsWidgetPresenter importsWidgetPresenter,
                                  WorkbenchContext workbenchContext,
                                  Caller<ProjectService> projectService,
                                  Caller<KModuleService> kModuleServiceCaller,
                                  Caller<BuildService> buildServiceCaller,
                                  Caller<MetadataService> metadataService,
                                  SaveOperationService saveOperationService) {
        this.view = view;
        this.pomPanel = pomPanel;
        this.kModuleEditorPanel = kModuleEditorPanel;
        this.importsWidgetPresenter = importsWidgetPresenter;
        this.kModuleServiceCaller = kModuleServiceCaller;
        this.buildServiceCaller = buildServiceCaller;
        this.metadataService = metadataService;
        this.saveOperationService = saveOperationService;
        this.projectService = projectService;
        view.setPresenter(this);
        view.setPOMEditorPanel(pomPanel);
        view.setKModuleEditorPanel(kModuleEditorPanel);
        view.setImportsPage(importsWidgetPresenter);
        showCurrentProjectInfoIfAny(workbenchContext.getActivePath());
        makeMenuBar();
    }

    /** Reacts to workbench path changes by (re)loading the project at that path. */
    public void selectedPathChanged(@Observes final PathChangeEvent event) {
        showCurrentProjectInfoIfAny(event.getPath());
    }

    /**
     * Resolves the pom.xml for the given path and, if it denotes a different
     * project than the one currently shown, re-initializes the editors and
     * invalidates the cached metadata.
     */
    private void showCurrentProjectInfoIfAny(Path path) {
        projectService.call(new RemoteCallback<Path>() {
            @Override
            public void callback(Path pathToPomXML) {
                // TODO: Check save if there are changes -Rikkola-
                if (pathToPomXML != null && (ProjectScreenPresenter.this.pathToPomXML == null || !ProjectScreenPresenter.this.pathToPomXML.equals(pathToPomXML))) {
                    //                    if (ProjectScreenPresenter.this.pathToPomXML != null
                    //                            && pomPanel.isDirty()
                    //                            ) {
                    //                        Window.alert("There are unsaved changes");
                    //                    } else {
                    ProjectScreenPresenter.this.pathToPomXML = pathToPomXML;
                    init();
                    view.selectMainTab();
                    // Drop cached metadata; it belongs to the previous project.
                    pomMetadata = null;
                    kmoduleMetadata = null;
                    //                    }
                }
            }
        }).resolvePathToPom(path);
    }

    /** Loads the POM editor and resolves the related kmodule.xml, with a busy indicator. */
    private void init() {
        view.showBusyIndicator(CommonConstants.INSTANCE.Loading());
        pomPanel.init(pathToPomXML, false);
        addKModuleEditor();
        view.hideBusyIndicator();
    }

    /** Builds the File->Save and Build top-level menus. */
    private void makeMenuBar() {
        menus = MenuFactory
                .newTopLevelMenu(CommonConstants.INSTANCE.File())
                .menus()
                .menu(CommonConstants.INSTANCE.Save())
                .respondsWith(getSaveCommand())
                .endMenu()
                .endMenus()
                .endMenu()
                .newTopLevelMenu(view.getBuildMenuItemText())
                .respondsWith(new Command() {
                    @Override
                    public void execute() {
                        view.showBusyIndicator(ProjectEditorConstants.INSTANCE.Building());
                        buildServiceCaller.call(getBuildSuccessCallback(),
                                                new HasBusyIndicatorDefaultErrorCallback(view)).buildAndDeploy(pathToPomXML);
                    }
                })
                .endMenu().build();
        // For now every module is a kie project.
        //        if (pathToKModuleXML == null) {
        //            menus.addItem(new DefaultMenuItemCommand(
        //                    view.getEnableKieProjectMenuItemText(),
        //                    new Command() {
        //                        @Override
        //                        public void execute() {
        //                            projectEditorServiceCaller.call(
        //                                    new RemoteCallback<Path>() {
        //                                        @Override
        //                                        public void callback(Path pathToKProject) {
        //                                            pathToKModuleXML = pathToKProject;
        //                                            setUpKProject(pathToKProject);
        //                                        }
        //                                    }
        //                            ).setUpKModuleStructure(pathToPomXML);
        //                        }
        //                    }
        //            ));
        //        }
    }

    /**
     * Returns the Save command: prompts for a commit message, then saves the POM,
     * the kmodule.xml (if initialized) and the imports, chained via callbacks.
     */
    private Command getSaveCommand() {
        return new Command() {
            @Override
            public void execute() {
                saveOperationService.save(pathToPomXML,
                                          new CommandWithCommitMessage() {
                                              @Override
                                              public void execute(final String comment) {
                                                  view.showBusyIndicator(CommonConstants.INSTANCE.Saving());
                                                  // We need to use callback here or jgit will break when we save two files at the same time.
                                                  pomPanel.save(comment,
                                                                new Command() {
                                                                    @Override
                                                                    public void execute() {
                                                                        if (kModuleEditorPanel.hasBeenInitialized()) {
                                                                            kModuleEditorPanel.save(comment,
                                                                                                    new Command() {
                                                                                                        @Override
                                                                                                        public void execute() {
                                                                                                            importsWidgetPresenter.save(comment, projectImportsMetadata);
                                                                                                        }
                                                                                                    },
                                                                                                    kmoduleMetadata);
                                                                        }
                                                                        view.hideBusyIndicator();
                                                                    }
                                                                }, pomMetadata);
                                              }
                                          });
            }
        };
    }

    /** Hides the busy indicator once a build-and-deploy round trip completes. */
    private RemoteCallback getBuildSuccessCallback() {
        return new RemoteCallback<Void>() {
            @Override
            public void callback(final Void v) {
                view.hideBusyIndicator();
            }
        };
    }

    /** Asks the backend for the kmodule.xml path related to the current pom.xml. */
    private void addKModuleEditor() {
        kModuleServiceCaller.call(getResolveKModulePathSuccessCallback(),
                                  new HasBusyIndicatorDefaultErrorCallback(view)).pathToRelatedKModuleFileIfAny(pathToPomXML);
    }

    /** Stores the resolved kmodule.xml path and re-initializes the editor if it was open. */
    private RemoteCallback<Path> getResolveKModulePathSuccessCallback() {
        return new RemoteCallback<Path>() {
            @Override
            public void callback(final Path pathToKModuleXML) {
                ProjectScreenPresenter.this.pathToKModuleXML = pathToKModuleXML;
                if (kModuleEditorPanel.hasBeenInitialized()) {
                    kModuleEditorPanel.init(pathToKModuleXML, false);
                }
            }
        };
    }

    @WorkbenchPartTitle
    public String getTitle() {
        return ProjectEditorConstants.INSTANCE.ProjectScreen();
    }

    @WorkbenchPartView
    public Widget asWidget() {
        return view.asWidget();
    }

    @WorkbenchMenu
    public Menus getMenus() {
        return menus;
    }

    /** Lazily fetches the POM metadata the first time its tab is selected. */
    @Override
    public void onPOMMetadataTabSelected() {
        if (pomMetadata == null) {
            view.showBusyIndicator(CommonConstants.INSTANCE.Loading());
            metadataService.call(getPOMMetadataSuccessCallback(),
                                 new HasBusyIndicatorDefaultErrorCallback(view)).getMetadata(pathToPomXML);
        }
    }

    /** Lazily initializes the kmodule editor the first time its tab is selected. */
    @Override
    public void onKModuleTabSelected() {
        if (!kModuleEditorPanel.hasBeenInitialized()) {
            kModuleEditorPanel.init(pathToKModuleXML, false);
        }
    }

    /** Lazily fetches the kmodule metadata the first time its tab is selected. */
    @Override
    public void onKModuleMetadataTabSelected() {
        if (kmoduleMetadata == null) {
            view.showBusyIndicator(CommonConstants.INSTANCE.Loading());
            metadataService.call(getKModuleMetadataSuccessCallback(),
                                 new HasBusyIndicatorDefaultErrorCallback(view)).getMetadata(pathToKModuleXML);
        }
    }

    private RemoteCallback<Metadata> getPOMMetadataSuccessCallback() {
        return new RemoteCallback<Metadata>() {
            @Override
            public void callback(final Metadata metadata) {
                pomMetadata = metadata;
                view.hideBusyIndicator();
                view.setPOMMetadata(metadata);
            }
        };
    }

    private RemoteCallback<Metadata> getKModuleMetadataSuccessCallback() {
        return new RemoteCallback<Metadata>() {
            @Override
            public void callback(final Metadata metadata) {
                kmoduleMetadata = metadata;
                view.hideBusyIndicator();
                view.setKModuleMetadata(metadata);
            }
        };
    }

    /** Lazily resolves the project-imports path and opens the imports page. */
    @Override
    public void onImportsPageSelected() {
        if (!importsWidgetPresenter.hasBeenInitialized()) {
            projectService.call(new RemoteCallback<Path>() {
                @Override
                public void callback(Path path) {
                    pathToProjectImports = path;
                    importsWidgetPresenter.init(pathToProjectImports, false);
                }
            }).resolvePathToProjectImports(pathToPomXML);
        }
    }

    /** Lazily fetches the imports metadata the first time its tab is selected. */
    @Override
    public void onImportsMetadataTabSelected() {
        if (projectImportsMetadata == null) {
            view.showBusyIndicator(CommonConstants.INSTANCE.Loading());
            metadataService.call(getProjectImportsMetadataSuccessCallback(),
                                 new HasBusyIndicatorDefaultErrorCallback(view)).getMetadata(pathToProjectImports);
        }
    }

    private RemoteCallback<Metadata> getProjectImportsMetadataSuccessCallback() {
        return new RemoteCallback<Metadata>() {
            @Override
            public void callback(final Metadata metadata) {
                projectImportsMetadata = metadata;
                view.hideBusyIndicator();
                view.setProjectImportsMetadata(metadata);
            }
        };
    }
}
|
guvnor-ng/guvnor-editors/guvnor-project-editor/guvnor-project-editor-client/src/main/java/org/kie/guvnor/projecteditor/client/forms/ProjectScreenPresenter.java
|
/*
* Copyright 2012 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.guvnor.projecteditor.client.forms;
import javax.enterprise.event.Observes;
import javax.enterprise.inject.New;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;
import org.jboss.errai.bus.client.api.RemoteCallback;
import org.jboss.errai.ioc.client.api.Caller;
import org.kie.guvnor.project.service.KModuleService;
import org.kie.guvnor.project.service.ProjectService;
import org.kie.guvnor.projecteditor.client.resources.i18n.ProjectEditorConstants;
import org.kie.workbench.common.services.shared.builder.BuildService;
import org.kie.workbench.common.services.shared.metadata.MetadataService;
import org.kie.workbench.common.services.shared.metadata.model.Metadata;
import org.kie.workbench.common.widgets.client.callbacks.HasBusyIndicatorDefaultErrorCallback;
import org.kie.workbench.common.widgets.client.popups.file.CommandWithCommitMessage;
import org.kie.workbench.common.widgets.client.popups.file.SaveOperationService;
import org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants;
import org.kie.workbench.common.widgets.configresource.client.widget.unbound.ImportsWidgetPresenter;
import org.uberfire.backend.vfs.Path;
import org.uberfire.client.annotations.WorkbenchMenu;
import org.uberfire.client.annotations.WorkbenchPartTitle;
import org.uberfire.client.annotations.WorkbenchPartView;
import org.uberfire.client.annotations.WorkbenchScreen;
import org.uberfire.client.context.WorkbenchContext;
import org.uberfire.client.mvp.Command;
import org.uberfire.client.workbench.widgets.events.PathChangeEvent;
import org.uberfire.client.workbench.widgets.menu.MenuFactory;
import org.uberfire.client.workbench.widgets.menu.Menus;
/**
 * Presenter for the "projectScreen" workbench screen. Hosts the POM editor, the
 * kmodule.xml editor and the project-imports page, lazily loads their metadata
 * tabs, and contributes the File->Save and Build menu entries.
 */
@WorkbenchScreen(identifier = "projectScreen")
public class ProjectScreenPresenter
        implements ProjectScreenView.Presenter {

    private final ImportsWidgetPresenter importsWidgetPresenter;
    private ProjectScreenView view;
    private POMEditorPanel pomPanel;
    private KModuleEditorPanel kModuleEditorPanel;
    private Caller<KModuleService> kModuleServiceCaller;
    private Caller<BuildService> buildServiceCaller;
    // Paths of the editable resources; resolved lazily from the active workbench path.
    private Path pathToPomXML;
    private Path pathToKModuleXML;
    private Path pathToProjectImports;
    private Caller<MetadataService> metadataService;
    // Metadata objects are fetched on first tab selection and cached until the
    // selected project changes (see showCurrentProjectInfoIfAny).
    private Metadata kmoduleMetadata;
    private Metadata projectImportsMetadata;
    private Metadata pomMetadata;
    private SaveOperationService saveOperationService;
    private Menus menus;
    private Caller<ProjectService> projectService;

    /**
     * DI constructor: wires the view and sub-editors, shows the currently
     * selected project (if any) and builds the menu bar.
     */
    @Inject
    public ProjectScreenPresenter(@New ProjectScreenView view,
                                  @New POMEditorPanel pomPanel,
                                  @New KModuleEditorPanel kModuleEditorPanel,
                                  @New ImportsWidgetPresenter importsWidgetPresenter,
                                  WorkbenchContext workbenchContext,
                                  Caller<ProjectService> projectService,
                                  Caller<KModuleService> kModuleServiceCaller,
                                  Caller<BuildService> buildServiceCaller,
                                  Caller<MetadataService> metadataService,
                                  SaveOperationService saveOperationService) {
        this.view = view;
        this.pomPanel = pomPanel;
        this.kModuleEditorPanel = kModuleEditorPanel;
        this.importsWidgetPresenter = importsWidgetPresenter;
        this.kModuleServiceCaller = kModuleServiceCaller;
        this.buildServiceCaller = buildServiceCaller;
        this.metadataService = metadataService;
        this.saveOperationService = saveOperationService;
        this.projectService = projectService;
        view.setPresenter(this);
        view.setPOMEditorPanel(pomPanel);
        view.setKModuleEditorPanel(kModuleEditorPanel);
        view.setImportsPage(importsWidgetPresenter);
        showCurrentProjectInfoIfAny(workbenchContext.getActivePath());
        makeMenuBar();
    }

    /** Reacts to workbench path changes by (re)loading the project at that path. */
    public void selectedPathChanged(@Observes final PathChangeEvent event) {
        showCurrentProjectInfoIfAny(event.getPath());
    }

    /**
     * Resolves the pom.xml for the given path and, if it denotes a different
     * project than the one currently shown, re-initializes the editors and
     * invalidates the cached metadata.
     */
    private void showCurrentProjectInfoIfAny(Path path) {
        projectService.call(new RemoteCallback<Path>() {
            @Override
            public void callback(Path pathToPomXML) {
                // TODO: Check save if there are changes -Rikkola-
                if (pathToPomXML != null && (ProjectScreenPresenter.this.pathToPomXML == null || !ProjectScreenPresenter.this.pathToPomXML.equals(pathToPomXML))) {
                    //                    if (ProjectScreenPresenter.this.pathToPomXML != null
                    //                            && pomPanel.isDirty()
                    //                            ) {
                    //                        Window.alert("There are unsaved changes");
                    //                    } else {
                    ProjectScreenPresenter.this.pathToPomXML = pathToPomXML;
                    init();
                    view.selectMainTab();
                    // Drop cached metadata; it belongs to the previous project.
                    pomMetadata = null;
                    kmoduleMetadata = null;
                    //                    }
                }
            }
        }).resolvePathToPom(path);
    }

    /** Loads the POM editor and resolves the related kmodule.xml, with a busy indicator. */
    private void init() {
        view.showBusyIndicator(CommonConstants.INSTANCE.Loading());
        pomPanel.init(pathToPomXML, false);
        addKModuleEditor();
        view.hideBusyIndicator();
    }

    /** Builds the File->Save and Build top-level menus. */
    private void makeMenuBar() {
        menus = MenuFactory
                .newTopLevelMenu(CommonConstants.INSTANCE.File())
                .menus()
                .menu(CommonConstants.INSTANCE.Save())
                .respondsWith(getSaveCommand())
                .endMenu()
                .endMenus()
                .endMenu()
                .newTopLevelMenu(view.getBuildMenuItemText())
                .respondsWith(new Command() {
                    @Override
                    public void execute() {
                        view.showBusyIndicator(ProjectEditorConstants.INSTANCE.Building());
                        buildServiceCaller.call(getBuildSuccessCallback(),
                                                new HasBusyIndicatorDefaultErrorCallback(view)).buildAndDeploy(pathToPomXML);
                    }
                })
                .endMenu().build();
        // For now every module is a kie project.
        //        if (pathToKModuleXML == null) {
        //            menus.addItem(new DefaultMenuItemCommand(
        //                    view.getEnableKieProjectMenuItemText(),
        //                    new Command() {
        //                        @Override
        //                        public void execute() {
        //                            projectEditorServiceCaller.call(
        //                                    new RemoteCallback<Path>() {
        //                                        @Override
        //                                        public void callback(Path pathToKProject) {
        //                                            pathToKModuleXML = pathToKProject;
        //                                            setUpKProject(pathToKProject);
        //                                        }
        //                                    }
        //                            ).setUpKModuleStructure(pathToPomXML);
        //                        }
        //                    }
        //            ));
        //        }
    }

    /**
     * Returns the Save command: prompts for a commit message, then saves the POM,
     * the kmodule.xml (if initialized) and the imports, chained via callbacks.
     */
    private Command getSaveCommand() {
        return new Command() {
            @Override
            public void execute() {
                saveOperationService.save(pathToPomXML,
                                          new CommandWithCommitMessage() {
                                              @Override
                                              public void execute(final String comment) {
                                                  view.showBusyIndicator(CommonConstants.INSTANCE.Saving());
                                                  // We need to use callback here or jgit will break when we save two files at the same time.
                                                  pomPanel.save(comment,
                                                                new Command() {
                                                                    @Override
                                                                    public void execute() {
                                                                        if (kModuleEditorPanel.hasBeenInitialized()) {
                                                                            kModuleEditorPanel.save(comment,
                                                                                                    new Command() {
                                                                                                        @Override
                                                                                                        public void execute() {
                                                                                                            importsWidgetPresenter.save(comment, projectImportsMetadata);
                                                                                                        }
                                                                                                    },
                                                                                                    kmoduleMetadata);
                                                                        }
                                                                        view.hideBusyIndicator();
                                                                    }
                                                                }, pomMetadata);
                                              }
                                          });
            }
        };
    }

    /** Hides the busy indicator once a build-and-deploy round trip completes. */
    private RemoteCallback getBuildSuccessCallback() {
        return new RemoteCallback<Void>() {
            @Override
            public void callback(final Void v) {
                view.hideBusyIndicator();
            }
        };
    }

    /** Asks the backend for the kmodule.xml path related to the current pom.xml. */
    private void addKModuleEditor() {
        kModuleServiceCaller.call(getResolveKModulePathSuccessCallback(),
                                  new HasBusyIndicatorDefaultErrorCallback(view)).pathToRelatedKModuleFileIfAny(pathToPomXML);
    }

    /** Stores the resolved kmodule.xml path and re-initializes the editor if it was open. */
    private RemoteCallback<Path> getResolveKModulePathSuccessCallback() {
        return new RemoteCallback<Path>() {
            @Override
            public void callback(final Path pathToKModuleXML) {
                ProjectScreenPresenter.this.pathToKModuleXML = pathToKModuleXML;
                if (kModuleEditorPanel.hasBeenInitialized()) {
                    kModuleEditorPanel.init(pathToKModuleXML, false);
                }
            }
        };
    }

    @WorkbenchPartTitle
    public String getTitle() {
        return ProjectEditorConstants.INSTANCE.ProjectScreen();
    }

    @WorkbenchPartView
    public Widget asWidget() {
        return view.asWidget();
    }

    @WorkbenchMenu
    public Menus getMenus() {
        return menus;
    }

    /** Lazily fetches the POM metadata the first time its tab is selected. */
    @Override
    public void onPOMMetadataTabSelected() {
        if (pomMetadata == null) {
            view.showBusyIndicator(CommonConstants.INSTANCE.Loading());
            metadataService.call(getPOMMetadataSuccessCallback(),
                                 new HasBusyIndicatorDefaultErrorCallback(view)).getMetadata(pathToPomXML);
        }
    }

    /** Lazily initializes the kmodule editor the first time its tab is selected. */
    @Override
    public void onKModuleTabSelected() {
        if (!kModuleEditorPanel.hasBeenInitialized()) {
            kModuleEditorPanel.init(pathToKModuleXML, false);
        }
    }

    /** Lazily fetches the kmodule metadata the first time its tab is selected. */
    @Override
    public void onKModuleMetadataTabSelected() {
        if (kmoduleMetadata == null) {
            view.showBusyIndicator(CommonConstants.INSTANCE.Loading());
            metadataService.call(getKModuleMetadataSuccessCallback(),
                                 new HasBusyIndicatorDefaultErrorCallback(view)).getMetadata(pathToKModuleXML);
        }
    }

    private RemoteCallback<Metadata> getPOMMetadataSuccessCallback() {
        return new RemoteCallback<Metadata>() {
            @Override
            public void callback(final Metadata metadata) {
                pomMetadata = metadata;
                view.hideBusyIndicator();
                view.setPOMMetadata(metadata);
            }
        };
    }

    private RemoteCallback<Metadata> getKModuleMetadataSuccessCallback() {
        return new RemoteCallback<Metadata>() {
            @Override
            public void callback(final Metadata metadata) {
                kmoduleMetadata = metadata;
                view.hideBusyIndicator();
                view.setKModuleMetadata(metadata);
            }
        };
    }

    /** Lazily resolves the project-imports path and opens the imports page. */
    @Override
    public void onImportsPageSelected() {
        if (!importsWidgetPresenter.hasBeenInitialized()) {
            projectService.call(new RemoteCallback<Path>() {
                @Override
                public void callback(Path path) {
                    pathToProjectImports = path;
                    importsWidgetPresenter.init(pathToProjectImports, false);
                }
            }).resolvePathToProjectImports(pathToPomXML);
        }
    }

    /** Lazily fetches the imports metadata the first time its tab is selected. */
    @Override
    public void onImportsMetadataTabSelected() {
        if (projectImportsMetadata == null) {
            view.showBusyIndicator(CommonConstants.INSTANCE.Loading());
            metadataService.call(getProjectImportsMetadataSuccessCallback(),
                                 new HasBusyIndicatorDefaultErrorCallback(view)).getMetadata(pathToProjectImports);
        }
    }

    private RemoteCallback<Metadata> getProjectImportsMetadataSuccessCallback() {
        return new RemoteCallback<Metadata>() {
            @Override
            public void callback(final Metadata metadata) {
                projectImportsMetadata = metadata;
                view.hideBusyIndicator();
                view.setProjectImportsMetadata(metadata);
            }
        };
    }
}
|
fix for circular deps
|
guvnor-ng/guvnor-editors/guvnor-project-editor/guvnor-project-editor-client/src/main/java/org/kie/guvnor/projecteditor/client/forms/ProjectScreenPresenter.java
|
fix for circular deps
|
|
Java
|
apache-2.0
|
cac78f467b15d759b0426928dc8c5c994aa17f3c
| 0
|
sbrossie/killbill,sbrossie/killbill,killbill/killbill,sbrossie/killbill,killbill/killbill,sbrossie/killbill,sbrossie/killbill,killbill/killbill,killbill/killbill,killbill/killbill
|
/*
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014-2018 Groupon, Inc
* Copyright 2014-2018 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.jaxrs.resources;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.PropertyResourceBundle;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import org.joda.time.LocalDate;
import org.killbill.billing.ErrorCode;
import org.killbill.billing.ObjectType;
import org.killbill.billing.account.api.Account;
import org.killbill.billing.account.api.AccountApiException;
import org.killbill.billing.account.api.AccountUserApi;
import org.killbill.billing.catalog.DefaultPlanPhasePriceOverride;
import org.killbill.billing.catalog.api.BillingActionPolicy;
import org.killbill.billing.catalog.api.Currency;
import org.killbill.billing.catalog.api.PlanPhasePriceOverride;
import org.killbill.billing.catalog.api.PlanPhaseSpecifier;
import org.killbill.billing.catalog.api.ProductCategory;
import org.killbill.billing.entitlement.api.SubscriptionApiException;
import org.killbill.billing.entitlement.api.SubscriptionEventType;
import org.killbill.billing.invoice.api.DryRunArguments;
import org.killbill.billing.invoice.api.DryRunType;
import org.killbill.billing.invoice.api.Invoice;
import org.killbill.billing.invoice.api.InvoiceApiException;
import org.killbill.billing.invoice.api.InvoiceItem;
import org.killbill.billing.invoice.api.InvoicePayment;
import org.killbill.billing.invoice.api.InvoiceUserApi;
import org.killbill.billing.jaxrs.json.CustomFieldJson;
import org.killbill.billing.jaxrs.json.InvoiceDryRunJson;
import org.killbill.billing.jaxrs.json.InvoiceItemJson;
import org.killbill.billing.jaxrs.json.InvoiceJson;
import org.killbill.billing.jaxrs.json.InvoicePaymentJson;
import org.killbill.billing.jaxrs.json.PhasePriceOverrideJson;
import org.killbill.billing.jaxrs.json.TagJson;
import org.killbill.billing.jaxrs.util.Context;
import org.killbill.billing.jaxrs.util.JaxrsUriBuilder;
import org.killbill.billing.payment.api.Payment;
import org.killbill.billing.payment.api.PaymentApi;
import org.killbill.billing.payment.api.PaymentApiException;
import org.killbill.billing.payment.api.PluginProperty;
import org.killbill.billing.tenant.api.TenantApiException;
import org.killbill.billing.tenant.api.TenantKV.TenantKey;
import org.killbill.billing.tenant.api.TenantUserApi;
import org.killbill.billing.util.LocaleUtils;
import org.killbill.billing.util.api.AuditUserApi;
import org.killbill.billing.util.api.CustomFieldApiException;
import org.killbill.billing.util.api.CustomFieldUserApi;
import org.killbill.billing.util.api.TagApiException;
import org.killbill.billing.util.api.TagDefinitionApiException;
import org.killbill.billing.util.api.TagUserApi;
import org.killbill.billing.util.audit.AccountAuditLogs;
import org.killbill.billing.util.callcontext.CallContext;
import org.killbill.billing.util.callcontext.TenantContext;
import org.killbill.billing.util.customfield.CustomField;
import org.killbill.billing.util.entity.Pagination;
import org.killbill.clock.Clock;
import org.killbill.commons.metrics.TimedResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Ordering;
import com.google.inject.Inject;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.MediaType.TEXT_HTML;
import static javax.ws.rs.core.MediaType.TEXT_PLAIN;
@Path(JaxrsResource.INVOICES_PATH)
@Api(value = JaxrsResource.INVOICES_PATH, description = "Operations on invoices", tags="Invoice")
public class InvoiceResource extends JaxRsResourceBase {
private static final Logger log = LoggerFactory.getLogger(InvoiceResource.class);
private static final String ID_PARAM_NAME = "invoiceId";
private static final String LOCALE_PARAM_NAME = "locale";
private final InvoiceUserApi invoiceApi;
private final TenantUserApi tenantApi;
private final Locale defaultLocale;
private static final Ordering<InvoicePaymentJson> INVOICE_PAYMENT_ORDERING = Ordering.from(new Comparator<InvoicePaymentJson>() {
@Override
public int compare(final InvoicePaymentJson o1, final InvoicePaymentJson o2) {
return o1.getTransactions().get(0).getEffectiveDate().compareTo(o2.getTransactions().get(0).getEffectiveDate());
}
});
/**
 * DI constructor. Passes the shared collaborators to the JaxRsResourceBase
 * superclass and keeps the invoice and tenant APIs locally.
 */
@Inject
public InvoiceResource(final AccountUserApi accountUserApi,
                       final InvoiceUserApi invoiceApi,
                       final PaymentApi paymentApi,
                       final Clock clock,
                       final JaxrsUriBuilder uriBuilder,
                       final TagUserApi tagUserApi,
                       final CustomFieldUserApi customFieldUserApi,
                       final AuditUserApi auditUserApi,
                       final TenantUserApi tenantApi,
                       final Context context) {
    super(uriBuilder, tagUserApi, customFieldUserApi, auditUserApi, accountUserApi, paymentApi, null, clock, context);
    this.invoiceApi = invoiceApi;
    this.tenantApi = tenantApi;
    // Fallback locale for invoice rendering when no tenant locale applies.
    this.defaultLocale = Locale.getDefault();
}
@TimedResource
@GET
@Path("/{invoiceId:" + UUID_PATTERN + "}/")
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Retrieve an invoice by id", response = InvoiceJson.class)
@ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid invoice id supplied"),
                       @ApiResponse(code = 404, message = "Invoice not found")})
public Response getInvoice(@PathParam("invoiceId") final UUID invoiceId,
                           @QueryParam(QUERY_INVOICE_WITH_ITEMS) @DefaultValue("false") final boolean withItems,
                           @QueryParam(QUERY_INVOICE_WITH_CHILDREN_ITEMS) @DefaultValue("false") final boolean withChildrenItems,
                           @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                           @javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException {
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    final Invoice invoice = invoiceApi.getInvoice(invoiceId, tenantContext);
    // FIX: the null check must come before any dereference of `invoice`; the
    // original code called invoice.getId()/getAccountId() first, so a missing
    // invoice surfaced as an NPE instead of INVOICE_NOT_FOUND.
    if (invoice == null) {
        throw new InvoiceApiException(ErrorCode.INVOICE_NOT_FOUND, invoiceId);
    }
    // Child items are only fetched on demand; null means "not requested".
    final List<InvoiceItem> childInvoiceItems = withChildrenItems ? invoiceApi.getInvoiceItemsByParentInvoice(invoice.getId(), tenantContext) : null;
    final AccountAuditLogs accountAuditLogs = auditUserApi.getAccountAuditLogs(invoice.getAccountId(), auditMode.getLevel(), tenantContext);
    final InvoiceJson json = new InvoiceJson(invoice, withItems, childInvoiceItems, accountAuditLogs);
    return Response.status(Status.OK).entity(json).build();
}
@TimedResource
@GET
@Path("/{invoiceNumber:" + NUMBER_PATTERN + "}/")
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Retrieve an invoice by number", response = InvoiceJson.class)
@ApiResponses(value = {@ApiResponse(code = 404, message = "Invoice not found")})
public Response getInvoiceByNumber(@PathParam("invoiceNumber") final Integer invoiceNumber,
                                   @QueryParam(QUERY_INVOICE_WITH_ITEMS) @DefaultValue("false") final boolean withItems,
                                   @QueryParam(QUERY_INVOICE_WITH_CHILDREN_ITEMS) @DefaultValue("false") final boolean withChildrenItems,
                                   @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                                   @javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException {
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    final Invoice invoice = invoiceApi.getInvoiceByNumber(invoiceNumber, tenantContext);
    // FIX: the null check must come before any dereference of `invoice`; the
    // original code called invoice.getId()/getAccountId() first, so a missing
    // invoice surfaced as an NPE instead of INVOICE_NOT_FOUND.
    if (invoice == null) {
        throw new InvoiceApiException(ErrorCode.INVOICE_NOT_FOUND, invoiceNumber);
    }
    // Child items are only fetched on demand; null means "not requested".
    final List<InvoiceItem> childInvoiceItems = withChildrenItems ? invoiceApi.getInvoiceItemsByParentInvoice(invoice.getId(), tenantContext) : null;
    final AccountAuditLogs accountAuditLogs = auditUserApi.getAccountAuditLogs(invoice.getAccountId(), auditMode.getLevel(), tenantContext);
    final InvoiceJson json = new InvoiceJson(invoice, withItems, childInvoiceItems, accountAuditLogs);
    return Response.status(Status.OK).entity(json).build();
}
/**
 * Renders the invoice identified by {@code invoiceId} as HTML and returns it
 * with a 200 status. Rendering is delegated entirely to the invoice API.
 */
@TimedResource
@GET
@Path("/{invoiceId:" + UUID_PATTERN + "}/html")
@Produces(TEXT_HTML)
@ApiOperation(value = "Render an invoice as HTML", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 404, message = "Invoice not found")})
public Response getInvoiceAsHTML(@PathParam("invoiceId") final UUID invoiceId,
                                 @javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException, IOException, AccountApiException {
    return Response.status(Status.OK).entity(invoiceApi.getInvoiceAsHTML(invoiceId, context.createTenantContextNoAccountId(request))).build();
}
/**
 * Streams a page of invoices as JSON. Builds the next-page URI from the
 * pagination cursor and lazily converts each invoice, caching account-level
 * audit logs so they are fetched at most once per account within the page.
 */
@TimedResource
@GET
@Path("/" + PAGINATION)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "List invoices", response = InvoiceJson.class, responseContainer = "List")
@ApiResponses(value = {})
public Response getInvoices(@QueryParam(QUERY_SEARCH_OFFSET) @DefaultValue("0") final Long offset,
                            @QueryParam(QUERY_SEARCH_LIMIT) @DefaultValue("100") final Long limit,
                            @QueryParam(QUERY_INVOICE_WITH_ITEMS) @DefaultValue("false") final Boolean withItems,
                            @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                            @javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException {
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    final Pagination<Invoice> invoices = invoiceApi.getInvoices(offset, limit, tenantContext);
    // Next-page link preserves the withItems/audit query parameters.
    final URI nextPageUri = uriBuilder.nextPage(InvoiceResource.class, "getInvoices", invoices.getNextOffset(), limit, ImmutableMap.<String, String>of(QUERY_INVOICE_WITH_ITEMS, withItems.toString(),
                                                                                                                                                      QUERY_AUDIT, auditMode.getLevel().toString()));
    // AtomicReference so the anonymous Function below can mutate the cache map
    // from within the streaming conversion.
    final AtomicReference<Map<UUID, AccountAuditLogs>> accountsAuditLogs = new AtomicReference<Map<UUID, AccountAuditLogs>>(new HashMap<UUID, AccountAuditLogs>());
    return buildStreamingPaginationResponse(invoices,
                                            new Function<Invoice, InvoiceJson>() {
                                                @Override
                                                public InvoiceJson apply(final Invoice invoice) {
                                                    // Cache audit logs per account
                                                    if (accountsAuditLogs.get().get(invoice.getAccountId()) == null) {
                                                        accountsAuditLogs.get().put(invoice.getAccountId(), auditUserApi.getAccountAuditLogs(invoice.getAccountId(), auditMode.getLevel(), tenantContext));
                                                    }
                                                    return new InvoiceJson(invoice, withItems, null, accountsAuditLogs.get().get(invoice.getAccountId()));
                                                }
                                            },
                                            nextPageUri
                                           );
}
// Search invoices by an arbitrary search key, paginated (offset/limit), streamed as JSON.
@TimedResource
@GET
@Path("/" + SEARCH + "/{searchKey:" + ANYTHING_PATTERN + "}")
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Search invoices", response = InvoiceJson.class, responseContainer = "List")
@ApiResponses(value = {})
public Response searchInvoices(@PathParam("searchKey") final String searchKey,
@QueryParam(QUERY_SEARCH_OFFSET) @DefaultValue("0") final Long offset,
@QueryParam(QUERY_SEARCH_LIMIT) @DefaultValue("100") final Long limit,
@QueryParam(QUERY_INVOICE_WITH_ITEMS) @DefaultValue("false") final Boolean withItems,
@QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
@javax.ws.rs.core.Context final HttpServletRequest request) throws SubscriptionApiException {
final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
final Pagination<Invoice> invoices = invoiceApi.searchInvoices(searchKey, offset, limit, tenantContext);
// Next-page link carries the search key plus the withItems/audit query parameters.
final URI nextPageUri = uriBuilder.nextPage(InvoiceResource.class, "searchInvoices", invoices.getNextOffset(), limit, ImmutableMap.<String, String>of("searchKey", searchKey,
QUERY_INVOICE_WITH_ITEMS, withItems.toString(),
QUERY_AUDIT, auditMode.getLevel().toString()));
// Audit logs are fetched lazily and memoized per account while the response streams
// (same pattern as getInvoices).
final AtomicReference<Map<UUID, AccountAuditLogs>> accountsAuditLogs = new AtomicReference<Map<UUID, AccountAuditLogs>>(new HashMap<UUID, AccountAuditLogs>());
return buildStreamingPaginationResponse(invoices,
new Function<Invoice, InvoiceJson>() {
@Override
public InvoiceJson apply(final Invoice invoice) {
// Cache audit logs per account
if (accountsAuditLogs.get().get(invoice.getAccountId()) == null) {
accountsAuditLogs.get().put(invoice.getAccountId(), auditUserApi.getAccountAuditLogs(invoice.getAccountId(), auditMode.getLevel(), tenantContext));
}
return new InvoiceJson(invoice, withItems, null, accountsAuditLogs.get().get(invoice.getAccountId()));
}
},
nextPageUri
);
}
@TimedResource
@POST
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Trigger an invoice generation", response = InvoiceJson.class)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Created invoice successfully"),
                       @ApiResponse(code = 400, message = "Invalid account id or target datetime supplied")})
public Response createFutureInvoice(@ApiParam(required=true) @QueryParam(QUERY_ACCOUNT_ID) final UUID accountId,
                                    @QueryParam(QUERY_TARGET_DATE) final String targetDate,
                                    @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                    @HeaderParam(HDR_REASON) final String reason,
                                    @HeaderParam(HDR_COMMENT) final String comment,
                                    @javax.ws.rs.core.Context final HttpServletRequest request,
                                    @javax.ws.rs.core.Context final UriInfo uriInfo) throws AccountApiException, InvoiceApiException {
    // Trigger a real (non-dryRun) invoice run for the account at the given target date.
    final LocalDate resolvedTargetDate = toLocalDate(targetDate);
    final CallContext callContext = context.createCallContextWithAccountId(accountId, createdBy, reason, comment, request);
    try {
        final Invoice generated = invoiceApi.triggerInvoiceGeneration(accountId, resolvedTargetDate, null, callContext);
        return uriBuilder.buildResponse(uriInfo, InvoiceResource.class, "getInvoice", generated.getId(), request);
    } catch (final InvoiceApiException e) {
        // "Nothing to invoice" is surfaced as a 404 rather than an error.
        if (ErrorCode.INVOICE_NOTHING_TO_DO.getCode() == e.getCode()) {
            return Response.status(Status.NOT_FOUND).build();
        }
        throw e;
    }
}
@TimedResource
@POST
@Path("/" + MIGRATION + "/{accountId:" + UUID_PATTERN + "}")
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Create a migration invoice", response = InvoiceJson.class, tags="Invoice")
@ApiResponses(value = {@ApiResponse(code = 201, message = "Created migration invoice successfully"),
                       @ApiResponse(code = 400, message = "Invalid account id or target datetime supplied")})
public Response createMigrationInvoice(@PathParam("accountId") final UUID accountId,
                                       final List<InvoiceItemJson> items,
                                       @Nullable @QueryParam(QUERY_TARGET_DATE) final String targetDate,
                                       @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                       @HeaderParam(HDR_REASON) final String reason,
                                       @HeaderParam(HDR_COMMENT) final String comment,
                                       @javax.ws.rs.core.Context final HttpServletRequest request,
                                       @javax.ws.rs.core.Context final UriInfo uriInfo) throws AccountApiException, InvoiceApiException {
    // Create a migration invoice from caller-supplied items; items must match the account currency.
    final CallContext callContext = context.createCallContextWithAccountId(accountId, createdBy, reason, comment, request);
    final Account account = accountUserApi.getAccountById(accountId, callContext);
    final Iterable<InvoiceItem> migrationItems = validateSanitizeAndTranformInputItems(account.getCurrency(), items);
    // Missing targetDate defaults to "today" in the account's time zone.
    final LocalDate effectiveDate = toLocalDateDefaultToday(account, targetDate, callContext);
    final UUID migrationInvoiceId = invoiceApi.createMigrationInvoice(accountId, effectiveDate, migrationItems, callContext);
    return uriBuilder.buildResponse(uriInfo, InvoiceResource.class, "getInvoice", migrationInvoiceId, request);
}
// Run the invoice engine in dryRun mode (nothing is persisted).
// A null/empty body means "preview the invoice for targetDate"; a non-null body
// simulates a subscription operation (START_BILLING, CHANGE, STOP_BILLING).
@TimedResource
@POST
@Path("/" + DRY_RUN)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Generate a dryRun invoice", response = InvoiceJson.class)
@ApiResponses(value = {/* @ApiResponse(code = 200, message = "Successful"), */ /* Already added by default */
                       @ApiResponse(code = 204, message = "Nothing to generate"),
                       @ApiResponse(code = 400, message = "Invalid account id or target datetime supplied")})
public Response generateDryRunInvoice(@Nullable final InvoiceDryRunJson dryRunSubscriptionSpec,
                                      @ApiParam(required=true) @QueryParam(QUERY_ACCOUNT_ID) final UUID accountId,
                                      @Nullable @QueryParam(QUERY_TARGET_DATE) final String targetDate,
                                      @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                      @HeaderParam(HDR_REASON) final String reason,
                                      @HeaderParam(HDR_COMMENT) final String comment,
                                      @javax.ws.rs.core.Context final HttpServletRequest request,
                                      @javax.ws.rs.core.Context final UriInfo uriInfo) throws AccountApiException, InvoiceApiException {
    final CallContext callContext = context.createCallContextWithAccountId(accountId, createdBy, reason, comment, request);
    // Resolve the effective date: UPCOMING_INVOICE ignores targetDate (null date),
    // SUBSCRIPTION_ACTION with an effective date uses it, otherwise fall back to targetDate.
    final LocalDate inputDate;
    if (dryRunSubscriptionSpec != null) {
        if (DryRunType.UPCOMING_INVOICE.name().equals(dryRunSubscriptionSpec.getDryRunType())) {
            inputDate = null;
        } else if (DryRunType.SUBSCRIPTION_ACTION.name().equals(dryRunSubscriptionSpec.getDryRunType()) && dryRunSubscriptionSpec.getEffectiveDate() != null) {
            inputDate = dryRunSubscriptionSpec.getEffectiveDate();
        } else {
            inputDate = toLocalDate(targetDate);
        }
    } else {
        inputDate = toLocalDate(targetDate);
    }
    // Passing a null or empty body means we are trying to generate an invoice with a (future) targetDate
    // On the other hand if body is not null, we are attempting a dryRun subscription operation
    if (dryRunSubscriptionSpec != null && dryRunSubscriptionSpec.getDryRunAction() != null) {
        if (SubscriptionEventType.START_BILLING.toString().equals(dryRunSubscriptionSpec.getDryRunAction())) {
            // Bug fix: the message for a missing product name previously said "product category".
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getProductName(), "DryRun subscription product name should be specified");
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getBillingPeriod(), "DryRun subscription billingPeriod should be specified");
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getProductCategory(), "DryRun subscription product category should be specified");
            if (dryRunSubscriptionSpec.getProductCategory().equals(ProductCategory.ADD_ON)) {
                verifyNonNullOrEmpty(dryRunSubscriptionSpec.getBundleId(), "DryRun bundle ID should be specified");
            }
        } else if (SubscriptionEventType.CHANGE.toString().equals(dryRunSubscriptionSpec.getDryRunAction())) {
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getProductName(), "DryRun subscription product name should be specified");
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getBillingPeriod(), "DryRun subscription billingPeriod should be specified");
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getSubscriptionId(), "DryRun subscriptionID should be specified");
        } else if (SubscriptionEventType.STOP_BILLING.toString().equals(dryRunSubscriptionSpec.getDryRunAction())) {
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getSubscriptionId(), "DryRun subscriptionID should be specified");
        }
    }
    final Account account = accountUserApi.getAccountById(accountId, callContext);
    final DryRunArguments dryRunArguments = new DefaultDryRunArguments(dryRunSubscriptionSpec, account);
    try {
        final Invoice generatedInvoice = invoiceApi.triggerInvoiceGeneration(accountId, inputDate, dryRunArguments,
                                                                             callContext);
        return Response.status(Status.OK).entity(new InvoiceJson(generatedInvoice, true, null, null)).build();
    } catch (InvoiceApiException e) {
        // Nothing to generate is a normal outcome for a dryRun: 204, not an error.
        if (e.getCode() == ErrorCode.INVOICE_NOTHING_TO_DO.getCode()) {
            return Response.status(Status.NO_CONTENT).build();
        }
        throw e;
    }
}
@TimedResource
@DELETE
@Path("/{invoiceId:" + UUID_PATTERN + "}" + "/{invoiceItemId:" + UUID_PATTERN + "}/cba")
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Delete a CBA item")
@ApiResponses(value = {@ApiResponse(code = 204, message = "Successful operation"),
                       @ApiResponse(code = 400, message = "Invalid account id, invoice id or invoice item id supplied"),
                       @ApiResponse(code = 404, message = "Account or invoice not found")})
public Response deleteCBA(@PathParam("invoiceId") final UUID invoiceId,
                          @PathParam("invoiceItemId") final UUID invoiceItemId,
                          @ApiParam(required=true) @QueryParam(QUERY_ACCOUNT_ID) final UUID accountId,
                          @HeaderParam(HDR_CREATED_BY) final String createdBy,
                          @HeaderParam(HDR_REASON) final String reason,
                          @HeaderParam(HDR_COMMENT) final String comment,
                          @javax.ws.rs.core.Context final HttpServletRequest request) throws AccountApiException, InvoiceApiException {
    // Delete a credit-balance-adjustment (CBA) item from the given invoice.
    final CallContext callCtx = context.createCallContextWithAccountId(accountId, createdBy, reason, comment, request);
    // Resolve the account first so an unknown accountId surfaces as AccountApiException.
    final Account account = accountUserApi.getAccountById(accountId, callCtx);
    invoiceApi.deleteCBA(account.getId(), invoiceId, invoiceItemId, callCtx);
    return Response.status(Status.NO_CONTENT).build();
}
@TimedResource
@POST
@Path("/{invoiceId:" + UUID_PATTERN + "}")
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Adjust an invoice item", response = InvoiceJson.class)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Created adjustment Successfully"),
                       @ApiResponse(code = 400, message = "Invalid account id, invoice id or invoice item id supplied"),
                       @ApiResponse(code = 404, message = "Invoice not found")})
public Response adjustInvoiceItem(@PathParam("invoiceId") final UUID invoiceId,
                                  final InvoiceItemJson json,
                                  @QueryParam(QUERY_REQUESTED_DT) final String requestedDateTimeString,
                                  @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                  @HeaderParam(HDR_REASON) final String reason,
                                  @HeaderParam(HDR_COMMENT) final String comment,
                                  @javax.ws.rs.core.Context final HttpServletRequest request,
                                  @javax.ws.rs.core.Context final UriInfo uriInfo) throws AccountApiException, InvoiceApiException {
    // Insert an adjustment against an existing invoice item.
    verifyNonNullOrEmpty(json, "InvoiceItemJson body should be specified");
    verifyNonNullOrEmpty(json.getAccountId(), "InvoiceItemJson accountId needs to be set",
                         json.getInvoiceItemId(), "InvoiceItemJson invoiceItemId needs to be set");
    final UUID accountId = json.getAccountId();
    final CallContext callContext = context.createCallContextWithAccountId(accountId, createdBy, reason, comment, request);
    // Missing requested date defaults to "today" in the account's time zone.
    final LocalDate effectiveDate = toLocalDateDefaultToday(accountId, requestedDateTimeString, callContext);
    // Two overloads: when no amount is supplied the invoice API determines the adjustment
    // amount itself; otherwise the caller-provided amount/currency is used.
    final InvoiceItem adjustment = (json.getAmount() == null)
            ? invoiceApi.insertInvoiceItemAdjustment(accountId, invoiceId, json.getInvoiceItemId(), effectiveDate,
                                                     json.getDescription(), json.getItemDetails(), callContext)
            : invoiceApi.insertInvoiceItemAdjustment(accountId, invoiceId, json.getInvoiceItemId(), effectiveDate,
                                                     json.getAmount(), json.getCurrency(),
                                                     json.getDescription(), json.getItemDetails(), callContext);
    if (adjustment == null) {
        return Response.status(Status.NOT_FOUND).build();
    }
    return uriBuilder.buildResponse(uriInfo, InvoiceResource.class, "getInvoice", adjustment.getInvoiceId(), request);
}
// Insert one or more external charges on the account; when payInvoice=true, immediately
// attempt a purchase payment for each distinct invoice that received a charge.
@TimedResource
@POST
@Produces(APPLICATION_JSON)
@Consumes(APPLICATION_JSON)
@Path("/" + CHARGES + "/{accountId:" + UUID_PATTERN + "}")
@ApiOperation(value = "Create external charge(s)", response = InvoiceItemJson.class, responseContainer = "List")
@ApiResponses(value = {@ApiResponse(code = 201, message = "Created external charge Successfully"),
@ApiResponse(code = 400, message = "Invalid account id supplied"),
@ApiResponse(code = 404, message = "Account not found")})
public Response createExternalCharges(@PathParam("accountId") final UUID accountId,
final List<InvoiceItemJson> externalChargesJson,
@QueryParam(QUERY_REQUESTED_DT) final String requestedDateTimeString,
@QueryParam(QUERY_PAY_INVOICE) @DefaultValue("false") final Boolean payInvoice,
@QueryParam(QUERY_PLUGIN_PROPERTY) final List<String> pluginPropertiesString,
@QueryParam(QUERY_AUTO_COMMIT) @DefaultValue("false") final Boolean autoCommit,
@QueryParam(QUERY_PAYMENT_EXTERNAL_KEY) final String paymentExternalKey,
@QueryParam(QUERY_TRANSACTION_EXTERNAL_KEY) final String transactionExternalKey,
@HeaderParam(HDR_CREATED_BY) final String createdBy,
@HeaderParam(HDR_REASON) final String reason,
@HeaderParam(HDR_COMMENT) final String comment,
@javax.ws.rs.core.Context final UriInfo uriInfo,
@javax.ws.rs.core.Context final HttpServletRequest request) throws AccountApiException, InvoiceApiException, PaymentApiException {
final Iterable<PluginProperty> pluginProperties = extractPluginProperties(pluginPropertiesString);
final CallContext callContext = context.createCallContextWithAccountId(accountId, createdBy, reason, comment, request);
final Account account = accountUserApi.getAccountById(accountId, callContext);
// Input items must be in the account currency (validated/normalized here).
final Iterable<InvoiceItem> sanitizedExternalChargesJson = validateSanitizeAndTranformInputItems(account.getCurrency(), externalChargesJson);
// Get the effective date of the external charge, in the account timezone
final LocalDate requestedDate = toLocalDateDefaultToday(account, requestedDateTimeString, callContext);
final List<InvoiceItem> createdExternalCharges = invoiceApi.insertExternalCharges(account.getId(), requestedDate, sanitizedExternalChargesJson, autoCommit, callContext);
// if all createdExternalCharges point to the same invoiceId, use the provided paymentExternalKey and / or transactionExternalKey
final boolean haveSameInvoiceId = Iterables.all(createdExternalCharges, new Predicate<InvoiceItem>() {
@Override
public boolean apply(final InvoiceItem input) {
return input.getInvoiceId().equals(createdExternalCharges.get(0).getInvoiceId());
}
});
if (payInvoice) {
// Pay each affected invoice at most once, even when several charges landed on it.
final Collection<UUID> paidInvoices = new HashSet<UUID>();
for (final InvoiceItem externalCharge : createdExternalCharges) {
if (!paidInvoices.contains(externalCharge.getInvoiceId())) {
paidInvoices.add(externalCharge.getInvoiceId());
final Invoice invoice = invoiceApi.getInvoice(externalCharge.getInvoiceId(), callContext);
// The caller-supplied external keys are only safe to use when there is a single
// target invoice; otherwise they would collide across payments.
createPurchaseForInvoice(account, invoice.getId(), invoice.getBalance(), account.getPaymentMethodId(), false,
(haveSameInvoiceId && paymentExternalKey != null) ? paymentExternalKey : null,
(haveSameInvoiceId && transactionExternalKey != null) ? transactionExternalKey : null,
pluginProperties, callContext);
}
}
}
// Map the created items back to their JSON representation for the response body.
final List<InvoiceItemJson> createdExternalChargesJson = Lists.<InvoiceItem, InvoiceItemJson>transform(createdExternalCharges,
new Function<InvoiceItem, InvoiceItemJson>() {
@Override
public InvoiceItemJson apply(final InvoiceItem input) {
return new InvoiceItemJson(input);
}
}
);
return Response.status(Status.OK).entity(createdExternalChargesJson).build();
}
// Validate that every input item either carries the account currency or no currency at all,
// defaulting missing currencies to the account currency, then convert the JSON items to
// InvoiceItem. A mismatched currency is reported as InvoiceApiException(CURRENCY_INVALID).
// Note: the returned Iterables are lazy, so the IllegalArgumentException can only be caught
// here if the pipeline is consumed within this try (or by the caller's iteration).
private Iterable<InvoiceItem> validateSanitizeAndTranformInputItems(final Currency accountCurrency, final Iterable<InvoiceItemJson> inputItems) throws InvoiceApiException {
try {
final Iterable<InvoiceItemJson> sanitized = Iterables.transform(inputItems, new Function<InvoiceItemJson, InvoiceItemJson>() {
@Override
public InvoiceItemJson apply(final InvoiceItemJson input) {
if (input.getCurrency() != null) {
// Currency provided: it must match the account currency exactly.
if (!input.getCurrency().equals(accountCurrency)) {
throw new IllegalArgumentException(input.getCurrency().toString());
}
return input;
} else {
// No currency provided: rebuild the item with the account currency filled in
// (all other fields copied verbatim).
return new InvoiceItemJson(null,
input.getInvoiceId(),
null,
input.getAccountId(),
input.getChildAccountId(),
input.getBundleId(),
input.getSubscriptionId(),
input.getPlanName(),
input.getPhaseName(),
input.getUsageName(),
input.getPrettyPlanName(),
input.getPrettyPhaseName(),
input.getPrettyUsageName(),
input.getItemType(),
input.getDescription(),
input.getStartDate(),
input.getEndDate(),
input.getAmount(),
input.getRate(),
accountCurrency,
input.getQuantity(),
input.getItemDetails(),
null,
null);
}
}
});
// Second pass: convert the sanitized JSON items into the domain InvoiceItem type.
return Iterables.transform(sanitized, new Function<InvoiceItemJson, InvoiceItem>() {
@Override
public InvoiceItem apply(final InvoiceItemJson input) {
return input.toInvoiceItem();
}
});
} catch (IllegalArgumentException e) {
// Translate the currency mismatch (message carries the offending currency).
throw new InvoiceApiException(ErrorCode.CURRENCY_INVALID, accountCurrency, e.getMessage());
}
}
// List all payments attached to an invoice, sorted (INVOICE_PAYMENT_ORDERING), as
// InvoicePaymentJson. Returns an empty list when the invoice has no payments.
@TimedResource
@GET
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + PAYMENTS)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Retrieve payments associated with an invoice", response = InvoicePaymentJson.class, responseContainer = "List")
@ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid invoice id supplied"),
@ApiResponse(code = 404, message = "Invoice not found")})
public Response getPaymentsForInvoice(@PathParam("invoiceId") final UUID invoiceId,
@QueryParam(QUERY_WITH_PLUGIN_INFO) @DefaultValue("false") final Boolean withPluginInfo,
@QueryParam(QUERY_WITH_ATTEMPTS) @DefaultValue("false") final Boolean withAttempts,
@QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
@javax.ws.rs.core.Context final HttpServletRequest request) throws PaymentApiException, InvoiceApiException {
final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
final Invoice invoice = invoiceApi.getInvoice(invoiceId, tenantContext);
// Extract unique set of paymentId for this invoice
final Set<UUID> invoicePaymentIds = ImmutableSet.copyOf(Iterables.transform(invoice.getPayments(), new Function<InvoicePayment, UUID>() {
@Override
public UUID apply(final InvoicePayment input) {
return input.getPaymentId();
}
}));
if (invoicePaymentIds.isEmpty()) {
return Response.status(Status.OK).entity(ImmutableList.<InvoicePaymentJson>of()).build();
}
// One payment API lookup per distinct payment id (plugin info/attempts on demand).
final List<Payment> payments = new ArrayList<Payment>();
for (final UUID paymentId : invoicePaymentIds) {
final Payment payment = paymentApi.getPayment(paymentId, withPluginInfo, withAttempts, ImmutableList.<PluginProperty>of(), tenantContext);
payments.add(payment);
}
final Iterable<InvoicePaymentJson> result = INVOICE_PAYMENT_ORDERING.sortedCopy(Iterables.transform(payments, new Function<Payment, InvoicePaymentJson>() {
@Override
public InvoicePaymentJson apply(final Payment input) {
return new InvoicePaymentJson(input, invoice.getId(), null);
}
}));
return Response.status(Status.OK).entity(result).build();
}
// Trigger a payment for an invoice. When externalPayment=true no payment method may be
// supplied (the payment is recorded as external); otherwise the given payment method or
// the account default is charged. Returns 204 when no payment was created.
@TimedResource
@POST
@Produces(APPLICATION_JSON)
@Consumes(APPLICATION_JSON)
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + PAYMENTS)
@ApiOperation(value = "Trigger a payment for invoice", response = InvoicePaymentJson.class)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Created payment Successfully"),
@ApiResponse(code = 204, message = "Nothing to pay for"),
@ApiResponse(code = 400, message = "Invalid account id or invoice id supplied"),
@ApiResponse(code = 404, message = "Account not found")})
public Response createInstantPayment(@PathParam("invoiceId") final UUID invoiceId,
final InvoicePaymentJson payment,
@QueryParam(QUERY_PAYMENT_EXTERNAL) @DefaultValue("false") final Boolean externalPayment,
@QueryParam(QUERY_PLUGIN_PROPERTY) final List<String> pluginPropertiesString,
@HeaderParam(HDR_CREATED_BY) final String createdBy,
@HeaderParam(HDR_REASON) final String reason,
@HeaderParam(HDR_COMMENT) final String comment,
@javax.ws.rs.core.Context final HttpServletRequest request,
@javax.ws.rs.core.Context final UriInfo uriInfo) throws AccountApiException, PaymentApiException {
verifyNonNullOrEmpty(payment, "InvoicePaymentJson body should be specified");
verifyNonNullOrEmpty(payment.getAccountId(), "InvoicePaymentJson accountId needs to be set",
payment.getTargetInvoiceId(), "InvoicePaymentJson targetInvoiceId needs to be set",
payment.getPurchasedAmount(), "InvoicePaymentJson purchasedAmount needs to be set");
Preconditions.checkArgument(!externalPayment || payment.getPaymentMethodId() == null, "InvoicePaymentJson should not contain a paymentMethodId when this is an external payment");
final Iterable<PluginProperty> pluginProperties = extractPluginProperties(pluginPropertiesString);
final CallContext callContext = context.createCallContextNoAccountId(createdBy, reason, comment, request);
final Account account = accountUserApi.getAccountById(payment.getAccountId(), callContext);
// External payments carry no payment method; otherwise prefer the explicit method,
// falling back to the account's default payment method.
final UUID paymentMethodId = externalPayment ? null :
(payment.getPaymentMethodId() != null ? payment.getPaymentMethodId() : account.getPaymentMethodId());
final Payment result = createPurchaseForInvoice(account, invoiceId, payment.getPurchasedAmount(), paymentMethodId, externalPayment,
payment.getPaymentExternalKey(), null, pluginProperties, callContext);
// A null result means there was nothing to pay (204); otherwise 201 with a location header.
return result != null ?
uriBuilder.buildResponse(uriInfo, InvoicePaymentResource.class, "getInvoicePayment", result.getId(), request) :
Response.status(Status.NO_CONTENT).build();
}
// Fetch the tenant's invoice translation bundle for the given locale (404 when absent).
@TimedResource
@GET
@Path("/" + INVOICE_TRANSLATION + "/{locale:" + ANYTHING_PATTERN + "}/")
@Produces(TEXT_PLAIN)
@ApiOperation(value = "Retrieves the invoice translation for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid locale supplied"),
@ApiResponse(code = 404, message = "Translation not found")})
public Response getInvoiceTranslation(@PathParam("locale") final String localeStr,
@javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException, TenantApiException {
return getTemplateResource(localeStr, TenantKey.INVOICE_TRANSLATION_, request);
}
// Store the tenant's invoice translation bundle for a locale; with deleteIfExists=false
// an existing bundle yields 400 (see uploadTemplateResource).
@TimedResource
@POST
@Produces(TEXT_PLAIN)
@Consumes(TEXT_PLAIN)
@Path("/" + INVOICE_TRANSLATION + "/{locale:" + ANYTHING_PATTERN + "}/")
@ApiOperation(value = "Upload the invoice translation for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Uploaded invoice translation Successfully")})
public Response uploadInvoiceTranslation(@PathParam("locale") final String localeStr,
final String invoiceTranslation,
@QueryParam(QUERY_DELETE_IF_EXISTS) @DefaultValue("false") final boolean deleteIfExists,
@HeaderParam(HDR_CREATED_BY) final String createdBy,
@HeaderParam(HDR_REASON) final String reason,
@HeaderParam(HDR_COMMENT) final String comment,
@javax.ws.rs.core.Context final HttpServletRequest request,
@javax.ws.rs.core.Context final UriInfo uriInfo) throws Exception {
return uploadTemplateResource(invoiceTranslation,
localeStr,
deleteIfExists,
TenantKey.INVOICE_TRANSLATION_,
"getInvoiceTranslation",
createdBy,
reason,
comment,
request,
uriInfo);
}
// Fetch the tenant's catalog translation bundle for the given locale (404 when absent).
@TimedResource
@GET
@Path("/" + INVOICE_CATALOG_TRANSLATION + "/{locale:" + ANYTHING_PATTERN + "}/")
@Produces(TEXT_PLAIN)
@ApiOperation(value = "Retrieves the catalog translation for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid locale supplied"),
@ApiResponse(code = 404, message = "Template not found")})
public Response getCatalogTranslation(@PathParam("locale") final String localeStr,
@javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException, TenantApiException {
return getTemplateResource(localeStr, TenantKey.CATALOG_TRANSLATION_, request);
}
// Store the tenant's catalog translation bundle for a locale; with deleteIfExists=false
// an existing bundle yields 400 (see uploadTemplateResource).
@TimedResource
@POST
@Produces(TEXT_PLAIN)
@Consumes(TEXT_PLAIN)
@Path("/" + INVOICE_CATALOG_TRANSLATION + "/{locale:" + ANYTHING_PATTERN + "}/")
@ApiOperation(value = "Upload the catalog translation for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Uploaded catalog translation Successfully")})
public Response uploadCatalogTranslation(@PathParam("locale") final String localeStr,
final String catalogTranslation,
@QueryParam(QUERY_DELETE_IF_EXISTS) @DefaultValue("false") final boolean deleteIfExists,
@HeaderParam(HDR_CREATED_BY) final String createdBy,
@HeaderParam(HDR_REASON) final String reason,
@HeaderParam(HDR_COMMENT) final String comment,
@javax.ws.rs.core.Context final HttpServletRequest request,
@javax.ws.rs.core.Context final UriInfo uriInfo) throws Exception {
return uploadTemplateResource(catalogTranslation,
localeStr,
deleteIfExists,
TenantKey.CATALOG_TRANSLATION_,
"getCatalogTranslation",
createdBy,
reason,
comment,
request,
uriInfo);
}
// Fetch the tenant's (non-localized) invoice HTML template (404 when absent).
@TimedResource
@GET
@Path("/" + INVOICE_TEMPLATE)
@Produces(TEXT_HTML)
@ApiOperation(value = "Retrieves the invoice template for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 404, message = "Template not found")})
public Response getInvoiceTemplate(@javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException, TenantApiException {
return getTemplateResource(null, TenantKey.INVOICE_TEMPLATE, request);
}
// Store the tenant's (non-localized) invoice HTML template.
// With deleteIfExists=false an existing template yields 400 (see uploadTemplateResource).
@TimedResource
@POST
@Produces(TEXT_HTML)
@Consumes(TEXT_HTML)
@Path("/" + INVOICE_TEMPLATE)
@ApiOperation(value = "Upload the invoice template for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Uploaded invoice template Successfully")})
public Response uploadInvoiceTemplate(final String invoiceTemplate,  // renamed from the misleading "catalogTranslation"
                                      @QueryParam(QUERY_DELETE_IF_EXISTS) @DefaultValue("false") final boolean deleteIfExists,
                                      @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                      @HeaderParam(HDR_REASON) final String reason,
                                      @HeaderParam(HDR_COMMENT) final String comment,
                                      @javax.ws.rs.core.Context final HttpServletRequest request,
                                      @javax.ws.rs.core.Context final UriInfo uriInfo) throws Exception {
    return uploadTemplateResource(invoiceTemplate,
                                  null,
                                  deleteIfExists,
                                  TenantKey.INVOICE_TEMPLATE,
                                  "getInvoiceTemplate",
                                  createdBy,
                                  reason,
                                  comment,
                                  request,
                                  uriInfo);
}
// Fetch the tenant's manualPay invoice HTML template (404 when absent).
// NOTE(review): the {locale} path parameter is accepted but IGNORED — getTemplateResource
// is called with a null locale, matching the non-localized upload path below. Confirm
// whether the locale segment should be honored or dropped from the route.
@TimedResource
@GET
@Path("/" + INVOICE_MP_TEMPLATE + "/{locale:" + ANYTHING_PATTERN + "}/")
@Produces(TEXT_HTML)
@ApiOperation(value = "Retrieves the manualPay invoice template for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 404, message = "Template not found")})
public Response getInvoiceMPTemplate(@PathParam("locale") final String localeStr,
@javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException, TenantApiException {
return getTemplateResource(null, TenantKey.INVOICE_MP_TEMPLATE, request);
}
// Store the tenant's manualPay invoice HTML template (non-localized key).
// With deleteIfExists=false an existing template yields 400 (see uploadTemplateResource).
@TimedResource
@POST
@Produces(TEXT_HTML)
@Consumes(TEXT_HTML)
@Path("/" + INVOICE_MP_TEMPLATE)
@ApiOperation(value = "Upload the manualPay invoice template for the tenant", response = String.class)
public Response uploadInvoiceMPTemplate(final String invoiceTemplate,  // renamed from the misleading "catalogTranslation"
                                        @QueryParam(QUERY_DELETE_IF_EXISTS) @DefaultValue("false") final boolean deleteIfExists,
                                        @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                        @HeaderParam(HDR_REASON) final String reason,
                                        @HeaderParam(HDR_COMMENT) final String comment,
                                        @javax.ws.rs.core.Context final HttpServletRequest request,
                                        @javax.ws.rs.core.Context final UriInfo uriInfo) throws Exception {
    return uploadTemplateResource(invoiceTemplate,
                                  null,
                                  deleteIfExists,
                                  TenantKey.INVOICE_MP_TEMPLATE,
                                  "getInvoiceMPTemplate",
                                  createdBy,
                                  reason,
                                  comment,
                                  request,
                                  uriInfo);
}
// Store a per-tenant template/translation under a (possibly locale-qualified) tenant key.
// Localized payloads are first parsed as a resource bundle for validation. When a value
// already exists for the key: delete-then-replace if deleteIfExists, otherwise 400.
private Response uploadTemplateResource(final String templateResource,
                                        @Nullable final String localeStr,
                                        final boolean deleteIfExists,
                                        final TenantKey tenantKey,
                                        final String getMethodStr,
                                        final String createdBy,
                                        final String reason,
                                        final String comment,
                                        final HttpServletRequest request,
                                        final UriInfo uriInfo) throws Exception {
    final String tenantKeyStr;
    if (localeStr != null) {
        // Validation purpose: Will throw bad stream
        final InputStream stream = new ByteArrayInputStream(templateResource.getBytes());
        new PropertyResourceBundle(stream);
        // localeStr is known non-null in this branch; the previous "?: defaultLocale"
        // fallback here was dead code and has been removed.
        final Locale locale = LocaleUtils.toLocale(localeStr);
        tenantKeyStr = LocaleUtils.localeString(locale, tenantKey.toString());
    } else {
        tenantKeyStr = tenantKey.toString();
    }
    final CallContext callContext = context.createCallContextNoAccountId(createdBy, reason, comment, request);
    if (!tenantApi.getTenantValuesForKey(tenantKeyStr, callContext).isEmpty()) {
        if (deleteIfExists) {
            tenantApi.deleteTenantKey(tenantKeyStr, callContext);
        } else {
            return Response.status(Status.BAD_REQUEST).build();
        }
    }
    tenantApi.addTenantKeyValue(tenantKeyStr, templateResource, callContext);
    return uriBuilder.buildResponse(uriInfo, InvoiceResource.class, getMethodStr, localeStr, request);
}
// Look up a per-tenant template/translation value by (optionally locale-qualified) key.
private Response getTemplateResource(@Nullable final String localeStr,
                                     final TenantKey tenantKey,
                                     final HttpServletRequest request) throws InvoiceApiException, TenantApiException {
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    // Qualify the key with the locale when one was supplied.
    String tenantKeyStr = tenantKey.toString();
    if (localeStr != null) {
        tenantKeyStr = LocaleUtils.localeString(LocaleUtils.toLocale(localeStr), tenantKeyStr);
    }
    final List<String> values = tenantApi.getTenantValuesForKey(tenantKeyStr, tenantContext);
    if (values.isEmpty()) {
        return Response.status(Status.NOT_FOUND).build();
    }
    // Only the first stored value is returned.
    return Response.status(Status.OK).entity(values.get(0)).build();
}
@TimedResource
@GET
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + CUSTOM_FIELDS)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Retrieve invoice custom fields", response = CustomFieldJson.class, responseContainer = "List", nickname = "getInvoiceCustomFields")
@ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid invoice id supplied")})
public Response getCustomFields(@PathParam(ID_PARAM_NAME) final UUID id,
                                @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                                @javax.ws.rs.core.Context final HttpServletRequest request) {
    // Delegate to the shared custom-field machinery in the base resource.
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    return super.getCustomFields(id, auditMode, tenantContext);
}
@TimedResource
@POST
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + CUSTOM_FIELDS)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Add custom fields to invoice", response = CustomField.class, responseContainer = "List")
@ApiResponses(value = {@ApiResponse(code = 201, message = "Custom field created successfully"),
                       @ApiResponse(code = 400, message = "Invalid invoice id supplied")})
public Response createInvoiceCustomFields(@PathParam(ID_PARAM_NAME) final UUID id,
                                          final List<CustomFieldJson> customFields,
                                          @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                          @HeaderParam(HDR_REASON) final String reason,
                                          @HeaderParam(HDR_COMMENT) final String comment,
                                          @javax.ws.rs.core.Context final HttpServletRequest request,
                                          @javax.ws.rs.core.Context final UriInfo uriInfo) throws CustomFieldApiException {
    // Delegate to the shared custom-field machinery in the base resource.
    final CallContext callContext = context.createCallContextNoAccountId(createdBy, reason, comment, request);
    return super.createCustomFields(id, customFields, callContext, uriInfo, request);
}
/**
 * Updates the values of existing custom fields on an invoice.
 * Delegates to the base-class implementation; returns 204 on success.
 */
@TimedResource
@PUT
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + CUSTOM_FIELDS)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Modify custom fields to invoice")
@ApiResponses(value = {@ApiResponse(code = 204, message = "Successful operation"),
                       @ApiResponse(code = 400, message = "Invalid invoice id supplied")})
public Response modifyInvoiceCustomFields(@PathParam(ID_PARAM_NAME) final UUID id,
                                          final List<CustomFieldJson> customFields,
                                          @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                          @HeaderParam(HDR_REASON) final String reason,
                                          @HeaderParam(HDR_COMMENT) final String comment,
                                          @javax.ws.rs.core.Context final HttpServletRequest request) throws CustomFieldApiException {
    return super.modifyCustomFields(id, customFields,
                                    context.createCallContextNoAccountId(createdBy, reason, comment, request));
}
/**
 * Removes the given custom fields (by id) from an invoice.
 * Delegates to the base-class implementation; returns 204 on success.
 */
@TimedResource
@DELETE
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + CUSTOM_FIELDS)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Remove custom fields from invoice")
@ApiResponses(value = {@ApiResponse(code = 204, message = "Successful operation"),
                       @ApiResponse(code = 400, message = "Invalid invoice id supplied")})
public Response deleteInvoiceCustomFields(@PathParam(ID_PARAM_NAME) final UUID id,
                                          @QueryParam(QUERY_CUSTOM_FIELD) final List<UUID> customFieldList,
                                          @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                          @HeaderParam(HDR_REASON) final String reason,
                                          @HeaderParam(HDR_COMMENT) final String comment,
                                          @javax.ws.rs.core.Context final HttpServletRequest request) throws CustomFieldApiException {
    return super.deleteCustomFields(id, customFieldList,
                                    context.createCallContextNoAccountId(createdBy, reason, comment, request));
}
/**
 * Retrieves the tags attached to an invoice.
 * Loads the invoice first to resolve its owning account id, which the shared base-class
 * implementation needs to scope the tag lookup.
 */
@TimedResource
@GET
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + TAGS)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Retrieve invoice tags", response = TagJson.class, responseContainer = "List", nickname = "getInvoiceTags")
@ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid invoice id supplied"),
                       @ApiResponse(code = 404, message = "Invoice not found")})
public Response getTags(@PathParam(ID_PARAM_NAME) final UUID invoiceId,
                        @QueryParam(QUERY_INCLUDED_DELETED) @DefaultValue("false") final Boolean includedDeleted,
                        @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                        @javax.ws.rs.core.Context final HttpServletRequest request) throws TagDefinitionApiException, InvoiceApiException {
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    final Invoice invoice = invoiceApi.getInvoice(invoiceId, tenantContext);
    return super.getTags(invoice.getAccountId(), invoiceId, auditMode, includedDeleted, tenantContext);
}
/**
 * Attaches the given tag definitions (by id) to an invoice.
 * Delegates to the base-class implementation; on success a 201 is returned with a
 * Location header built from {@code uriInfo}.
 */
@TimedResource
@POST
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + TAGS)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Add tags to invoice", response = TagJson.class, responseContainer = "List")
@ApiResponses(value = {@ApiResponse(code = 201, message = "Tag created successfully"),
                       @ApiResponse(code = 400, message = "Invalid invoice id supplied")})
public Response createInvoiceTags(@PathParam(ID_PARAM_NAME) final UUID id,
                                  final List<UUID> tagList,
                                  @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                  @HeaderParam(HDR_REASON) final String reason,
                                  @HeaderParam(HDR_COMMENT) final String comment,
                                  @javax.ws.rs.core.Context final UriInfo uriInfo,
                                  @javax.ws.rs.core.Context final HttpServletRequest request) throws TagApiException {
    return super.createTags(id, tagList, uriInfo,
                            context.createCallContextNoAccountId(createdBy, reason, comment, request), request);
}
/**
 * Removes the given tags (by tag-definition id) from an invoice.
 * Delegates to the base-class implementation; returns 204 on success.
 */
@TimedResource
@DELETE
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + TAGS)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Remove tags from invoice")
@ApiResponses(value = {@ApiResponse(code = 204, message = "Successful operation"),
                       @ApiResponse(code = 400, message = "Invalid invoice id supplied")})
public Response deleteInvoiceTags(@PathParam(ID_PARAM_NAME) final UUID id,
                                  @QueryParam(QUERY_TAG) final List<UUID> tagList,
                                  @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                  @HeaderParam(HDR_REASON) final String reason,
                                  @HeaderParam(HDR_COMMENT) final String comment,
                                  @javax.ws.rs.core.Context final HttpServletRequest request) throws TagApiException {
    return super.deleteTags(id, tagList,
                            context.createCallContextNoAccountId(createdBy, reason, comment, request));
}
/**
 * Transitions a DRAFT invoice to COMMITTED.
 * Returns 204 on success; a missing invoice surfaces as an {@link InvoiceApiException}.
 */
@TimedResource
@PUT
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + COMMIT_INVOICE)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Perform the invoice status transition from DRAFT to COMMITTED")
@ApiResponses(value = {@ApiResponse(code = 204, message = "Successful operation"),
                       @ApiResponse(code = 404, message = "Invoice not found")})
public Response commitInvoice(@PathParam("invoiceId") final UUID invoiceId,
                              @HeaderParam(HDR_CREATED_BY) final String createdBy,
                              @HeaderParam(HDR_REASON) final String reason,
                              @HeaderParam(HDR_COMMENT) final String comment,
                              @javax.ws.rs.core.Context final HttpServletRequest request,
                              @javax.ws.rs.core.Context final UriInfo uriInfo) throws InvoiceApiException {
    invoiceApi.commitInvoice(invoiceId,
                             context.createCallContextNoAccountId(createdBy, reason, comment, request));
    return Response.status(Status.NO_CONTENT).build();
}
/**
 * Voids an invoice.
 * Returns 204 on success; invalid states or a missing invoice surface as an
 * {@link InvoiceApiException}.
 */
@TimedResource
@PUT
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + VOID_INVOICE)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Perform the action of voiding an invoice")
@ApiResponses(value = {@ApiResponse(code = 204, message = "Successful operation"),
                       @ApiResponse(code = 400, message = "Invalid invoice id supplied"),
                       @ApiResponse(code = 404, message = "Invoice not found")})
public Response voidInvoice(@PathParam("invoiceId") final UUID invoiceId,
                            @HeaderParam(HDR_CREATED_BY) final String createdBy,
                            @HeaderParam(HDR_REASON) final String reason,
                            @HeaderParam(HDR_COMMENT) final String comment,
                            @javax.ws.rs.core.Context final HttpServletRequest request,
                            @javax.ws.rs.core.Context final UriInfo uriInfo) throws InvoiceApiException {
    invoiceApi.voidInvoice(invoiceId,
                           context.createCallContextNoAccountId(createdBy, reason, comment, request));
    return Response.status(Status.NO_CONTENT).build();
}
// Identifies the object type this resource manages; presumably consumed by the
// base-class tag/custom-field helpers to scope lookups — confirm against JaxRsResourceBase.
@Override
protected ObjectType getObjectType() {
    return ObjectType.INVOICE;
}
/**
 * Adapter from the wire-level {@link InvoiceDryRunJson} to the {@link DryRunArguments}
 * contract expected by the invoice API.
 *
 * A {@code null} input means "plain dry run for a target date": {@code DryRunType.TARGET_DATE}
 * with no subscription specifics.
 */
private static class DefaultDryRunArguments implements DryRunArguments {

    private final DryRunType dryRunType;
    private final SubscriptionEventType action;
    private final UUID subscriptionId;
    private final LocalDate effectiveDate;
    private final PlanPhaseSpecifier specifier;
    private final UUID bundleId;
    private final BillingActionPolicy billingPolicy;
    private final List<PlanPhasePriceOverride> overrides;

    public DefaultDryRunArguments(final InvoiceDryRunJson input, final Account account) {
        if (input == null) {
            this.dryRunType = DryRunType.TARGET_DATE;
            this.action = null;
            this.subscriptionId = null;
            this.effectiveDate = null;
            this.specifier = null;
            this.bundleId = null;
            this.billingPolicy = null;
            this.overrides = null;
        } else {
            this.dryRunType = input.getDryRunType() != null ? input.getDryRunType() : DryRunType.TARGET_DATE;
            // The original used "x != null ? x : null" for action, billingPolicy and phaseType,
            // which is a no-op — assign directly.
            this.action = input.getDryRunAction();
            this.subscriptionId = input.getSubscriptionId();
            this.bundleId = input.getBundleId();
            this.effectiveDate = input.getEffectiveDate();
            this.billingPolicy = input.getBillingPolicy();
            // A phase specifier can only be built when product name, category and billing period
            // are all present; otherwise it stays null.
            final PlanPhaseSpecifier planPhaseSpecifier = (input.getProductName() != null &&
                                                          input.getProductCategory() != null &&
                                                          input.getBillingPeriod() != null) ?
                                                         new PlanPhaseSpecifier(input.getProductName(),
                                                                                input.getBillingPeriod(),
                                                                                input.getPriceListName(),
                                                                                input.getPhaseType()) :
                                                         null;
            this.specifier = planPhaseSpecifier;
            // Note: the anonymous-class parameter was named "input" in the original, shadowing
            // the constructor argument; renamed to "override" for clarity (same behavior).
            this.overrides = input.getPriceOverrides() != null ?
                             ImmutableList.copyOf(Iterables.transform(input.getPriceOverrides(), new Function<PhasePriceOverrideJson, PlanPhasePriceOverride>() {
                                 @Nullable
                                 @Override
                                 public PlanPhasePriceOverride apply(@Nullable final PhasePriceOverrideJson override) {
                                     // A named phase overrides that specific phase; otherwise the
                                     // override applies through the plan-phase specifier built above.
                                     if (override.getPhaseName() != null) {
                                         return new DefaultPlanPhasePriceOverride(override.getPhaseName(), account.getCurrency(), override.getFixedPrice(), override.getRecurringPrice(), null);
                                     } else {
                                         return new DefaultPlanPhasePriceOverride(planPhaseSpecifier, account.getCurrency(), override.getFixedPrice(), override.getRecurringPrice(), null);
                                     }
                                 }
                             })) : ImmutableList.<PlanPhasePriceOverride>of();
        }
    }

    @Override
    public DryRunType getDryRunType() {
        return dryRunType;
    }

    @Override
    public PlanPhaseSpecifier getPlanPhaseSpecifier() {
        return specifier;
    }

    @Override
    public SubscriptionEventType getAction() {
        return action;
    }

    @Override
    public UUID getSubscriptionId() {
        return subscriptionId;
    }

    @Override
    public LocalDate getEffectiveDate() {
        return effectiveDate;
    }

    @Override
    public UUID getBundleId() {
        return bundleId;
    }

    @Override
    public BillingActionPolicy getBillingActionPolicy() {
        return billingPolicy;
    }

    @Override
    public List<PlanPhasePriceOverride> getPlanPhasePriceOverrides() {
        return overrides;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("DefaultDryRunArguments{");
        sb.append("dryRunType=").append(dryRunType);
        sb.append(", action=").append(action);
        sb.append(", subscriptionId=").append(subscriptionId);
        sb.append(", effectiveDate=").append(effectiveDate);
        sb.append(", specifier=").append(specifier);
        sb.append(", bundleId=").append(bundleId);
        sb.append(", billingPolicy=").append(billingPolicy);
        sb.append(", overrides=").append(overrides);
        sb.append('}');
        return sb.toString();
    }
}
}
|
jaxrs/src/main/java/org/killbill/billing/jaxrs/resources/InvoiceResource.java
|
/*
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014-2018 Groupon, Inc
* Copyright 2014-2018 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.jaxrs.resources;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.PropertyResourceBundle;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import org.joda.time.LocalDate;
import org.killbill.billing.ErrorCode;
import org.killbill.billing.ObjectType;
import org.killbill.billing.account.api.Account;
import org.killbill.billing.account.api.AccountApiException;
import org.killbill.billing.account.api.AccountUserApi;
import org.killbill.billing.catalog.DefaultPlanPhasePriceOverride;
import org.killbill.billing.catalog.api.BillingActionPolicy;
import org.killbill.billing.catalog.api.Currency;
import org.killbill.billing.catalog.api.PlanPhasePriceOverride;
import org.killbill.billing.catalog.api.PlanPhaseSpecifier;
import org.killbill.billing.catalog.api.ProductCategory;
import org.killbill.billing.entitlement.api.SubscriptionApiException;
import org.killbill.billing.entitlement.api.SubscriptionEventType;
import org.killbill.billing.invoice.api.DryRunArguments;
import org.killbill.billing.invoice.api.DryRunType;
import org.killbill.billing.invoice.api.Invoice;
import org.killbill.billing.invoice.api.InvoiceApiException;
import org.killbill.billing.invoice.api.InvoiceItem;
import org.killbill.billing.invoice.api.InvoicePayment;
import org.killbill.billing.invoice.api.InvoiceUserApi;
import org.killbill.billing.jaxrs.json.CustomFieldJson;
import org.killbill.billing.jaxrs.json.InvoiceDryRunJson;
import org.killbill.billing.jaxrs.json.InvoiceItemJson;
import org.killbill.billing.jaxrs.json.InvoiceJson;
import org.killbill.billing.jaxrs.json.InvoicePaymentJson;
import org.killbill.billing.jaxrs.json.PhasePriceOverrideJson;
import org.killbill.billing.jaxrs.json.TagJson;
import org.killbill.billing.jaxrs.util.Context;
import org.killbill.billing.jaxrs.util.JaxrsUriBuilder;
import org.killbill.billing.payment.api.Payment;
import org.killbill.billing.payment.api.PaymentApi;
import org.killbill.billing.payment.api.PaymentApiException;
import org.killbill.billing.payment.api.PluginProperty;
import org.killbill.billing.tenant.api.TenantApiException;
import org.killbill.billing.tenant.api.TenantKV.TenantKey;
import org.killbill.billing.tenant.api.TenantUserApi;
import org.killbill.billing.util.LocaleUtils;
import org.killbill.billing.util.api.AuditUserApi;
import org.killbill.billing.util.api.CustomFieldApiException;
import org.killbill.billing.util.api.CustomFieldUserApi;
import org.killbill.billing.util.api.TagApiException;
import org.killbill.billing.util.api.TagDefinitionApiException;
import org.killbill.billing.util.api.TagUserApi;
import org.killbill.billing.util.audit.AccountAuditLogs;
import org.killbill.billing.util.callcontext.CallContext;
import org.killbill.billing.util.callcontext.TenantContext;
import org.killbill.billing.util.customfield.CustomField;
import org.killbill.billing.util.entity.Pagination;
import org.killbill.clock.Clock;
import org.killbill.commons.metrics.TimedResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Ordering;
import com.google.inject.Inject;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.MediaType.TEXT_HTML;
import static javax.ws.rs.core.MediaType.TEXT_PLAIN;
@Path(JaxrsResource.INVOICES_PATH)
@Api(value = JaxrsResource.INVOICES_PATH, description = "Operations on invoices", tags="Invoice")
public class InvoiceResource extends JaxRsResourceBase {
private static final Logger log = LoggerFactory.getLogger(InvoiceResource.class);

// Name of the invoice-id path parameter shared by the custom-field/tag endpoints below.
private static final String ID_PARAM_NAME = "invoiceId";
// Name of the locale query parameter (used by the translation/template endpoints).
private static final String LOCALE_PARAM_NAME = "locale";

private final InvoiceUserApi invoiceApi;
private final TenantUserApi tenantApi;
// JVM default locale, captured at construction time; presumably the fallback when a
// request specifies no locale — confirm against the template/translation endpoints.
private final Locale defaultLocale;

// Orders invoice payments chronologically by the effective date of their first transaction.
// NOTE(review): assumes every InvoicePaymentJson has at least one transaction — confirm.
private static final Ordering<InvoicePaymentJson> INVOICE_PAYMENT_ORDERING = Ordering.from(new Comparator<InvoicePaymentJson>() {
    @Override
    public int compare(final InvoicePaymentJson o1, final InvoicePaymentJson o2) {
        return o1.getTransactions().get(0).getEffectiveDate().compareTo(o2.getTransactions().get(0).getEffectiveDate());
    }
});
/**
 * Guice-injected constructor. Passes the shared collaborators up to
 * {@code JaxRsResourceBase} and keeps the invoice- and tenant-specific APIs locally.
 */
@Inject
public InvoiceResource(final AccountUserApi accountUserApi,
                       final InvoiceUserApi invoiceApi,
                       final PaymentApi paymentApi,
                       final Clock clock,
                       final JaxrsUriBuilder uriBuilder,
                       final TagUserApi tagUserApi,
                       final CustomFieldUserApi customFieldUserApi,
                       final AuditUserApi auditUserApi,
                       final TenantUserApi tenantApi,
                       final Context context) {
    // The null argument is the base class's subscription API slot, unused by this resource.
    super(uriBuilder, tagUserApi, customFieldUserApi, auditUserApi, accountUserApi, paymentApi, null, clock, context);
    this.invoiceApi = invoiceApi;
    this.tenantApi = tenantApi;
    this.defaultLocale = Locale.getDefault();
}
/**
 * Retrieves a single invoice by id, optionally with its items, its children's items
 * (for parent invoices) and audit logs.
 *
 * BUG FIX: the original dereferenced {@code invoice} (getId()/getAccountId()) BEFORE the
 * {@code invoice == null} check, so a null result produced an NPE instead of the intended
 * INVOICE_NOT_FOUND error. The guard now runs first.
 */
@TimedResource
@GET
@Path("/{invoiceId:" + UUID_PATTERN + "}/")
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Retrieve an invoice by id", response = InvoiceJson.class)
@ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid invoice id supplied"),
                       @ApiResponse(code = 404, message = "Invoice not found")})
public Response getInvoice(@PathParam("invoiceId") final UUID invoiceId,
                           @QueryParam(QUERY_INVOICE_WITH_ITEMS) @DefaultValue("false") final boolean withItems,
                           @QueryParam(QUERY_INVOICE_WITH_CHILDREN_ITEMS) @DefaultValue("false") final boolean withChildrenItems,
                           @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                           @javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException {
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    final Invoice invoice = invoiceApi.getInvoice(invoiceId, tenantContext);
    if (invoice == null) {
        throw new InvoiceApiException(ErrorCode.INVOICE_NOT_FOUND, invoiceId);
    }
    // Child items are only fetched on demand (extra lookup per request).
    final List<InvoiceItem> childInvoiceItems = withChildrenItems ? invoiceApi.getInvoiceItemsByParentInvoice(invoice.getId(), tenantContext) : null;
    final AccountAuditLogs accountAuditLogs = auditUserApi.getAccountAuditLogs(invoice.getAccountId(), auditMode.getLevel(), tenantContext);
    final InvoiceJson json = new InvoiceJson(invoice, withItems, childInvoiceItems, accountAuditLogs);
    return Response.status(Status.OK).entity(json).build();
}
/**
 * Retrieves a single invoice by its human-readable number, optionally with its items,
 * its children's items and audit logs.
 *
 * BUG FIX: as in getInvoice, the original dereferenced {@code invoice} before the null
 * check, so a null result produced an NPE instead of INVOICE_NOT_FOUND. The guard now
 * runs first.
 */
@TimedResource
@GET
@Path("/{invoiceNumber:" + NUMBER_PATTERN + "}/")
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Retrieve an invoice by number", response = InvoiceJson.class)
@ApiResponses(value = {@ApiResponse(code = 404, message = "Invoice not found")})
public Response getInvoiceByNumber(@PathParam("invoiceNumber") final Integer invoiceNumber,
                                   @QueryParam(QUERY_INVOICE_WITH_ITEMS) @DefaultValue("false") final boolean withItems,
                                   @QueryParam(QUERY_INVOICE_WITH_CHILDREN_ITEMS) @DefaultValue("false") final boolean withChildrenItems,
                                   @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                                   @javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException {
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    final Invoice invoice = invoiceApi.getInvoiceByNumber(invoiceNumber, tenantContext);
    if (invoice == null) {
        throw new InvoiceApiException(ErrorCode.INVOICE_NOT_FOUND, invoiceNumber);
    }
    final List<InvoiceItem> childInvoiceItems = withChildrenItems ? invoiceApi.getInvoiceItemsByParentInvoice(invoice.getId(), tenantContext) : null;
    final AccountAuditLogs accountAuditLogs = auditUserApi.getAccountAuditLogs(invoice.getAccountId(), auditMode.getLevel(), tenantContext);
    final InvoiceJson json = new InvoiceJson(invoice, withItems, childInvoiceItems, accountAuditLogs);
    return Response.status(Status.OK).entity(json).build();
}
/**
 * Renders an invoice as HTML (e.g. for display or emailing), delegating the actual
 * templating to the invoice API.
 */
@TimedResource
@GET
@Path("/{invoiceId:" + UUID_PATTERN + "}/html")
@Produces(TEXT_HTML)
@ApiOperation(value = "Render an invoice as HTML", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 404, message = "Invoice not found")})
public Response getInvoiceAsHTML(@PathParam("invoiceId") final UUID invoiceId,
                                 @javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException, IOException, AccountApiException {
    return Response.status(Status.OK).entity(invoiceApi.getInvoiceAsHTML(invoiceId, context.createTenantContextNoAccountId(request))).build();
}
/**
 * Lists invoices page by page, streaming each one as JSON.
 * Audit logs are fetched once per account and cached for the duration of the page to
 * avoid repeated lookups when many invoices share an account.
 */
@TimedResource
@GET
@Path("/" + PAGINATION)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "List invoices", response = InvoiceJson.class, responseContainer = "List")
@ApiResponses(value = {})
public Response getInvoices(@QueryParam(QUERY_SEARCH_OFFSET) @DefaultValue("0") final Long offset,
                            @QueryParam(QUERY_SEARCH_LIMIT) @DefaultValue("100") final Long limit,
                            @QueryParam(QUERY_INVOICE_WITH_ITEMS) @DefaultValue("false") final Boolean withItems,
                            @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                            @javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException {
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    final Pagination<Invoice> invoices = invoiceApi.getInvoices(offset, limit, tenantContext);
    final URI nextPageUri = uriBuilder.nextPage(InvoiceResource.class, "getInvoices", invoices.getNextOffset(), limit, ImmutableMap.<String, String>of(QUERY_INVOICE_WITH_ITEMS, withItems.toString(),
                                                                                                                                                      QUERY_AUDIT, auditMode.getLevel().toString()));
    // Per-request cache of audit logs keyed by account (final so the anonymous class can capture it).
    final Map<UUID, AccountAuditLogs> auditLogsCache = new HashMap<UUID, AccountAuditLogs>();
    return buildStreamingPaginationResponse(invoices,
                                            new Function<Invoice, InvoiceJson>() {
                                                @Override
                                                public InvoiceJson apply(final Invoice invoice) {
                                                    AccountAuditLogs auditLogs = auditLogsCache.get(invoice.getAccountId());
                                                    if (auditLogs == null) {
                                                        auditLogs = auditUserApi.getAccountAuditLogs(invoice.getAccountId(), auditMode.getLevel(), tenantContext);
                                                        auditLogsCache.put(invoice.getAccountId(), auditLogs);
                                                    }
                                                    return new InvoiceJson(invoice, withItems, null, auditLogs);
                                                }
                                            },
                                            nextPageUri);
}
/**
 * Searches invoices by an arbitrary key, streaming each match as JSON page by page.
 * Audit logs are fetched once per account and cached for the duration of the page.
 */
@TimedResource
@GET
@Path("/" + SEARCH + "/{searchKey:" + ANYTHING_PATTERN + "}")
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Search invoices", response = InvoiceJson.class, responseContainer = "List")
@ApiResponses(value = {})
public Response searchInvoices(@PathParam("searchKey") final String searchKey,
                               @QueryParam(QUERY_SEARCH_OFFSET) @DefaultValue("0") final Long offset,
                               @QueryParam(QUERY_SEARCH_LIMIT) @DefaultValue("100") final Long limit,
                               @QueryParam(QUERY_INVOICE_WITH_ITEMS) @DefaultValue("false") final Boolean withItems,
                               @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                               @javax.ws.rs.core.Context final HttpServletRequest request) throws SubscriptionApiException {
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    final Pagination<Invoice> invoices = invoiceApi.searchInvoices(searchKey, offset, limit, tenantContext);
    final URI nextPageUri = uriBuilder.nextPage(InvoiceResource.class, "searchInvoices", invoices.getNextOffset(), limit, ImmutableMap.<String, String>of("searchKey", searchKey,
                                                                                                                                                         QUERY_INVOICE_WITH_ITEMS, withItems.toString(),
                                                                                                                                                         QUERY_AUDIT, auditMode.getLevel().toString()));
    // Per-request cache of audit logs keyed by account (final so the anonymous class can capture it).
    final Map<UUID, AccountAuditLogs> auditLogsCache = new HashMap<UUID, AccountAuditLogs>();
    return buildStreamingPaginationResponse(invoices,
                                            new Function<Invoice, InvoiceJson>() {
                                                @Override
                                                public InvoiceJson apply(final Invoice invoice) {
                                                    AccountAuditLogs auditLogs = auditLogsCache.get(invoice.getAccountId());
                                                    if (auditLogs == null) {
                                                        auditLogs = auditUserApi.getAccountAuditLogs(invoice.getAccountId(), auditMode.getLevel(), tenantContext);
                                                        auditLogsCache.put(invoice.getAccountId(), auditLogs);
                                                    }
                                                    return new InvoiceJson(invoice, withItems, null, auditLogs);
                                                }
                                            },
                                            nextPageUri);
}
/**
 * Triggers invoice generation for an account at the given target date.
 * Returns 201 with a Location header pointing at the new invoice, or 404 when there is
 * nothing to invoice (INVOICE_NOTHING_TO_DO is translated rather than rethrown).
 */
@TimedResource
@POST
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Trigger an invoice generation", response = InvoiceJson.class)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Created invoice successfully"),
                       @ApiResponse(code = 400, message = "Invalid account id or target datetime supplied")})
public Response createFutureInvoice(@ApiParam(required=true) @QueryParam(QUERY_ACCOUNT_ID) final UUID accountId,
                                    @QueryParam(QUERY_TARGET_DATE) final String targetDate,
                                    @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                    @HeaderParam(HDR_REASON) final String reason,
                                    @HeaderParam(HDR_COMMENT) final String comment,
                                    @javax.ws.rs.core.Context final HttpServletRequest request,
                                    @javax.ws.rs.core.Context final UriInfo uriInfo) throws AccountApiException, InvoiceApiException {
    final CallContext callContext = context.createCallContextWithAccountId(accountId, createdBy, reason, comment, request);
    final LocalDate inputDate = toLocalDate(targetDate);
    try {
        // No dry-run arguments (third parameter): this is a real invoice run.
        final Invoice generatedInvoice = invoiceApi.triggerInvoiceGeneration(accountId, inputDate, null,
                                                                             callContext);
        return uriBuilder.buildResponse(uriInfo, InvoiceResource.class, "getInvoice", generatedInvoice.getId(), request);
    } catch (InvoiceApiException e) {
        // "Nothing to do" is an expected outcome, not an error: map it to 404.
        if (e.getCode() == ErrorCode.INVOICE_NOTHING_TO_DO.getCode()) {
            return Response.status(Status.NOT_FOUND).build();
        }
        throw e;
    }
}
/**
 * Creates a migration invoice from externally-supplied invoice items (used when importing
 * billing history from another system). Items are validated/sanitized against the account's
 * currency before being written; the target date defaults to "today" in the account's
 * reference time when not supplied.
 */
@TimedResource
@POST
@Path("/" + MIGRATION + "/{accountId:" + UUID_PATTERN + "}")
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Create a migration invoice", response = InvoiceJson.class, tags="Invoice")
@ApiResponses(value = {@ApiResponse(code = 201, message = "Created migration invoice successfully"),
                       @ApiResponse(code = 400, message = "Invalid account id or target datetime supplied")})
public Response createMigrationInvoice(@PathParam("accountId") final UUID accountId,
                                       final List<InvoiceItemJson> items,
                                       @Nullable @QueryParam(QUERY_TARGET_DATE) final String targetDate,
                                       @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                       @HeaderParam(HDR_REASON) final String reason,
                                       @HeaderParam(HDR_COMMENT) final String comment,
                                       @javax.ws.rs.core.Context final HttpServletRequest request,
                                       @javax.ws.rs.core.Context final UriInfo uriInfo) throws AccountApiException, InvoiceApiException {
    final CallContext callContext = context.createCallContextWithAccountId(accountId, createdBy, reason, comment, request);
    final Account account = accountUserApi.getAccountById(accountId, callContext);
    final Iterable<InvoiceItem> sanitizedInvoiceItems = validateSanitizeAndTranformInputItems(account.getCurrency(), items);
    final LocalDate resolvedTargetDate = toLocalDateDefaultToday(account, targetDate, callContext);
    final UUID invoiceId = invoiceApi.createMigrationInvoice(accountId, resolvedTargetDate, sanitizedInvoiceItems, callContext);
    return uriBuilder.buildResponse(uriInfo, InvoiceResource.class, "getInvoice", invoiceId, request);
}
/**
 * Generates a dry-run invoice: either a plain "what would be invoiced at targetDate" run
 * (null/empty body) or a what-if run for a subscription operation (body present).
 *
 * BUG FIXES:
 * 1. The original compared {@code DryRunType.X.name()} (a String) and
 *    {@code SubscriptionEventType.X.toString()} (a String) against the typed enum getters of
 *    {@link InvoiceDryRunJson} — see {@code DefaultDryRunArguments}, which assigns
 *    {@code getDryRunType()}/{@code getDryRunAction()} straight to enum fields. A
 *    String never equals an enum, so those branches could never fire. The comparisons now
 *    use the enums directly.
 * 2. The product-name validation error messages said "product category" (copy-paste);
 *    they now say "product name".
 */
@TimedResource
@POST
@Path("/" + DRY_RUN)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Generate a dryRun invoice", response = InvoiceJson.class)
@ApiResponses(value = {/* @ApiResponse(code = 200, message = "Successful"), */
                       @ApiResponse(code = 400, message = "Invalid account id or target datetime supplied")})
public Response generateDryRunInvoice(@Nullable final InvoiceDryRunJson dryRunSubscriptionSpec,
                                      @ApiParam(required=true) @QueryParam(QUERY_ACCOUNT_ID) final UUID accountId,
                                      @Nullable @QueryParam(QUERY_TARGET_DATE) final String targetDate,
                                      @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                      @HeaderParam(HDR_REASON) final String reason,
                                      @HeaderParam(HDR_COMMENT) final String comment,
                                      @javax.ws.rs.core.Context final HttpServletRequest request,
                                      @javax.ws.rs.core.Context final UriInfo uriInfo) throws AccountApiException, InvoiceApiException {
    final CallContext callContext = context.createCallContextWithAccountId(accountId, createdBy, reason, comment, request);
    // Resolve the effective target date: UPCOMING_INVOICE ignores it, SUBSCRIPTION_ACTION
    // prefers the spec's effective date, everything else uses the query parameter.
    final LocalDate inputDate;
    if (dryRunSubscriptionSpec != null) {
        if (DryRunType.UPCOMING_INVOICE.equals(dryRunSubscriptionSpec.getDryRunType())) {
            inputDate = null;
        } else if (DryRunType.SUBSCRIPTION_ACTION.equals(dryRunSubscriptionSpec.getDryRunType()) && dryRunSubscriptionSpec.getEffectiveDate() != null) {
            inputDate = dryRunSubscriptionSpec.getEffectiveDate();
        } else {
            inputDate = toLocalDate(targetDate);
        }
    } else {
        inputDate = toLocalDate(targetDate);
    }
    // Passing a null or empty body means we are trying to generate an invoice with a (future) targetDate
    // On the other hand if body is not null, we are attempting a dryRun subscription operation
    if (dryRunSubscriptionSpec != null && dryRunSubscriptionSpec.getDryRunAction() != null) {
        if (SubscriptionEventType.START_BILLING.equals(dryRunSubscriptionSpec.getDryRunAction())) {
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getProductName(), "DryRun subscription product name should be specified");
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getBillingPeriod(), "DryRun subscription billingPeriod should be specified");
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getProductCategory(), "DryRun subscription product category should be specified");
            if (dryRunSubscriptionSpec.getProductCategory().equals(ProductCategory.ADD_ON)) {
                // Add-ons must attach to an existing base subscription's bundle.
                verifyNonNullOrEmpty(dryRunSubscriptionSpec.getBundleId(), "DryRun bundle ID should be specified");
            }
        } else if (SubscriptionEventType.CHANGE.equals(dryRunSubscriptionSpec.getDryRunAction())) {
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getProductName(), "DryRun subscription product name should be specified");
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getBillingPeriod(), "DryRun subscription billingPeriod should be specified");
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getSubscriptionId(), "DryRun subscriptionID should be specified");
        } else if (SubscriptionEventType.STOP_BILLING.equals(dryRunSubscriptionSpec.getDryRunAction())) {
            verifyNonNullOrEmpty(dryRunSubscriptionSpec.getSubscriptionId(), "DryRun subscriptionID should be specified");
        }
    }
    final Account account = accountUserApi.getAccountById(accountId, callContext);
    final DryRunArguments dryRunArguments = new DefaultDryRunArguments(dryRunSubscriptionSpec, account);
    try {
        final Invoice generatedInvoice = invoiceApi.triggerInvoiceGeneration(accountId, inputDate, dryRunArguments,
                                                                             callContext);
        return Response.status(Status.OK).entity(new InvoiceJson(generatedInvoice, true, null, null)).build();
    } catch (InvoiceApiException e) {
        // "Nothing to do" is an expected outcome for a dry run: map it to 404.
        if (e.getCode() == ErrorCode.INVOICE_NOTHING_TO_DO.getCode()) {
            return Response.status(Status.NOT_FOUND).build();
        }
        throw e;
    }
}
/**
 * Deletes a credit-balance-adjustment (CBA) item from an invoice.
 * The account is loaded first so the delete is performed against the resolved account id.
 * Returns 204 on success.
 */
@TimedResource
@DELETE
@Path("/{invoiceId:" + UUID_PATTERN + "}" + "/{invoiceItemId:" + UUID_PATTERN + "}/cba")
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Delete a CBA item")
@ApiResponses(value = {@ApiResponse(code = 204, message = "Successful operation"),
                       @ApiResponse(code = 400, message = "Invalid account id, invoice id or invoice item id supplied"),
                       @ApiResponse(code = 404, message = "Account or invoice not found")})
public Response deleteCBA(@PathParam("invoiceId") final UUID invoiceId,
                          @PathParam("invoiceItemId") final UUID invoiceItemId,
                          @ApiParam(required=true) @QueryParam(QUERY_ACCOUNT_ID) final UUID accountId,
                          @HeaderParam(HDR_CREATED_BY) final String createdBy,
                          @HeaderParam(HDR_REASON) final String reason,
                          @HeaderParam(HDR_COMMENT) final String comment,
                          @javax.ws.rs.core.Context final HttpServletRequest request) throws AccountApiException, InvoiceApiException {
    final CallContext callContext = context.createCallContextWithAccountId(accountId, createdBy, reason, comment, request);
    final UUID resolvedAccountId = accountUserApi.getAccountById(accountId, callContext).getId();
    invoiceApi.deleteCBA(resolvedAccountId, invoiceId, invoiceItemId, callContext);
    return Response.status(Status.NO_CONTENT).build();
}
/**
 * Adjusts an invoice item.
 * When the body supplies no amount, the overload without amount/currency is used —
 * presumably adjusting the item by its full amount (TODO confirm against InvoiceUserApi).
 * Otherwise the explicit amount and currency from the body are used.
 * Returns 201 with a Location header for the adjusted invoice, or 404 when no
 * adjustment item was produced.
 */
@TimedResource
@POST
@Path("/{invoiceId:" + UUID_PATTERN + "}")
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Adjust an invoice item", response = InvoiceJson.class)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Created adjustment Successfully"),
                       @ApiResponse(code = 400, message = "Invalid account id, invoice id or invoice item id supplied"),
                       @ApiResponse(code = 404, message = "Invoice not found")})
public Response adjustInvoiceItem(@PathParam("invoiceId") final UUID invoiceId,
                                  final InvoiceItemJson json,
                                  @QueryParam(QUERY_REQUESTED_DT) final String requestedDateTimeString,
                                  @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                  @HeaderParam(HDR_REASON) final String reason,
                                  @HeaderParam(HDR_COMMENT) final String comment,
                                  @javax.ws.rs.core.Context final HttpServletRequest request,
                                  @javax.ws.rs.core.Context final UriInfo uriInfo) throws AccountApiException, InvoiceApiException {
    // The body drives the target: it must identify both the account and the item to adjust.
    verifyNonNullOrEmpty(json, "InvoiceItemJson body should be specified");
    verifyNonNullOrEmpty(json.getAccountId(), "InvoiceItemJson accountId needs to be set",
                         json.getInvoiceItemId(), "InvoiceItemJson invoiceItemId needs to be set");
    final UUID accountId = json.getAccountId();
    final CallContext callContext = context.createCallContextWithAccountId(accountId, createdBy, reason, comment, request);
    // Missing requested date defaults to "today" (account-relative, per the helper's name).
    final LocalDate requestedDate = toLocalDateDefaultToday(accountId, requestedDateTimeString, callContext);
    final InvoiceItem adjustmentItem;
    if (json.getAmount() == null) {
        // No explicit amount: use the overload without amount/currency.
        adjustmentItem = invoiceApi.insertInvoiceItemAdjustment(accountId,
                                                               invoiceId,
                                                               json.getInvoiceItemId(),
                                                               requestedDate,
                                                               json.getDescription(),
                                                               json.getItemDetails(),
                                                               callContext);
    } else {
        // Explicit amount and currency taken from the request body.
        adjustmentItem = invoiceApi.insertInvoiceItemAdjustment(accountId,
                                                               invoiceId,
                                                               json.getInvoiceItemId(),
                                                               requestedDate,
                                                               json.getAmount(),
                                                               json.getCurrency(),
                                                               json.getDescription(),
                                                               json.getItemDetails(),
                                                               callContext);
    }
    if (adjustmentItem == null) {
        return Response.status(Status.NOT_FOUND).build();
    } else {
        return uriBuilder.buildResponse(uriInfo, InvoiceResource.class, "getInvoice", adjustmentItem.getInvoiceId(), request);
    }
}
@TimedResource
@POST
@Produces(APPLICATION_JSON)
@Consumes(APPLICATION_JSON)
@Path("/" + CHARGES + "/{accountId:" + UUID_PATTERN + "}")
@ApiOperation(value = "Create external charge(s)", response = InvoiceItemJson.class, responseContainer = "List")
@ApiResponses(value = {@ApiResponse(code = 201, message = "Created external charge Successfully"),
                       @ApiResponse(code = 400, message = "Invalid account id supplied"),
                       @ApiResponse(code = 404, message = "Account not found")})
// Creates one or more external charges on the account and optionally pays the resulting invoice(s).
// NOTE(review): swagger advertises 201 but the method returns 200 (Status.OK) — confirm which is intended.
public Response createExternalCharges(@PathParam("accountId") final UUID accountId,
                                      final List<InvoiceItemJson> externalChargesJson,
                                      @QueryParam(QUERY_REQUESTED_DT) final String requestedDateTimeString,
                                      @QueryParam(QUERY_PAY_INVOICE) @DefaultValue("false") final Boolean payInvoice,
                                      @QueryParam(QUERY_PLUGIN_PROPERTY) final List<String> pluginPropertiesString,
                                      @QueryParam(QUERY_AUTO_COMMIT) @DefaultValue("false") final Boolean autoCommit,
                                      @QueryParam(QUERY_PAYMENT_EXTERNAL_KEY) final String paymentExternalKey,
                                      @QueryParam(QUERY_TRANSACTION_EXTERNAL_KEY) final String transactionExternalKey,
                                      @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                      @HeaderParam(HDR_REASON) final String reason,
                                      @HeaderParam(HDR_COMMENT) final String comment,
                                      @javax.ws.rs.core.Context final UriInfo uriInfo,
                                      @javax.ws.rs.core.Context final HttpServletRequest request) throws AccountApiException, InvoiceApiException, PaymentApiException {
    final Iterable<PluginProperty> pluginProperties = extractPluginProperties(pluginPropertiesString);
    final CallContext callContext = context.createCallContextWithAccountId(accountId, createdBy, reason, comment, request);
    final Account account = accountUserApi.getAccountById(accountId, callContext);
    // Defaults missing item currencies to the account currency and converts to InvoiceItem.
    final Iterable<InvoiceItem> sanitizedExternalChargesJson = validateSanitizeAndTranformInputItems(account.getCurrency(), externalChargesJson);
    // Get the effective date of the external charge, in the account timezone
    final LocalDate requestedDate = toLocalDateDefaultToday(account, requestedDateTimeString, callContext);
    final List<InvoiceItem> createdExternalCharges = invoiceApi.insertExternalCharges(account.getId(), requestedDate, sanitizedExternalChargesJson, autoCommit, callContext);
    // if all createdExternalCharges point to the same invoiceId, use the provided paymentExternalKey and / or transactionExternalKey
    final boolean haveSameInvoiceId = Iterables.all(createdExternalCharges, new Predicate<InvoiceItem>() {
        @Override
        public boolean apply(final InvoiceItem input) {
            return input.getInvoiceId().equals(createdExternalCharges.get(0).getInvoiceId());
        }
    });
    if (payInvoice) {
        // The charges may have landed on several invoices; pay each affected invoice exactly once.
        final Collection<UUID> paidInvoices = new HashSet<UUID>();
        for (final InvoiceItem externalCharge : createdExternalCharges) {
            if (!paidInvoices.contains(externalCharge.getInvoiceId())) {
                paidInvoices.add(externalCharge.getInvoiceId());
                final Invoice invoice = invoiceApi.getInvoice(externalCharge.getInvoiceId(), callContext);
                // The caller-supplied external keys are only safe to use when a single invoice is involved;
                // otherwise they would collide across multiple payments.
                createPurchaseForInvoice(account, invoice.getId(), invoice.getBalance(), account.getPaymentMethodId(), false,
                                         (haveSameInvoiceId && paymentExternalKey != null) ? paymentExternalKey : null,
                                         (haveSameInvoiceId && transactionExternalKey != null) ? transactionExternalKey : null,
                                         pluginProperties, callContext);
            }
        }
    }
    // Lazy view over the created items; materialized during JSON serialization.
    final List<InvoiceItemJson> createdExternalChargesJson = Lists.<InvoiceItem, InvoiceItemJson>transform(createdExternalCharges,
                                                                                                          new Function<InvoiceItem, InvoiceItemJson>() {
                                                                                                              @Override
                                                                                                              public InvoiceItemJson apply(final InvoiceItem input) {
                                                                                                                  return new InvoiceItemJson(input);
                                                                                                              }
                                                                                                          }
                                                                                                         );
    return Response.status(Status.OK).entity(createdExternalChargesJson).build();
}
/**
 * Validates that every input item either carries the account currency or no currency at all
 * (in which case the account currency is substituted), and converts the items to {@link InvoiceItem}.
 *
 * <p>Validation is performed eagerly. The previous implementation used lazy Guava
 * {@code Iterables.transform} views, so the currency-mismatch {@code IllegalArgumentException}
 * was thrown while the <em>caller</em> iterated — outside the try/catch here — and escaped as a
 * raw runtime exception (HTTP 500) instead of the intended {@code CURRENCY_INVALID} error.
 *
 * @param accountCurrency the currency of the owning account
 * @param inputItems      the raw JSON items supplied by the caller
 * @return the converted items, in input order
 * @throws InvoiceApiException with {@code ErrorCode.CURRENCY_INVALID} when an item's currency
 *                             differs from the account currency
 */
private Iterable<InvoiceItem> validateSanitizeAndTranformInputItems(final Currency accountCurrency, final Iterable<InvoiceItemJson> inputItems) throws InvoiceApiException {
    final List<InvoiceItem> result = new ArrayList<InvoiceItem>();
    for (final InvoiceItemJson input : inputItems) {
        final InvoiceItemJson sanitized;
        if (input.getCurrency() != null) {
            if (!input.getCurrency().equals(accountCurrency)) {
                // Same error payload the lazy version intended to produce (the mismatching currency).
                throw new InvoiceApiException(ErrorCode.CURRENCY_INVALID, accountCurrency, input.getCurrency().toString());
            }
            sanitized = input;
        } else {
            // No currency supplied: rebuild the item with the account currency filled in.
            sanitized = new InvoiceItemJson(null,
                                            input.getInvoiceId(),
                                            null,
                                            input.getAccountId(),
                                            input.getChildAccountId(),
                                            input.getBundleId(),
                                            input.getSubscriptionId(),
                                            input.getPlanName(),
                                            input.getPhaseName(),
                                            input.getUsageName(),
                                            input.getPrettyPlanName(),
                                            input.getPrettyPhaseName(),
                                            input.getPrettyUsageName(),
                                            input.getItemType(),
                                            input.getDescription(),
                                            input.getStartDate(),
                                            input.getEndDate(),
                                            input.getAmount(),
                                            input.getRate(),
                                            accountCurrency,
                                            input.getQuantity(),
                                            input.getItemDetails(),
                                            null,
                                            null);
        }
        result.add(sanitized.toInvoiceItem());
    }
    return result;
}
@TimedResource
@GET
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + PAYMENTS)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Retrieve payments associated with an invoice", response = InvoicePaymentJson.class, responseContainer = "List")
@ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid invoice id supplied"),
                       @ApiResponse(code = 404, message = "Invoice not found")})
// Returns all payments linked to the invoice, sorted per INVOICE_PAYMENT_ORDERING.
public Response getPaymentsForInvoice(@PathParam("invoiceId") final UUID invoiceId,
                                      @QueryParam(QUERY_WITH_PLUGIN_INFO) @DefaultValue("false") final Boolean withPluginInfo,
                                      @QueryParam(QUERY_WITH_ATTEMPTS) @DefaultValue("false") final Boolean withAttempts,
                                      @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                                      @javax.ws.rs.core.Context final HttpServletRequest request) throws PaymentApiException, InvoiceApiException {
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    final Invoice invoice = invoiceApi.getInvoice(invoiceId, tenantContext);
    // Extract unique set of paymentId for this invoice
    final Set<UUID> invoicePaymentIds = ImmutableSet.copyOf(Iterables.transform(invoice.getPayments(), new Function<InvoicePayment, UUID>() {
        @Override
        public UUID apply(final InvoicePayment input) {
            return input.getPaymentId();
        }
    }));
    if (invoicePaymentIds.isEmpty()) {
        // No payments yet: return an empty list rather than 404.
        return Response.status(Status.OK).entity(ImmutableList.<InvoicePaymentJson>of()).build();
    }
    // One payment-API round trip per distinct payment id.
    final List<Payment> payments = new ArrayList<Payment>();
    for (final UUID paymentId : invoicePaymentIds) {
        final Payment payment = paymentApi.getPayment(paymentId, withPluginInfo, withAttempts, ImmutableList.<PluginProperty>of(), tenantContext);
        payments.add(payment);
    }
    final Iterable<InvoicePaymentJson> result = INVOICE_PAYMENT_ORDERING.sortedCopy(Iterables.transform(payments, new Function<Payment, InvoicePaymentJson>() {
        @Override
        public InvoicePaymentJson apply(final Payment input) {
            return new InvoicePaymentJson(input, invoice.getId(), null);
        }
    }));
    return Response.status(Status.OK).entity(result).build();
}
@TimedResource
@POST
@Produces(APPLICATION_JSON)
@Consumes(APPLICATION_JSON)
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + PAYMENTS)
@ApiOperation(value = "Trigger a payment for invoice", response = InvoicePaymentJson.class)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Created payment Successfully"),
                       @ApiResponse(code = 204, message = "Nothing to pay for"),
                       @ApiResponse(code = 400, message = "Invalid account id or invoice id supplied"),
                       @ApiResponse(code = 404, message = "Account not found")})
public Response createInstantPayment(@PathParam("invoiceId") final UUID invoiceId,
                                     final InvoicePaymentJson payment,
                                     @QueryParam(QUERY_PAYMENT_EXTERNAL) @DefaultValue("false") final Boolean externalPayment,
                                     @QueryParam(QUERY_PLUGIN_PROPERTY) final List<String> pluginPropertiesString,
                                     @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                     @HeaderParam(HDR_REASON) final String reason,
                                     @HeaderParam(HDR_COMMENT) final String comment,
                                     @javax.ws.rs.core.Context final HttpServletRequest request,
                                     @javax.ws.rs.core.Context final UriInfo uriInfo) throws AccountApiException, PaymentApiException {
    verifyNonNullOrEmpty(payment, "InvoicePaymentJson body should be specified");
    verifyNonNullOrEmpty(payment.getAccountId(), "InvoicePaymentJson accountId needs to be set",
                         payment.getTargetInvoiceId(), "InvoicePaymentJson targetInvoiceId needs to be set",
                         payment.getPurchasedAmount(), "InvoicePaymentJson purchasedAmount needs to be set");
    Preconditions.checkArgument(!externalPayment || payment.getPaymentMethodId() == null, "InvoicePaymentJson should not contain a paymentMethodId when this is an external payment");
    final Iterable<PluginProperty> pluginProperties = extractPluginProperties(pluginPropertiesString);
    final CallContext callContext = context.createCallContextNoAccountId(createdBy, reason, comment, request);
    final Account account = accountUserApi.getAccountById(payment.getAccountId(), callContext);
    // External payments never use a payment method; otherwise prefer the one supplied in the body,
    // falling back to the account's default.
    final UUID paymentMethodId;
    if (externalPayment) {
        paymentMethodId = null;
    } else if (payment.getPaymentMethodId() != null) {
        paymentMethodId = payment.getPaymentMethodId();
    } else {
        paymentMethodId = account.getPaymentMethodId();
    }
    final Payment result = createPurchaseForInvoice(account, invoiceId, payment.getPurchasedAmount(), paymentMethodId, externalPayment,
                                                    payment.getPaymentExternalKey(), null, pluginProperties, callContext);
    // A null result means there was nothing to pay for.
    if (result == null) {
        return Response.status(Status.NO_CONTENT).build();
    }
    return uriBuilder.buildResponse(uriInfo, InvoicePaymentResource.class, "getInvoicePayment", result.getId(), request);
}
@TimedResource
@GET
@Path("/" + INVOICE_TRANSLATION + "/{locale:" + ANYTHING_PATTERN + "}/")
@Produces(TEXT_PLAIN)
@ApiOperation(value = "Retrieves the invoice translation for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid locale supplied"),
                       @ApiResponse(code = 404, message = "Translation not found")})
public Response getInvoiceTranslation(@PathParam("locale") final String localeStr,
                                      @javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException, TenantApiException {
    // Per-locale lookup of the tenant's invoice translation bundle.
    final TenantKey key = TenantKey.INVOICE_TRANSLATION_;
    return getTemplateResource(localeStr, key, request);
}
@TimedResource
@POST
@Produces(TEXT_PLAIN)
@Consumes(TEXT_PLAIN)
@Path("/" + INVOICE_TRANSLATION + "/{locale:" + ANYTHING_PATTERN + "}/")
@ApiOperation(value = "Upload the invoice translation for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Uploaded invoice translation Successfully")})
public Response uploadInvoiceTranslation(@PathParam("locale") final String localeStr,
                                         final String invoiceTranslation,
                                         @QueryParam(QUERY_DELETE_IF_EXISTS) @DefaultValue("false") final boolean deleteIfExists,
                                         @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                         @HeaderParam(HDR_REASON) final String reason,
                                         @HeaderParam(HDR_COMMENT) final String comment,
                                         @javax.ws.rs.core.Context final HttpServletRequest request,
                                         @javax.ws.rs.core.Context final UriInfo uriInfo) throws Exception {
    // Store (or replace, when deleteIfExists is set) the per-locale invoice translation,
    // then point the Location header at the matching GET endpoint.
    return uploadTemplateResource(invoiceTranslation, localeStr, deleteIfExists,
                                  TenantKey.INVOICE_TRANSLATION_, "getInvoiceTranslation",
                                  createdBy, reason, comment, request, uriInfo);
}
@TimedResource
@GET
@Path("/" + INVOICE_CATALOG_TRANSLATION + "/{locale:" + ANYTHING_PATTERN + "}/")
@Produces(TEXT_PLAIN)
@ApiOperation(value = "Retrieves the catalog translation for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid locale supplied"),
                       @ApiResponse(code = 404, message = "Template not found")})
public Response getCatalogTranslation(@PathParam("locale") final String localeStr,
                                      @javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException, TenantApiException {
    // Per-locale lookup of the tenant's catalog translation bundle.
    final TenantKey key = TenantKey.CATALOG_TRANSLATION_;
    return getTemplateResource(localeStr, key, request);
}
@TimedResource
@POST
@Produces(TEXT_PLAIN)
@Consumes(TEXT_PLAIN)
@Path("/" + INVOICE_CATALOG_TRANSLATION + "/{locale:" + ANYTHING_PATTERN + "}/")
@ApiOperation(value = "Upload the catalog translation for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Uploaded catalog translation Successfully")})
public Response uploadCatalogTranslation(@PathParam("locale") final String localeStr,
                                         final String catalogTranslation,
                                         @QueryParam(QUERY_DELETE_IF_EXISTS) @DefaultValue("false") final boolean deleteIfExists,
                                         @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                         @HeaderParam(HDR_REASON) final String reason,
                                         @HeaderParam(HDR_COMMENT) final String comment,
                                         @javax.ws.rs.core.Context final HttpServletRequest request,
                                         @javax.ws.rs.core.Context final UriInfo uriInfo) throws Exception {
    // Store (or replace, when deleteIfExists is set) the per-locale catalog translation,
    // then point the Location header at the matching GET endpoint.
    return uploadTemplateResource(catalogTranslation, localeStr, deleteIfExists,
                                  TenantKey.CATALOG_TRANSLATION_, "getCatalogTranslation",
                                  createdBy, reason, comment, request, uriInfo);
}
@TimedResource
@GET
@Path("/" + INVOICE_TEMPLATE)
@Produces(TEXT_HTML)
@ApiOperation(value = "Retrieves the invoice template for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 404, message = "Template not found")})
public Response getInvoiceTemplate(@javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException, TenantApiException {
    // The invoice HTML template is not localized: a null locale selects the bare tenant key.
    final TenantKey key = TenantKey.INVOICE_TEMPLATE;
    return getTemplateResource(null, key, request);
}
@TimedResource
@POST
@Produces(TEXT_HTML)
@Consumes(TEXT_HTML)
@Path("/" + INVOICE_TEMPLATE)
@ApiOperation(value = "Upload the invoice template for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Uploaded invoice template Successfully")})
public Response uploadInvoiceTemplate(final String catalogTranslation,
                                      @QueryParam(QUERY_DELETE_IF_EXISTS) @DefaultValue("false") final boolean deleteIfExists,
                                      @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                      @HeaderParam(HDR_REASON) final String reason,
                                      @HeaderParam(HDR_COMMENT) final String comment,
                                      @javax.ws.rs.core.Context final HttpServletRequest request,
                                      @javax.ws.rs.core.Context final UriInfo uriInfo) throws Exception {
    // Stores the invoice HTML template under the non-localized tenant key (null locale).
    // NOTE(review): the body parameter is misnamed "catalogTranslation" — it actually carries
    // the invoice template; renaming would be cosmetic-safe but is left for a dedicated change.
    return uploadTemplateResource(catalogTranslation, null, deleteIfExists,
                                  TenantKey.INVOICE_TEMPLATE, "getInvoiceTemplate",
                                  createdBy, reason, comment, request, uriInfo);
}
@TimedResource
@GET
@Path("/" + INVOICE_MP_TEMPLATE + "/{locale:" + ANYTHING_PATTERN + "}/")
@Produces(TEXT_HTML)
@ApiOperation(value = "Retrieves the manualPay invoice template for the tenant", response = String.class)
@ApiResponses(value = {@ApiResponse(code = 404, message = "Template not found")})
// NOTE(review): localeStr is accepted in the path but ignored — a null locale is passed below, so
// the non-localized INVOICE_MP_TEMPLATE key is always looked up. This is consistent with
// uploadInvoiceMPTemplate (which stores under the bare key), but the locale path segment is then
// dead weight; confirm whether it should be dropped from @Path or actually honored.
public Response getInvoiceMPTemplate(@PathParam("locale") final String localeStr,
                                     @javax.ws.rs.core.Context final HttpServletRequest request) throws InvoiceApiException, TenantApiException {
    return getTemplateResource(null, TenantKey.INVOICE_MP_TEMPLATE, request);
}
@TimedResource
@POST
@Produces(TEXT_HTML)
@Consumes(TEXT_HTML)
@Path("/" + INVOICE_MP_TEMPLATE)
@ApiOperation(value = "Upload the manualPay invoice template for the tenant", response = String.class)
// Consistency fix: the sibling upload endpoints all declare their 201 response; this one was missing it.
@ApiResponses(value = {@ApiResponse(code = 201, message = "Uploaded manualPay invoice template Successfully")})
public Response uploadInvoiceMPTemplate(final String catalogTranslation,
                                        @QueryParam(QUERY_DELETE_IF_EXISTS) @DefaultValue("false") final boolean deleteIfExists,
                                        @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                        @HeaderParam(HDR_REASON) final String reason,
                                        @HeaderParam(HDR_COMMENT) final String comment,
                                        @javax.ws.rs.core.Context final HttpServletRequest request,
                                        @javax.ws.rs.core.Context final UriInfo uriInfo) throws Exception {
    // Stores the manual-pay invoice HTML template under the non-localized tenant key (null locale).
    // NOTE(review): the body parameter is misnamed "catalogTranslation" — it carries the template.
    return uploadTemplateResource(catalogTranslation,
                                  null,
                                  deleteIfExists,
                                  TenantKey.INVOICE_MP_TEMPLATE,
                                  "getInvoiceMPTemplate",
                                  createdBy,
                                  reason,
                                  comment,
                                  request,
                                  uriInfo);
}
/**
 * Shared implementation for the template/translation upload endpoints.
 *
 * <p>When a locale is given, the payload is first validated as a properties bundle and stored
 * under a locale-qualified tenant key; otherwise the bare tenant key is used. An existing value
 * is replaced only when {@code deleteIfExists} is set, else a 400 is returned.
 *
 * @param templateResource the raw template/translation payload
 * @param localeStr        optional locale string; selects a locale-qualified tenant key
 * @param deleteIfExists   whether to overwrite an existing value
 * @param tenantKey        the tenant key family to store under
 * @param getMethodStr     name of the GET resource method used to build the Location header
 */
private Response uploadTemplateResource(final String templateResource,
                                        @Nullable final String localeStr,
                                        final boolean deleteIfExists,
                                        final TenantKey tenantKey,
                                        final String getMethodStr,
                                        final String createdBy,
                                        final String reason,
                                        final String comment,
                                        final HttpServletRequest request,
                                        final UriInfo uriInfo) throws Exception {
    final String tenantKeyStr;
    if (localeStr != null) {
        // Validation purpose: PropertyResourceBundle throws on a malformed properties stream.
        // try-with-resources: the original leaked this stream (harmless for ByteArrayInputStream, but tidy).
        // NOTE(review): getBytes() uses the platform default charset — confirm payloads are charset-safe.
        try (final InputStream stream = new ByteArrayInputStream(templateResource.getBytes())) {
            new PropertyResourceBundle(stream);
        }
        // localeStr is known non-null in this branch; the original's "?: defaultLocale" fallback was dead code.
        final Locale locale = LocaleUtils.toLocale(localeStr);
        tenantKeyStr = LocaleUtils.localeString(locale, tenantKey.toString());
    } else {
        tenantKeyStr = tenantKey.toString();
    }
    final CallContext callContext = context.createCallContextNoAccountId(createdBy, reason, comment, request);
    if (!tenantApi.getTenantValuesForKey(tenantKeyStr, callContext).isEmpty()) {
        if (deleteIfExists) {
            tenantApi.deleteTenantKey(tenantKeyStr, callContext);
        } else {
            // Key already present and the caller did not ask for replacement.
            return Response.status(Status.BAD_REQUEST).build();
        }
    }
    tenantApi.addTenantKeyValue(tenantKeyStr, templateResource, callContext);
    return uriBuilder.buildResponse(uriInfo, InvoiceResource.class, getMethodStr, localeStr, request);
}
/**
 * Shared implementation for the template/translation retrieval endpoints.
 * Localized values live under "<key><locale>"; non-localized values under the bare key.
 */
private Response getTemplateResource(@Nullable final String localeStr,
                                     final TenantKey tenantKey,
                                     final HttpServletRequest request) throws InvoiceApiException, TenantApiException {
    String tenantKeyStr = tenantKey.toString();
    if (localeStr != null) {
        tenantKeyStr = LocaleUtils.localeString(LocaleUtils.toLocale(localeStr), tenantKey.toString());
    }
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    final List<String> values = tenantApi.getTenantValuesForKey(tenantKeyStr, tenantContext);
    if (values.isEmpty()) {
        return Response.status(Status.NOT_FOUND).build();
    }
    // Only the first stored value is served.
    return Response.status(Status.OK).entity(values.get(0)).build();
}
@TimedResource
@GET
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + CUSTOM_FIELDS)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Retrieve invoice custom fields", response = CustomFieldJson.class, responseContainer = "List", nickname = "getInvoiceCustomFields")
@ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid invoice id supplied")})
public Response getCustomFields(@PathParam(ID_PARAM_NAME) final UUID id,
                                @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                                @javax.ws.rs.core.Context final HttpServletRequest request) {
    // Delegates to the shared custom-field plumbing (scoped to INVOICE via getObjectType()).
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    return super.getCustomFields(id, auditMode, tenantContext);
}
@TimedResource
@POST
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + CUSTOM_FIELDS)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Add custom fields to invoice", response = CustomField.class, responseContainer = "List")
@ApiResponses(value = {@ApiResponse(code = 201, message = "Custom field created successfully"),
                       @ApiResponse(code = 400, message = "Invalid invoice id supplied")})
public Response createInvoiceCustomFields(@PathParam(ID_PARAM_NAME) final UUID id,
                                          final List<CustomFieldJson> customFields,
                                          @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                          @HeaderParam(HDR_REASON) final String reason,
                                          @HeaderParam(HDR_COMMENT) final String comment,
                                          @javax.ws.rs.core.Context final HttpServletRequest request,
                                          @javax.ws.rs.core.Context final UriInfo uriInfo) throws CustomFieldApiException {
    // Delegates to the shared custom-field plumbing (scoped to INVOICE via getObjectType()).
    final CallContext callContext = context.createCallContextNoAccountId(createdBy, reason, comment, request);
    return super.createCustomFields(id, customFields, callContext, uriInfo, request);
}
@TimedResource
@PUT
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + CUSTOM_FIELDS)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Modify custom fields to invoice")
@ApiResponses(value = {@ApiResponse(code = 204, message = "Successful operation"),
                       @ApiResponse(code = 400, message = "Invalid invoice id supplied")})
public Response modifyInvoiceCustomFields(@PathParam(ID_PARAM_NAME) final UUID id,
                                          final List<CustomFieldJson> customFields,
                                          @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                          @HeaderParam(HDR_REASON) final String reason,
                                          @HeaderParam(HDR_COMMENT) final String comment,
                                          @javax.ws.rs.core.Context final HttpServletRequest request) throws CustomFieldApiException {
    // Delegates to the shared custom-field plumbing (scoped to INVOICE via getObjectType()).
    final CallContext callContext = context.createCallContextNoAccountId(createdBy, reason, comment, request);
    return super.modifyCustomFields(id, customFields, callContext);
}
@TimedResource
@DELETE
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + CUSTOM_FIELDS)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Remove custom fields from invoice")
@ApiResponses(value = {@ApiResponse(code = 204, message = "Successful operation"),
                       @ApiResponse(code = 400, message = "Invalid invoice id supplied")})
public Response deleteInvoiceCustomFields(@PathParam(ID_PARAM_NAME) final UUID id,
                                          @QueryParam(QUERY_CUSTOM_FIELD) final List<UUID> customFieldList,
                                          @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                          @HeaderParam(HDR_REASON) final String reason,
                                          @HeaderParam(HDR_COMMENT) final String comment,
                                          @javax.ws.rs.core.Context final HttpServletRequest request) throws CustomFieldApiException {
    // Delegates to the shared custom-field plumbing (scoped to INVOICE via getObjectType()).
    final CallContext callContext = context.createCallContextNoAccountId(createdBy, reason, comment, request);
    return super.deleteCustomFields(id, customFieldList, callContext);
}
@TimedResource
@GET
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + TAGS)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Retrieve invoice tags", response = TagJson.class, responseContainer = "List", nickname = "getInvoiceTags")
@ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid invoice id supplied"),
                       @ApiResponse(code = 404, message = "Invoice not found")})
public Response getTags(@PathParam(ID_PARAM_NAME) final UUID invoiceId,
                        @QueryParam(QUERY_INCLUDED_DELETED) @DefaultValue("false") final Boolean includedDeleted,
                        @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                        @javax.ws.rs.core.Context final HttpServletRequest request) throws TagDefinitionApiException, InvoiceApiException {
    final TenantContext tenantContext = context.createTenantContextNoAccountId(request);
    // The shared tag plumbing needs the owning account; resolve it through the invoice
    // (this also yields a 404 via InvoiceApiException when the invoice does not exist).
    final UUID owningAccountId = invoiceApi.getInvoice(invoiceId, tenantContext).getAccountId();
    return super.getTags(owningAccountId, invoiceId, auditMode, includedDeleted, tenantContext);
}
@TimedResource
@POST
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + TAGS)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Add tags to invoice", response = TagJson.class, responseContainer = "List")
@ApiResponses(value = {@ApiResponse(code = 201, message = "Tag created successfully"),
                       @ApiResponse(code = 400, message = "Invalid invoice id supplied")})
public Response createInvoiceTags(@PathParam(ID_PARAM_NAME) final UUID id,
                                  final List<UUID> tagList,
                                  @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                  @HeaderParam(HDR_REASON) final String reason,
                                  @HeaderParam(HDR_COMMENT) final String comment,
                                  @javax.ws.rs.core.Context final UriInfo uriInfo,
                                  @javax.ws.rs.core.Context final HttpServletRequest request) throws TagApiException {
    // Delegates to the shared tag plumbing (scoped to INVOICE via getObjectType()).
    final CallContext callContext = context.createCallContextNoAccountId(createdBy, reason, comment, request);
    return super.createTags(id, tagList, uriInfo, callContext, request);
}
@TimedResource
@DELETE
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + TAGS)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Remove tags from invoice")
@ApiResponses(value = {@ApiResponse(code = 204, message = "Successful operation"),
                       @ApiResponse(code = 400, message = "Invalid invoice id supplied")})
public Response deleteInvoiceTags(@PathParam(ID_PARAM_NAME) final UUID id,
                                  @QueryParam(QUERY_TAG) final List<UUID> tagList,
                                  @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                  @HeaderParam(HDR_REASON) final String reason,
                                  @HeaderParam(HDR_COMMENT) final String comment,
                                  @javax.ws.rs.core.Context final HttpServletRequest request) throws TagApiException {
    // Delegates to the shared tag plumbing (scoped to INVOICE via getObjectType()).
    final CallContext callContext = context.createCallContextNoAccountId(createdBy, reason, comment, request);
    return super.deleteTags(id, tagList, callContext);
}
@TimedResource
@PUT
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + COMMIT_INVOICE)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Perform the invoice status transition from DRAFT to COMMITTED")
@ApiResponses(value = {@ApiResponse(code = 204, message = "Successful operation"),
                       @ApiResponse(code = 404, message = "Invoice not found")})
public Response commitInvoice(@PathParam("invoiceId") final UUID invoiceId,
                              @HeaderParam(HDR_CREATED_BY) final String createdBy,
                              @HeaderParam(HDR_REASON) final String reason,
                              @HeaderParam(HDR_COMMENT) final String comment,
                              @javax.ws.rs.core.Context final HttpServletRequest request,
                              @javax.ws.rs.core.Context final UriInfo uriInfo) throws InvoiceApiException {
    // Transition DRAFT -> COMMITTED; errors (e.g. unknown invoice) surface as InvoiceApiException.
    invoiceApi.commitInvoice(invoiceId, context.createCallContextNoAccountId(createdBy, reason, comment, request));
    return Response.noContent().build();
}
@TimedResource
@PUT
@Path("/{invoiceId:" + UUID_PATTERN + "}/" + VOID_INVOICE)
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Perform the action of voiding an invoice")
@ApiResponses(value = {@ApiResponse(code = 204, message = "Successful operation"),
                       @ApiResponse(code = 400, message = "Invalid invoice id supplied"),
                       @ApiResponse(code = 404, message = "Invoice not found")})
public Response voidInvoice(@PathParam("invoiceId") final UUID invoiceId,
                            @HeaderParam(HDR_CREATED_BY) final String createdBy,
                            @HeaderParam(HDR_REASON) final String reason,
                            @HeaderParam(HDR_COMMENT) final String comment,
                            @javax.ws.rs.core.Context final HttpServletRequest request,
                            @javax.ws.rs.core.Context final UriInfo uriInfo) throws InvoiceApiException {
    // Voids the invoice; errors (e.g. unknown invoice) surface as InvoiceApiException.
    invoiceApi.voidInvoice(invoiceId, context.createCallContextNoAccountId(createdBy, reason, comment, request));
    return Response.noContent().build();
}
// Scopes the shared custom-field/tag plumbing in the superclass to INVOICE objects.
@Override
protected ObjectType getObjectType() {
    return ObjectType.INVOICE;
}
/**
 * Adapter from the JSON dry-run payload to the invoice API's {@link DryRunArguments}.
 * A null input means a plain TARGET_DATE dry run with no subscription-level details.
 */
private static class DefaultDryRunArguments implements DryRunArguments {
    private final DryRunType dryRunType;
    private final SubscriptionEventType action;
    private final UUID subscriptionId;
    private final LocalDate effectiveDate;
    private final PlanPhaseSpecifier specifier;
    private final UUID bundleId;
    private final BillingActionPolicy billingPolicy;
    private final List<PlanPhasePriceOverride> overrides;

    public DefaultDryRunArguments(final InvoiceDryRunJson input, final Account account) {
        if (input == null) {
            this.dryRunType = DryRunType.TARGET_DATE;
            this.action = null;
            this.subscriptionId = null;
            this.effectiveDate = null;
            this.specifier = null;
            this.bundleId = null;
            this.billingPolicy = null;
            this.overrides = null;
        } else {
            this.dryRunType = input.getDryRunType() != null ? input.getDryRunType() : DryRunType.TARGET_DATE;
            // The original wrapped several of these in pointless "x != null ? x : null" ternaries;
            // collapsed to plain reads (identical behavior).
            this.action = input.getDryRunAction();
            this.subscriptionId = input.getSubscriptionId();
            this.bundleId = input.getBundleId();
            this.effectiveDate = input.getEffectiveDate();
            this.billingPolicy = input.getBillingPolicy();
            // A plan specifier can only be built when product name, category and billing period are all present.
            final PlanPhaseSpecifier planPhaseSpecifier = (input.getProductName() != null &&
                                                          input.getProductCategory() != null &&
                                                          input.getBillingPeriod() != null) ?
                                                         new PlanPhaseSpecifier(input.getProductName(),
                                                                                input.getBillingPeriod(),
                                                                                input.getPriceListName(),
                                                                                input.getPhaseType()) :
                                                         null;
            this.specifier = planPhaseSpecifier;
            this.overrides = input.getPriceOverrides() != null ?
                             ImmutableList.copyOf(Iterables.transform(input.getPriceOverrides(), new Function<PhasePriceOverrideJson, PlanPhasePriceOverride>() {
                                 @Nullable
                                 @Override
                                 public PlanPhasePriceOverride apply(@Nullable final PhasePriceOverrideJson input) {
                                     // NOTE(review): input is annotated @Nullable but dereferenced
                                     // unconditionally — confirm null elements cannot occur here.
                                     if (input.getPhaseName() != null) {
                                         // Override targets an explicit phase by name.
                                         return new DefaultPlanPhasePriceOverride(input.getPhaseName(), account.getCurrency(), input.getFixedPrice(), input.getRecurringPrice(), null);
                                     } else {
                                         // No phase name: override is attached to the overall plan phase specifier.
                                         return new DefaultPlanPhasePriceOverride(planPhaseSpecifier, account.getCurrency(), input.getFixedPrice(), input.getRecurringPrice(), null);
                                     }
                                 }
                             })) : ImmutableList.<PlanPhasePriceOverride>of();
        }
    }

    @Override
    public DryRunType getDryRunType() {
        return dryRunType;
    }

    @Override
    public PlanPhaseSpecifier getPlanPhaseSpecifier() {
        return specifier;
    }

    @Override
    public SubscriptionEventType getAction() {
        return action;
    }

    @Override
    public UUID getSubscriptionId() {
        return subscriptionId;
    }

    @Override
    public LocalDate getEffectiveDate() {
        return effectiveDate;
    }

    @Override
    public UUID getBundleId() {
        return bundleId;
    }

    @Override
    public BillingActionPolicy getBillingActionPolicy() {
        return billingPolicy;
    }

    @Override
    public List<PlanPhasePriceOverride> getPlanPhasePriceOverrides() {
        return overrides;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("DefaultDryRunArguments{");
        sb.append("dryRunType=").append(dryRunType);
        sb.append(", action=").append(action);
        sb.append(", subscriptionId=").append(subscriptionId);
        sb.append(", effectiveDate=").append(effectiveDate);
        sb.append(", specifier=").append(specifier);
        sb.append(", bundleId=").append(bundleId);
        sb.append(", billingPolicy=").append(billingPolicy);
        sb.append(", overrides=").append(overrides);
        sb.append('}');
        return sb.toString();
    }
}
}
|
jaxrs: Modify dryrun endpoint to return 204 instead of 404 when there is nothing to generate
|
jaxrs/src/main/java/org/killbill/billing/jaxrs/resources/InvoiceResource.java
|
jaxrs: Modify dryrun endpoint to return 204 instead of 404 when there is nothing to generate
|
|
Java
|
apache-2.0
|
39522f00e87698fe83009cd36812eca15603b3d9
| 0
|
coolcrowd/object-service,coolcrowd/ObjectService,coolcrowd/object-service,coolcrowd/ObjectService,coolcrowd/object-service,coolcrowd/object-service,coolcrowd/ObjectService,coolcrowd/ObjectService
|
package edu.kit.ipd.crowdcontrol.objectservice;
import edu.kit.ipd.crowdcontrol.objectservice.crowdworking.PlatformManager;
import edu.kit.ipd.crowdcontrol.objectservice.crowdworking.TaskOperationException;
import edu.kit.ipd.crowdcontrol.objectservice.event.ChangeEvent;
import edu.kit.ipd.crowdcontrol.objectservice.event.Event;
import edu.kit.ipd.crowdcontrol.objectservice.event.EventManager;
import edu.kit.ipd.crowdcontrol.objectservice.event.EventObservable;
import edu.kit.ipd.crowdcontrol.objectservice.proto.Experiment;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import rx.Observable;
import java.util.LinkedList;
import java.util.Queue;
import java.util.concurrent.TimeUnit;
/**
* Created by lucaskrauss at 05.02.2016
*/
public class ExperimentController {
private final Logger log = LogManager.getLogger(ExperimentController.class);
// Stream of experiment change events; used to detect the transition into the PUBLISHED state.
private final Observable<Event<ChangeEvent<Experiment>>> observable = EventManager.EXPERIMENT_CHANGE.getObservable();
// NOTE(review): not referenced in the code visible here — presumably used further down
// (e.g. to emit an end-of-experiment event); verify before removing.
private final EventObservable<ChangeEvent<Experiment>> endExpObservable = EventManager.EXPERIMENT_CHANGE;
// Gateway to the crowd-working platforms on which experiments are published/unpublished.
private final PlatformManager platformManager;
public ExperimentController(PlatformManager manager) {
platformManager = manager;
observable.subscribe(experimentEvent -> {
if (experimentEvent.getData().getOld().getState() != Experiment.State.PUBLISHED
&& experimentEvent.getData().getNeww().getState() == Experiment.State.PUBLISHED){
startExperiment(experimentEvent.getData().getNeww());
}
});
}
/**
* Starts the experiment by publishing it on the participating platforms.
* If an error occurs during the publishing, all already created tasks for this experiment will be undone
*
* @param experiment to be started
*/
private void startExperiment(Experiment experiment) {
Queue<String> successfulPlatforms = new LinkedList<>();
for (int i = 0; i < experiment.getPopulationsCount(); i++) {
try {
platformManager.publishTask(experiment.getPopulations(i).getPlatformId(), experiment);
successfulPlatforms.add(experiment.getPopulations(i).getPlatformId());
} catch (TaskOperationException e1) {
//could not create task
log.fatal(String.format("Error! Could not create experiment on platform %s!", experiment.getPopulations(i).getPlatformId()), e1);
unpublishExperiment(experiment, successfulPlatforms);
} catch (IllegalStateException | IllegalArgumentException e2) {
log.fatal("Error! Could not create experiment! " + e2.getMessage());
unpublishExperiment(experiment, successfulPlatforms);
}
}
}
/**
* Unpublishes an experiment from all platforms it is meant to be active on.
* This method is only called if the initialization of the experiment has failed on one
* of its platforms.
*
* @param experiment which is going to be unpublished
* @param successfulPlatforms all platforms the experiment already was published on
*/
private void unpublishExperiment(Experiment experiment, Queue<String> successfulPlatforms) {
while (!successfulPlatforms.isEmpty()) {
try {
platformManager.unpublishTask(successfulPlatforms.remove(), experiment);
} catch (TaskOperationException e1) {
log.fatal("Error! Could not unpublish experiment from platform! " + e1.getMessage());
}
}
}
/**
* Unpublished the experiment from all its platform. Waits until the time for giving answers and/or ratings
* on the platforms (specified in the config-file) has run out. The experiment's state is set to STOPPED and a
* matching event is emitted.
*
* @param experiment which is to be ended.
*/
public void endExperiment(Experiment experiment) {
for (int i = 0; i < experiment.getPopulationsCount(); i++) {
try {
platformManager.unpublishTask(experiment.getPopulations(i).getPlatformId(), experiment);
} catch (TaskOperationException e1) {
log.fatal(String.format("Error! Cannot unpublish experiment from platform %s!", experiment.getPopulations(i).getPlatformId()), e1);
}
}
//wait for crowd platform time out
try {
TimeUnit.HOURS.sleep(2); //TODO get time from config
} catch (InterruptedException e) {
e.printStackTrace();
}
Experiment newExperiment = experiment.toBuilder().setState(Experiment.State.STOPPED).build();
//notify all observers
endExpObservable.emit(new ChangeEvent<Experiment>(experiment, newExperiment));
}
}
|
src/main/java/edu/kit/ipd/crowdcontrol/objectservice/ExperimentController.java
|
package edu.kit.ipd.crowdcontrol.objectservice;
import edu.kit.ipd.crowdcontrol.objectservice.crowdworking.PlatformManager;
import edu.kit.ipd.crowdcontrol.objectservice.crowdworking.TaskOperationException;
import edu.kit.ipd.crowdcontrol.objectservice.event.ChangeEvent;
import edu.kit.ipd.crowdcontrol.objectservice.event.Event;
import edu.kit.ipd.crowdcontrol.objectservice.event.EventManager;
import edu.kit.ipd.crowdcontrol.objectservice.event.EventObservable;
import edu.kit.ipd.crowdcontrol.objectservice.proto.Experiment;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import rx.Observable;
import rx.Observer;
import java.util.LinkedList;
import java.util.Queue;
import java.util.concurrent.TimeUnit;
/**
 * Created by lucaskrauss at 05.02.2016
 * <p>
 * Observes experiment-creation events and publishes each newly created experiment
 * on its participating platforms; {@link #endExperiment(Experiment)} unpublishes
 * it again and emits a STOPPED change event.
 */
public class ExperimentController implements Observer<Event<Experiment>> {

    private final Logger log = LogManager.getLogger(ExperimentController.class);
    private final Observable<Event<Experiment>> observable = EventManager.EXPERIMENT_CREATE.getObservable();
    private final EventObservable<ChangeEvent<Experiment>> endExpObservable = EventManager.EXPERIMENT_CHANGE;
    private final PlatformManager platformManager;

    public ExperimentController(PlatformManager manager) {
        platformManager = manager;
        // Fix: register *this* as the observer. A bare subscribe() attaches no
        // observer, so onNext() -- and therefore startExperiment() -- was never called.
        observable.subscribe(this);
    }

    /**
     * Starts the experiment by publishing it on the participating platforms.
     * If an error occurs during the publishing, all already created tasks for this experiment will be undone.
     *
     * @param experiment to be started
     */
    private void startExperiment(Experiment experiment) {
        Queue<String> successfulPlatforms = new LinkedList<>();
        for (int i = 0; i < experiment.getPopulationsCount(); i++) {
            try {
                platformManager.publishTask(experiment.getPopulations(i).getPlatformId(), experiment);
                successfulPlatforms.add(experiment.getPopulations(i).getPlatformId());
            } catch (TaskOperationException e1) {
                // could not create task on this platform -- roll back the ones that succeeded
                log.fatal(String.format("Error! Could not create experiment on platform %s!", experiment.getPopulations(i).getPlatformId()), e1);
                unpublishExperiment(experiment, successfulPlatforms);
            } catch (IllegalStateException | IllegalArgumentException e2) {
                log.fatal("Error! Could not create experiment! " + e2.getMessage());
                unpublishExperiment(experiment, successfulPlatforms);
            }
        }
        //ChangeEvent mit State.INVALID ?
    }

    /**
     * Unpublishes an experiment from all platforms it is meant to be active on.
     * This method is only called if the initialization of the experiment has failed on one
     * of its platforms.
     *
     * @param experiment          which is going to be unpublished
     * @param successfulPlatforms all platforms the experiment already was published on
     */
    private void unpublishExperiment(Experiment experiment, Queue<String> successfulPlatforms) {
        while (!successfulPlatforms.isEmpty()) {
            try {
                platformManager.unpublishTask(successfulPlatforms.remove(), experiment);
            } catch (TaskOperationException e1) {
                log.fatal("Error! Could not unpublish experiment from platform! " + e1.getMessage());
            }
        }
    }

    /**
     * Unpublishes the experiment from all its platforms. Waits until the time for giving answers and/or ratings
     * on the platforms (specified in the config-file) has run out. The experiment's state is set to STOPPED and a
     * matching event is emitted.
     * <p>
     * NOTE: this blocks the calling thread for the whole timeout.
     *
     * @param experiment which is to be ended.
     */
    public void endExperiment(Experiment experiment) {
        for (int i = 0; i < experiment.getPopulationsCount(); i++) {
            try {
                platformManager.unpublishTask(experiment.getPopulations(i).getPlatformId(), experiment);
            } catch (TaskOperationException e1) {
                log.fatal(String.format("Error! Cannot unpublish experiment from platform %s!", experiment.getPopulations(i).getPlatformId()), e1);
            }
        }
        // wait for crowd platform time out
        try {
            TimeUnit.HOURS.sleep(2); //TODO get time from config
        } catch (InterruptedException e) {
            // Fix: do not swallow the interrupt. Restore the flag so callers can
            // observe it, log, and proceed to stop the experiment immediately.
            Thread.currentThread().interrupt();
            log.error("Interrupted while waiting for the crowd platform timeout", e);
        }
        Experiment newExperiment = experiment.toBuilder().setState(Experiment.State.STOPPED).build();
        // notify all observers
        endExpObservable.emit(new ChangeEvent<Experiment>(experiment, newExperiment));
    }

    @Override
    public void onCompleted() {
        //NOP
    }

    @Override
    public void onError(Throwable e) {
        //NOP
    }

    @Override
    public void onNext(Event<Experiment> experimentEvent) {
        startExperiment(experimentEvent.getData());
    }
}
|
Clean up. Now starts on state-change to published
|
src/main/java/edu/kit/ipd/crowdcontrol/objectservice/ExperimentController.java
|
Clean up. Now starts on state-change to published
|
|
Java
|
apache-2.0
|
947a56e53ce886b53b3e0287cebddb339e1a10a9
| 0
|
satrapu/church-management-jee,satrapu/church-management-jee
|
/*
* Copyright 2014 satrapu.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ro.satrapu.churchmanagement.persistence;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.MapsId;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.ToString;
/**
* Represents a {@link Person} who is available to act in a church as a discipleship teacher.
*
* @author satrapu
*/
@Entity
@Table(name = "Discipleship_Teachers")
@Data
@EqualsAndHashCode(callSuper = true)
@ToString(callSuper = true)
public class DiscipleshipTeacher extends ManagedEntityBase {
private static final long serialVersionUID = 1L;
// The teacher's identity IS the person: @MapsId shares the Person's primary key,
// and optional = false enforces at the JPA level that a teacher always has an
// associated Person. Eagerly fetched so the person is loaded with the teacher.
@MapsId
@OneToOne(optional = false, fetch = FetchType.EAGER)
@JoinColumn(name = "Id")
private Person person;
}
|
src/main/java/ro/satrapu/churchmanagement/persistence/DiscipleshipTeacher.java
|
/*
* Copyright 2014 satrapu.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ro.satrapu.churchmanagement.persistence;
import javax.persistence.Entity;
import javax.persistence.JoinColumn;
import javax.persistence.MapsId;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.ToString;
/**
* Represents a {@link Person} who is available to act in a church as a discipleship teacher.
*
* @author satrapu
*/
@Entity
@Table(name = "Discipleship_Teachers")
@Data
@EqualsAndHashCode(callSuper = true)
@ToString(callSuper = true)
public class DiscipleshipTeacher extends ManagedEntityBase {
private static final long serialVersionUID = 1L;
// The teacher's identity IS the person: @MapsId shares the Person's primary key.
// NOTE(review): this @OneToOne is nullable by default -- if a teacher must always
// have a Person, consider optional = false; confirm against the schema.
@MapsId
@OneToOne
@JoinColumn(name = "Id")
private Person person;
}
|
Enforce the fact that a discipleship teacher will always be associated with one person.
|
src/main/java/ro/satrapu/churchmanagement/persistence/DiscipleshipTeacher.java
|
Enforce the fact that a discipleship teacher will always be associated with one person.
|
|
Java
|
apache-2.0
|
e201004e985f3ae43ee8c65baa16bcc0aecc0000
| 0
|
wuwen5/dubbo,bpzhang/dubbo,lovepoem/dubbo,alibaba/dubbo,fengyie007/dubbo,bpzhang/dubbo,qtvbwfn/dubbo,aglne/dubbo,lovepoem/dubbo,fengyie007/dubbo,aglne/dubbo,alibaba/dubbo,JasonHZXie/dubbo,yuyijq/dubbo,yuyijq/dubbo,qtvbwfn/dubbo,qtvbwfn/dubbo,lovepoem/dubbo,wuwen5/dubbo,JasonHZXie/dubbo,qtvbwfn/dubbo,wuwen5/dubbo,bpzhang/dubbo,fengyie007/dubbo,aglne/dubbo
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.config;
import com.alibaba.dubbo.rpc.Protocol;
import org.junit.Test;
import org.mockito.Mockito;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
/**
 * Unit tests for {@code ProtocolConfig}: one test per configurable property,
 * plus a check that destroying a config destroys the underlying protocol.
 */
public class ProtocolConfigTest {

    @Test
    public void testDestroy() throws Exception {
        // Route a mocked Protocol through MockProtocol2 so destory() reaches it.
        Protocol mockedProtocol = Mockito.mock(Protocol.class);
        MockProtocol2.delegate = mockedProtocol;
        ProtocolConfig config = new ProtocolConfig();
        config.setName("mockprotocol2");
        // "destory" is the actual (misspelled) API method name.
        config.destory();
        Mockito.verify(mockedProtocol).destroy();
    }

    @Test
    public void testName() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setName("name");
        Map<String, String> params = new HashMap<String, String>();
        ProtocolConfig.appendParameters(params, config);
        // The name also becomes the id, and is not exported as a URL parameter.
        assertThat(config.getName(), equalTo("name"));
        assertThat(config.getId(), equalTo("name"));
        assertThat(params.isEmpty(), is(true));
    }

    @Test
    public void testHost() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setHost("host");
        Map<String, String> params = new HashMap<String, String>();
        ProtocolConfig.appendParameters(params, config);
        assertThat(config.getHost(), equalTo("host"));
        assertThat(params.isEmpty(), is(true));
    }

    @Test
    public void testPort() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setPort(8080);
        Map<String, String> params = new HashMap<String, String>();
        ProtocolConfig.appendParameters(params, config);
        assertThat(config.getPort(), equalTo(8080));
        assertThat(params.isEmpty(), is(true));
    }

    @Test
    public void testPath() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setContextpath("context-path");
        Map<String, String> params = new HashMap<String, String>();
        ProtocolConfig.appendParameters(params, config);
        assertThat(config.getPath(), equalTo("context-path"));
        assertThat(config.getContextpath(), equalTo("context-path"));
        assertThat(params.isEmpty(), is(true));
        // setPath and setContextpath are aliases for the same property.
        config.setPath("path");
        assertThat(config.getPath(), equalTo("path"));
        assertThat(config.getContextpath(), equalTo("path"));
    }

    @Test
    public void testThreads() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setThreads(10);
        assertThat(config.getThreads(), is(10));
    }

    @Test
    public void testIothreads() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setIothreads(10);
        assertThat(config.getIothreads(), is(10));
    }

    @Test
    public void testQueues() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setQueues(10);
        assertThat(config.getQueues(), is(10));
    }

    @Test
    public void testAccepts() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setAccepts(10);
        assertThat(config.getAccepts(), is(10));
    }

    @Test
    public void testCodec() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setName("dubbo");
        config.setCodec("mockcodec");
        assertThat(config.getCodec(), equalTo("mockcodec"));
    }

    @Test
    public void testAccesslog() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setAccesslog("access.log");
        assertThat(config.getAccesslog(), equalTo("access.log"));
    }

    @Test
    public void testTelnet() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setTelnet("mocktelnethandler");
        assertThat(config.getTelnet(), equalTo("mocktelnethandler"));
    }

    @Test
    public void testRegister() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setRegister(true);
        assertThat(config.isRegister(), is(true));
    }

    @Test
    public void testTransporter() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setTransporter("mocktransporter");
        assertThat(config.getTransporter(), equalTo("mocktransporter"));
    }

    @Test
    public void testExchanger() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setExchanger("mockexchanger");
        assertThat(config.getExchanger(), equalTo("mockexchanger"));
    }

    @Test
    public void testDispatcher() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setDispatcher("mockdispatcher");
        assertThat(config.getDispatcher(), equalTo("mockdispatcher"));
    }

    @Test
    public void testNetworker() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setNetworker("networker");
        assertThat(config.getNetworker(), equalTo("networker"));
    }

    @Test
    public void testParameters() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setParameters(Collections.singletonMap("k1", "v1"));
        assertThat(config.getParameters(), hasEntry("k1", "v1"));
    }

    @Test
    public void testDefault() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setDefault(true);
        assertThat(config.isDefault(), is(true));
    }

    @Test
    public void testKeepAlive() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setKeepAlive(true);
        assertThat(config.getKeepAlive(), is(true));
    }

    @Test
    public void testOptimizer() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setOptimizer("optimizer");
        assertThat(config.getOptimizer(), equalTo("optimizer"));
    }

    @Test
    public void testExtension() throws Exception {
        ProtocolConfig config = new ProtocolConfig();
        config.setExtension("extension");
        assertThat(config.getExtension(), equalTo("extension"));
    }
}
|
dubbo-config/dubbo-config-api/src/test/java/com/alibaba/dubbo/config/ProtocolConfigTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.config;
import com.alibaba.dubbo.common.extension.ExtensionLoader;
import com.alibaba.dubbo.rpc.Protocol;
import org.junit.Test;
import org.mockito.Mockito;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
// Unit tests for ProtocolConfig: destruction hooks plus one test per
// configurable property (simple setter/getter round-trips).
public class ProtocolConfigTest {
// Destroying ALL protocols must propagate to every loaded Protocol extension.
@Test
public void testDestroyAll() throws Exception {
Protocol protocol = Mockito.mock(Protocol.class);
MockProtocol2.delegate = protocol;
// Load the extension so destroyAll() has something to destroy.
ExtensionLoader<Protocol> loader = ExtensionLoader.getExtensionLoader(Protocol.class);
loader.getExtension("mockprotocol2");
ProtocolConfig.destroyAll();
Mockito.verify(protocol).destroy();
}
// Destroying a single config must destroy the protocol it names.
// ("destory" is the actual, misspelled API method name.)
@Test
public void testDestroy() throws Exception {
Protocol protocol = Mockito.mock(Protocol.class);
MockProtocol2.delegate = protocol;
ProtocolConfig protocolConfig = new ProtocolConfig();
protocolConfig.setName("mockprotocol2");
protocolConfig.destory();
Mockito.verify(protocol).destroy();
}
// The name also becomes the id and is not exported as a URL parameter.
@Test
public void testName() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setName("name");
Map<String, String> parameters = new HashMap<String, String>();
ProtocolConfig.appendParameters(parameters, protocol);
assertThat(protocol.getName(), equalTo("name"));
assertThat(protocol.getId(), equalTo("name"));
assertThat(parameters.isEmpty(), is(true));
}
@Test
public void testHost() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setHost("host");
Map<String, String> parameters = new HashMap<String, String>();
ProtocolConfig.appendParameters(parameters, protocol);
assertThat(protocol.getHost(), equalTo("host"));
assertThat(parameters.isEmpty(), is(true));
}
@Test
public void testPort() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setPort(8080);
Map<String, String> parameters = new HashMap<String, String>();
ProtocolConfig.appendParameters(parameters, protocol);
assertThat(protocol.getPort(), equalTo(8080));
assertThat(parameters.isEmpty(), is(true));
}
// setPath and setContextpath are aliases for the same property.
@Test
public void testPath() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setContextpath("context-path");
Map<String, String> parameters = new HashMap<String, String>();
ProtocolConfig.appendParameters(parameters, protocol);
assertThat(protocol.getPath(), equalTo("context-path"));
assertThat(protocol.getContextpath(), equalTo("context-path"));
assertThat(parameters.isEmpty(), is(true));
protocol.setPath("path");
assertThat(protocol.getPath(), equalTo("path"));
assertThat(protocol.getContextpath(), equalTo("path"));
}
@Test
public void testThreads() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setThreads(10);
assertThat(protocol.getThreads(), is(10));
}
@Test
public void testIothreads() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setIothreads(10);
assertThat(protocol.getIothreads(), is(10));
}
@Test
public void testQueues() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setQueues(10);
assertThat(protocol.getQueues(), is(10));
}
@Test
public void testAccepts() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setAccepts(10);
assertThat(protocol.getAccepts(), is(10));
}
@Test
public void testCodec() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setName("dubbo");
protocol.setCodec("mockcodec");
assertThat(protocol.getCodec(), equalTo("mockcodec"));
}
@Test
public void testAccesslog() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setAccesslog("access.log");
assertThat(protocol.getAccesslog(), equalTo("access.log"));
}
@Test
public void testTelnet() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setTelnet("mocktelnethandler");
assertThat(protocol.getTelnet(), equalTo("mocktelnethandler"));
}
@Test
public void testRegister() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setRegister(true);
assertThat(protocol.isRegister(), is(true));
}
@Test
public void testTransporter() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setTransporter("mocktransporter");
assertThat(protocol.getTransporter(), equalTo("mocktransporter"));
}
@Test
public void testExchanger() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setExchanger("mockexchanger");
assertThat(protocol.getExchanger(), equalTo("mockexchanger"));
}
@Test
public void testDispatcher() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setDispatcher("mockdispatcher");
assertThat(protocol.getDispatcher(), equalTo("mockdispatcher"));
}
@Test
public void testNetworker() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setNetworker("networker");
assertThat(protocol.getNetworker(), equalTo("networker"));
}
@Test
public void testParameters() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setParameters(Collections.singletonMap("k1", "v1"));
assertThat(protocol.getParameters(), hasEntry("k1", "v1"));
}
@Test
public void testDefault() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setDefault(true);
assertThat(protocol.isDefault(), is(true));
}
@Test
public void testKeepAlive() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setKeepAlive(true);
assertThat(protocol.getKeepAlive(), is(true));
}
@Test
public void testOptimizer() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setOptimizer("optimizer");
assertThat(protocol.getOptimizer(), equalTo("optimizer"));
}
@Test
public void testExtension() throws Exception {
ProtocolConfig protocol = new ProtocolConfig();
protocol.setExtension("extension");
assertThat(protocol.getExtension(), equalTo("extension"));
}
}
|
Fix UT failure.
|
dubbo-config/dubbo-config-api/src/test/java/com/alibaba/dubbo/config/ProtocolConfigTest.java
|
Fix UT failure.
|
|
Java
|
apache-2.0
|
2bf71e8e937a64ea4a18e85cc818bc9408aa8a8c
| 0
|
eclab/edisyn,eclab/edisyn,eclab/edisyn
|
/***
Copyright 2020 by Sean Luke
Licensed under the Apache License version 2.0
*/
package edisyn.synth.alesisd4;
import edisyn.*;
import edisyn.gui.*;
import edisyn.util.*;
import java.awt.*;
import java.awt.geom.*;
import javax.swing.border.*;
import javax.swing.*;
import java.awt.event.*;
import java.util.*;
import java.io.*;
import javax.sound.midi.*;
/**
A patch editor for the Alesis D4 and DM5.
@author Sean Luke
*/
public class AlesisD4 extends Synth
{
// Pan dial labels: three steps left, center ("--"), three steps right.
public static final String[] PANS = { "<3", "<2", "<1", "--", "1>", "2", "3" };
// Drum group assignment choices.
public static final String[] GROUPS = { "Multi", "Single", "Group 1", "Group 2" };
// Note names used when rendering MIDI pitches as text.
public static final String[] KEYS = { "C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B" };
// These are NRPN parameter settings for various stuff we emit for individual parameters
public static final int NRPN_ROOT = 0x10;
public static final int NRPN_NOTE = 0x19;
public static final int NRPN_BANK = 0x08;
public static final int NRPN_NUMBER = 0x09;
public static final int NRPN_COARSE = 0x0A;
public static final int NRPN_FINE = 0x0B;
public static final int NRPN_VOLUME = 0x0C;
public static final int NRPN_PAN = 0x0D;
public static final int NRPN_OUTPUT = 0x0E;
public static final int NRPN_GROUP = 0x0F;
// Pause after emitting an NRPN message -- presumably milliseconds, to avoid
// overrunning the device; TODO confirm units against the Synth base class.
public static final int PAUSE_NRPN = 50;
// All the drum sounds are lumped together (see end of file). These are the numbers
// of each drum sound in each group -- I need this to break them out to send/receive sysex
public static final int[] D4_BANKS = { 99, 99, 55, 92, 76, 80 };
public static final int[] DM5_BANKS = { 95, 117, 71, 34, 36, 114, 65, 16 };
// Presumably the note used for test/audition playback -- not referenced in the
// visible code; confirm against the rest of the file.
public int testNote = 60;
// choosers updated when changing D4 <--> D5M or changing the root note
Chooser[] drumChoosers = new Chooser[61];
// The "DM5" checkbox in the global section (built in addNameGlobal).
JCheckBox check;
// True when editing a DM5 rather than a D4.
boolean dm5;
public static final String DM5_KEY = "DM5";
public boolean isDM5() { return dm5; }
// Guards setDM5 against re-entrant calls triggered by its own UI updates.
boolean reenntrantBlock = false;
/**
 * Switches the editor between D4 and DM5 modes: updates the title, rebuilds
 * the drum choosers for the new sound set, and syncs the DM5 checkbox.
 *
 * @param val   true for DM5 mode, false for D4 mode
 * @param store if true, persist the preference via setLastX
 */
public void setDM5(boolean val, boolean store)
{
// Re-entrancy guard: check.setSelected below can fire the checkbox's
// ActionListener, which calls back into setDM5.
if (reenntrantBlock) return;
reenntrantBlock = true;
// NOTE(review): the persisted string is the NEGATED value; the constructor
// negates it again when reading it back -- confirm this inversion is intended.
if (store) setLastX("" + (!val), DM5_KEY, getSynthClassName(), false);
dm5 = val;
updateTitle();
updateChoosers();
if (check != null) check.setSelected(dm5);
reenntrantBlock = false;
}
/** The editor's display name. */
public static String getSynthName() { return "Alesis D4/DM5"; }
/** Builds the editor UI: one tab for globals/triggers/drums 0-6, then tabs of 18 drums each. */
public AlesisD4()
{
model.set("number", 0);
// Restore the last D4/DM5 mode; the stored string is negated (see setDM5).
String m = getLastX(DM5_KEY, getSynthClassName());
dm5 = (m == null ? false : !Boolean.parseBoolean(m));
VBox vbox = new VBox();
HBox hbox = new HBox();
JComponent sourcePanel = new SynthPanel(this);
// First tab: global controls, triggers, and drums 0-6.
vbox.add(addNameGlobal(Style.COLOR_GLOBAL()));
vbox.add(addTriggers(Style.COLOR_A()));
vbox.add(addDrums(0, 7, Style.COLOR_B()));
sourcePanel.add(vbox, BorderLayout.CENTER);
addTab("General and Drums 0-6", sourcePanel);
// Remaining tabs cover drums 7..60 in groups of 18, alternating category colors.
boolean primary = true;
for(int i = 7; i <= 60; i+= 18)
{
sourcePanel = new SynthPanel(this);
vbox = new VBox();
vbox.add(addDrums(i, i+18, primary ? Style.COLOR_A() : Style.COLOR_B()));
sourcePanel.add(vbox, BorderLayout.CENTER);
addTab("Drums " + i + "-" + (i + 17), sourcePanel);
primary = !primary;
}
model.set("name", "Untitled");
loadDefaults();
updateChoosers();
}
/** Returns the init-patch resource matching the currently selected hardware model. */
public String getDefaultResourceFileName()
    {
    return isDM5() ? "AlesisDM5.init" : "AlesisD4.init";
    }
/** Returns the HTML documentation resource for this editor (shared by D4 and DM5 modes). */
public String getHTMLResourceFileName() { return "AlesisD4.html"; }
/**
 * Prompts the user for a patch number (0...20), looping until a valid value
 * is entered or the dialog is cancelled.
 *
 * @param title   dialog title
 * @param change  model to receive the chosen "number"
 * @param writing unused here; part of the Synth callback signature
 * @return true if a valid number was stored in {@code change}, false if cancelled
 */
public boolean gatherPatchInfo(String title, Model change, boolean writing)
    {
    JTextField numberField = new SelectedTextField("" + model.get("number"), 3);
    while (true)
        {
        if (!showMultiOption(this, new String[] { "Patch Number"},
                new JComponent[] { numberField }, title, "Enter the Patch number"))
            return false;
        // Parse the entry; anything unparseable is treated as out of range.
        int parsed = -1;
        try
            {
            parsed = Integer.parseInt(numberField.getText());
            }
        catch (NumberFormatException e)
            {
            // fall through: parsed stays -1 and triggers the error dialog below
            }
        if (parsed < 0 || parsed > 20)
            {
            showSimpleError(title, "The Patch Number must be an integer 0...20");
            continue;
            }
        change.set("number", parsed);
        return true;
        }
    }
/** Add the global patch category (name, id, number, etc.) */
public JComponent addNameGlobal(Color color)
{
Category globalCategory = new Category(this, getSynthName(), color);
//globalCategory.makeUnresettable();
JComponent comp;
String[] params;
HBox hbox = new HBox();
VBox vbox = new VBox();
HBox hbox2 = new HBox();
comp = new PatchDisplay(this, 9, false);
hbox2.add(comp);
// The DM5 checkbox toggles between D4 and DM5 drum sets (see setDM5, which
// guards against the re-entrant callback this listener can cause).
check = new JCheckBox("DM5");
check.setSelected(dm5);
check.addActionListener(new ActionListener()
{
public void actionPerformed(ActionEvent e)
{
setDM5(check.isSelected(), true);
}
});
check.setFont(Style.SMALL_FONT());
check.setOpaque(false);
check.setForeground(Style.TEXT_COLOR());
hbox2.addLast(check);
vbox.add(hbox2);
// Patch name field: sanitizes input via revisePatchName and keeps the
// window title in sync with the model.
comp = new StringComponent("Patch Name", this, "name", 15, "Name must be up to 15 ASCII characters.")
{
public String replace(String val)
{
return revisePatchName(val);
}
public void update(String key, Model model)
{
super.update(key, model);
updateTitle();
}
};
vbox.add(comp);
hbox.add(vbox);
// Root note dial: every drum chooser label is re-rendered when it changes.
comp = new LabelledDial("Root Note", this, "drumsetnoteroot", color, 0, 67)
{
public void update(String key, Model model)
{
super.update(key, model);
updateDrumNoteLabels(); // gonna be expensive
}
public String map(int val)
{
return KEYS[val % 12] + (val / 12 - 2);
}
};
hbox.add(comp);
globalCategory.add(hbox, BorderLayout.WEST);
return globalCategory;
}
/**
 * Sanitizes a patch name: applies the superclass revision, then replaces
 * every character outside printable ASCII (32..127) with a space.
 */
public String revisePatchName(String name)
    {
    name = super.revisePatchName(name);
    if (name == null) name = "";
    StringBuilder cleaned = new StringBuilder(name.length());
    for (char c : name.toCharArray())
        cleaned.append((c < 32 || c > 127) ? ' ' : c);
    return cleaned.toString();
    }
/**
 * Builds the label text for a drum chooser: the drum index plus the resulting
 * MIDI note (root + note) rendered as a pitch name. Middle C (60) is special-cased.
 */
public String getDrumNoteLabel(int note, int root)
    {
    // The default root is 36
    int n = note + root;
    String pitch = (n == 60) ? "Middle C" : KEYS[n % 12] + (n / 12 - 2);
    return " Drum " + note + " ( " + n + ": " + pitch + " )";
    }
/** Re-renders every drum chooser label against the (possibly changed) root note. */
public void updateDrumNoteLabels()
    {
    int root = model.get("drumsetnoteroot", 36);
    for (int note = 0; note < 61; note++)
        {
        Chooser chooser = drumChoosers[note];
        if (chooser == null) continue;
        chooser.getLabel().setText(getDrumNoteLabel(note, root));
        }
    repaint();
    }
/**
 * Repopulates every drum chooser with the D4 or DM5 sound list, preserving the
 * current selection where possible. Model listeners and undo are suspended for
 * the duration so the mass update does not spam events or the undo stack.
 */
public void updateChoosers()
{
// disable listeners
boolean li = model.getUpdateListeners();
model.setUpdateListeners(false);
boolean un = undo.getWillPush();
undo.setWillPush(false);
for(int i = 0; i < 61; i++)
{
if (drumChoosers[i] != null)
{
int j = drumChoosers[i].getIndex();
// A DM5-only selection has no D4 equivalent; fall back to the first entry.
if (!isDM5() && j >= D4_DRUMS.length) // too high
{
j = 0;
}
drumChoosers[i].setElements(drumChoosers[i].getLabelText(), isDM5() ? DM5_DRUMS : D4_DRUMS);
drumChoosers[i].setIndex(j);
}
}
// reenable listeners
model.setUpdateListeners(li);
undo.setWillPush(un);
repaint();
}
/**
 * Builds the UI category for drums [start, end): rows of two drums each, with a
 * horizontal strut between the pair. The per-drum widget construction was
 * duplicated verbatim for the second drum of each pair; it is now factored into
 * {@link #addDrumControls}, with identical component order and parameters.
 *
 * @param start first drum index (inclusive)
 * @param end   last drum index (exclusive)
 * @param color category color
 * @return the assembled category component
 */
public JComponent addDrums(int start, int end, Color color)
    {
    Category category = new Category(this, "Drums " + start + " - " + (end - 1), color);
    VBox vbox = new VBox();
    int root = model.get("drumsetnoteroot", 36);
    for(int i = start; i < end; i += 2)
        {
        HBox hbox = new HBox();
        addDrumControls(hbox, i, root, color, false);
        if (i < end - 1)
            {
            hbox.add(Strut.makeHorizontalStrut(30));
            // The second drum's Fine dial is added with addLast, as before.
            addDrumControls(hbox, i + 1, root, color, true);
            }
        vbox.add(hbox);
        }
    category.add(vbox, BorderLayout.CENTER);
    return category;
    }

/**
 * Adds the full widget set for one drum (voice chooser, aux-out/group controls,
 * and Volume/Pan/Coarse/Fine dials) to the given row.
 *
 * @param row       row to receive the widgets
 * @param index     drum index, used in model keys ("drum" + index + ...)
 * @param root      current root note, for the chooser label
 * @param color     dial color
 * @param lastInRow if true, the Fine dial is added with addLast (closes the row)
 */
private void addDrumControls(HBox row, int index, int root, Color color, boolean lastInRow)
    {
    VBox vbox2 = new VBox();
    JComponent comp = new Chooser(getDrumNoteLabel(index, root), this, "drum" + index + "voice", D4_DRUMS);
    vbox2.add(comp);
    // Remember the chooser so updateChoosers()/updateDrumNoteLabels() can reach it.
    drumChoosers[index] = ((Chooser)comp);
    HBox hbox2 = new HBox();
    comp = new CheckBox("Aux Out", this, "drum" + index + "output");
    hbox2.add(comp);
    comp = new Chooser("Groups", this, "drum" + index + "groups", GROUPS);
    hbox2.add(comp);
    vbox2.add(hbox2);
    row.add(vbox2);
    comp = new LabelledDial("Volume", this, "drum" + index + "volume", color, 0, 99);
    row.add(comp);
    comp = new LabelledDial("Pan", this, "drum" + index + "pan", color, 0, 6)
        {
        public String map(int val)
            {
            return PANS[val];
            }
        public boolean isSymmetric() { return true; }
        };
    row.add(comp);
    comp = new LabelledDial("Coarse", this, "drum" + index + "coarse", color, 0, 7, 4)
        {
        public int getDefaultValue() { return 4; }
        public double getStartAngle()
            {
            return 245;
            }
        };
    row.add(comp);
    comp = new LabelledDial("Fine", this, "drum" + index + "fine", color, 0, 99);
    if (lastInRow) row.addLast(comp);
    else row.add(comp);
    }
public JComponent addTriggers(Color color)
{
Category category = new Category(this, "Triggers", color);
JComponent comp;
String[] params;
HBox hbox = new HBox();
comp = new LabelledDial("Footswitch", this, "footswitchclosing", color, 0, 60)
{
public String map(int val)
{
val += model.get("drumsetnoteroot", 0);
return KEYS[val % 12] + (val / 12 - 2);
}
};
getModel().register("drumsetnoteroot", ((LabelledDial)comp));
((LabelledDial)comp).addAdditionalLabel("Closing");
hbox.add(comp);
comp = new LabelledDial("Footswitch", this, "footswitchheld", color, 0, 60)
{
public String map(int val)
{
val += model.get("drumsetnoteroot", 0);
return KEYS[val % 12] + (val / 12 - 2);
}
};
getModel().register("drumsetnoteroot", ((LabelledDial)comp));
((LabelledDial)comp).addAdditionalLabel("Held");
hbox.add(comp);
for(int i = 1; i <= 12; i++)
{
comp = new LabelledDial("Trigger " + i + " ", this, "trigger" + i, color, 0, 60)
{
public String map(int val)
{
val += model.get("drumsetnoteroot", 0);
return KEYS[val % 12] + (val / 12 - 2);
}
};
getModel().register("drumsetnoteroot", ((LabelledDial)comp));
hbox.add(comp);
}
category.add(hbox, BorderLayout.CENTER);
return category;
}
    /** Returns the MIDI pitch the test-note (audition) button should play; testNote is updated by emitAll(). */
    public int getTestNotePitch() { return testNote; }
    /** Parses a D4/DM5 drumset sysex dump into the model.  Byte 4 distinguishes
        D4 (0x06) from DM5 dumps; byte 6 is the opcode: 0x01 is the edit buffer,
        32..63 are drumsets 0..21 (values >= 64 are requests).  Returns
        PARSE_SUCCEEDED or PARSE_FAILED. */
    public int parse(byte[] data, boolean fromFile)
        {
        boolean d4 = (data[4] == 0x06);
        int opcode = data[6];
        // NOTE(review): an unrecognized opcode leaves drumset at 0 and still
        // parses below as drumset 0 -- confirm only valid opcodes reach here.
        int drumset = 0;
        if (opcode == 0x01) // edit buffer
            {
            drumset = -1; // we'll say that's the edit buffer
            }
        if (opcode < 64 && opcode >= 32) // Drumset. Stuff >= 64 are requests.
            {
            drumset = (opcode - 32);
            }
        if (drumset <= 21) // we got one
            {
            setDM5(!d4, false);
            if (drumset != -1)
                model.set("number", drumset);
            int pos = 7; // start of data
            // get name: 14 characters
            char[] name = new char[14];
            for(int i = 0; i < 14; i++)
                name[i] = (char)(data[pos++]);
            model.set("name", new String(name));
            model.set("drumsetnoteroot", data[pos++]);
            model.set("footswitchclosing", data[pos++]);
            model.set("footswitchheld", data[pos++]);
            for(int i = 1; i <= 12; i++)
                {
                model.set("trigger" + i, data[pos++]);
                }
            // packets: 5 bytes per drum, 61 drums
            for(int i = 0; i < 61; i++)
                {
                model.set("drum" + i + "volume", data[pos++]);
                byte b = data[pos++];
                model.set("drum" + i + "pan", b >>> 4);                 // top nibble: pan
                model.set("drum" + i + "output", (b >>> 3) & 1);        // bit 3: aux out
                int bank = (b & 7);                                     // low 3 bits: bank
                int number = data[pos++];
                // Flatten (bank, number) into the single combined voice index the
                // choosers use, by summing the sizes of all preceding banks.
                int banksum = 0;
                for(int bb = 0; bb < bank; bb++)
                    // NOTE(review): out-of-range bank indices fall back to bank 0's
                    // size rather than failing -- presumably defensive; confirm.
                    banksum += (d4 ? D4_BANKS[bb > 5 ? 0 : bb] : DM5_BANKS[bb > 7 ? 0 : bb]);
                // assemble bank and number into voice
                model.set("drum" + i + "voice", banksum + number);
                model.set("drum" + i + "fine", data[pos++]);
                b = data[pos++];
                model.set("drum" + i + "groups", (b >>> 3) & 7);
                model.set("drum" + i + "coarse", b & 7);
                }
            return PARSE_SUCCEEDED;
            }
        else return PARSE_FAILED;
        }
public int map(int i, int max)
{
int v = (int)((i * 127.0 + max) / max);
if (v > 127) v = 127;
return v;
}
public Object[] emitAll(String key)
{
if (key.equals("drumsetnoteroot"))
{
ArrayList data = new ArrayList();
final int total = 68;
Object[] nrpn = buildNRPN(getChannelOut(), NRPN_ROOT, (128 * map(model.get(key), total-1)));
for(int i = 0; i < nrpn.length; i++) data.add(nrpn[i]);
data.add(new Integer(PAUSE_NRPN));
return (Object[])(data.toArray(new Object[0]));
}
else if (key.startsWith("drum") && !key.equals("drumsetnoteroot"))
{
// we can't emit a parameter, but at least we can set up
// the preview button to play it
int drum = StringUtility.getFirstInt(key);
// compute bank and number
int voice = model.get("drum" + drum + "voice");
int bank = -1;
int number = -1;
for(int j = 0; j < (isDM5() ? DM5_BANKS.length : D4_BANKS.length); j++)
{
if (voice < (isDM5() ? DM5_BANKS[j] : D4_BANKS[j]))
{
bank = j;
number = voice;
break;
}
else
voice -= (isDM5() ? DM5_BANKS[j] : D4_BANKS[j]);
}
if (bank == -1) // error, should never happen
{
System.err.println("ERROR (AlesisD4.emit): bank and voice are bad for " + isDM5() + " " + model.get("drum" + drum + "voice"));
return new Object[0];
}
//// IMPORTANT NOTE
////
//// True to form, the NRPN documentation in Alesis's D4 service manual is completely wrong.
//// The manual states that to compute the NRPN MSB value, you take the current value, multiply by 127,
//// then divide by the maximum value. They even give a (wrong) example: to compute the value for
//// volume = 50 (volume goes 0...99), you do 50 * 127 / 99 = 64. WRONG WRONG WRONG.
////
//// The correct formula is unknown. However for all parameters below except for COARSE TUNING,
//// I have had success with the equation shown in map(i, max) above. For COARSE TUNING below
//// I have a custom equation which seems to work right.
////
//// Absolutely nowhere on the internet does a correction appear for this. Apparently nobody noticed.
////
//// I can only test on the D4, so I am not certain if these equations will work properly for the drum
//// voices for the DM5. I need someone else to test for me.
ArrayList data = new ArrayList();
// I believe this sets the note that preview is playing, and also (?)
// more importantly, the later changes after it will change that particular note.
// Maybe?
Object[] nrpn = null;
// always
testNote = drum + model.get("drumsetnoteroot");
nrpn = buildNRPN(getChannelOut(), NRPN_NOTE, 128 * map(drum, 60));
for(int i = 0; i < nrpn.length; i++) data.add(nrpn[i]);
data.add(new Integer(PAUSE_NRPN));
if (key.endsWith("voice"))
{
int total = (isDM5() ? DM5_BANKS.length : D4_BANKS.length);
nrpn = buildNRPN(getChannelOut(), NRPN_BANK, (128 * ((bank + 1) * 127 / total)));
for(int i = 0; i < nrpn.length; i++) data.add(nrpn[i]);
data.add(new Integer(PAUSE_NRPN));
total = (isDM5() ? DM5_BANKS[bank]: D4_BANKS[bank]);
//int math = ((number + 1) * 127) / total;
//int math = ((number + 1 - 1) * 127) / (total - 1);
nrpn = buildNRPN(getChannelOut(), NRPN_NUMBER, 128 * map(number, total-1));
for(int i = 0; i < nrpn.length; i++) data.add(nrpn[i]);
data.add(new Integer(PAUSE_NRPN));
}
else if (key.endsWith("coarse"))
{
final int total = 8;
// my mapping function doesn't work for this one
// nrpn = buildNRPN(getChannelOut(), NRPN_COARSE, (128 * map(model.get(key), total-1)));
nrpn = buildNRPN(getChannelOut(), NRPN_COARSE, (128 * ((model.get(key) + 1) * 127 / total))); // this one works however
for(int i = 0; i < nrpn.length; i++) data.add(nrpn[i]);
data.add(new Integer(PAUSE_NRPN));
}
else if (key.endsWith("fine"))
{
final int total = 100;
nrpn = buildNRPN(getChannelOut(), NRPN_FINE, (128 * map(model.get(key), total-1))); // (128 * ((model.get(key) + 1) * 127 / total)));
for(int i = 0; i < nrpn.length; i++) data.add(nrpn[i]);
data.add(new Integer(PAUSE_NRPN));
}
else if (key.endsWith("volume"))
{
final int total = 100;
nrpn = buildNRPN(getChannelOut(), NRPN_VOLUME, (128 * map(model.get(key), total-1))); // (128 * ((model.get(key) + 1) * 127 / total)));
for(int i = 0; i < nrpn.length; i++) data.add(nrpn[i]);
data.add(new Integer(PAUSE_NRPN));
}
else if (key.endsWith("pan"))
{
final int total = 7;
nrpn = buildNRPN(getChannelOut(), NRPN_PAN, (128 * map(model.get(key), total-1))); // (128 * ((model.get(key) + 1) * 127 / total)));
for(int i = 0; i < nrpn.length; i++) data.add(nrpn[i]);
data.add(new Integer(PAUSE_NRPN));
}
else if (key.endsWith("output"))
{
final int total = 2;
nrpn = buildNRPN(getChannelOut(), NRPN_OUTPUT, (128 * map(model.get(key), total-1))); // (128 * ((model.get(key) + 1) * 127 / total)));
for(int i = 0; i < nrpn.length; i++) data.add(nrpn[i]);
data.add(new Integer(PAUSE_NRPN));
}
else if (key.endsWith("groups"))
{
final int total = 4;
nrpn = buildNRPN(getChannelOut(), NRPN_GROUP, (128 * map(model.get(key), total-1))); // (128 * ((model.get(key) + 1) * 127 / total)));
for(int i = 0; i < nrpn.length; i++) data.add(nrpn[i]);
data.add(new Integer(PAUSE_NRPN));
}
else
{
System.err.println("ERROR (AlesisD4.emit): unknown key " + key + ", should never happen.");
return new Object[0];
}
return (Object[])(data.toArray(new Object[0]));
}
else
{
return new Object[0];
}
}
    /** Emits the model as a 343-byte D4/DM5 drumset sysex dump.  toWorkingMemory
        targets the edit buffer (opcode 0x01); otherwise the dump targets drumset
        32 + number.  Note that only "number" is read from tempModel; all patch
        data comes from the editor's own model. */
    public byte[] emit(Model tempModel, boolean toWorkingMemory, boolean toFile)
        {
        if (tempModel == null)
            tempModel = getModel();
        boolean d4 = !isDM5();
        byte[] data = new byte[343];
        // sysex header; byte 4 is the device type (0x06 = D4, 0x13 = DM5)
        data[0] = (byte)0xF0;
        data[1] = (byte)0x00;
        data[2] = (byte)0x00;
        data[3] = (byte)0x0E;
        data[4] = (byte)(d4 ? 0x06 : 0x13);
        data[5] = (byte)getChannelOut();
        data[6] = (byte)(toWorkingMemory ? 0x01 : 32 + tempModel.get("number"));
        int pos = 7; // start of data
        // NOTE(review): the padding appended here must make the string at least
        // 14 characters long or charAt below can throw -- confirm the pad string
        // wasn't truncated.
        String name = model.get("name", " ") + " ";
        for(int i = 0; i < 14; i++)
            data[pos++] = (byte)(name.charAt(i));
        data[pos++] = (byte)model.get("drumsetnoteroot");
        data[pos++] = (byte)model.get("footswitchclosing");
        data[pos++] = (byte)model.get("footswitchheld");
        for(int i = 1; i <= 12; i++)
            {
            data[pos++] = (byte)model.get("trigger" + i);
            }
        // packets: 5 bytes per drum, 61 drums
        for(int i = 0; i < 61; i++)
            {
            // split the combined voice index back into bank and in-bank number
            int voice = model.get("drum" + i + "voice");
            int bank = -1;
            int number = -1;
            for(int j = 0; j < (isDM5() ? DM5_BANKS.length : D4_BANKS.length); j++)
                {
                if (voice < (isDM5() ? DM5_BANKS[j] : D4_BANKS[j]))
                    {
                    bank = j;
                    number = voice;
                    break;
                    }
                else
                    voice -= (isDM5() ? DM5_BANKS[j] : D4_BANKS[j]);
                }
            if (bank == -1) // error, should never happen
                {
                System.err.println("ERROR (AlesisD4.emit): bank and voice are bad for " + isDM5() + " " + model.get("drum" + i + "voice"));
                return new byte[0];
                }
            data[pos++] = (byte)model.get("drum" + i + "volume");
            data[pos++] = (byte)((model.get("drum" + i + "pan") << 4) |     // top nibble: pan
                (model.get("drum" + i + "output") << 3) |                   // bit 3: aux out
                bank);                                                      // low 3 bits: bank
            data[pos++] = (byte)number;
            data[pos++] = (byte)model.get("drum" + i + "fine");
            data[pos++] = (byte)((model.get("drum" + i + "groups") << 3) |
                model.get("drum" + i + "coarse"));
            }
        // compute checksum over the data bytes (everything between the header and
        // the trailing checksum/EOX pair), masked to 7 bits
        int checksum = 0;
        for(int i = 7; i < data.length - 2; i++)
            {
            checksum += data[i];
            }
        checksum = checksum & 127;
        data[data.length - 2] = (byte)checksum;
        data[data.length - 1] = (byte)0xF7;
        return data;
        }
public byte[] requestDump(Model tempModel)
{
if (tempModel == null)
tempModel = getModel();
byte NN = (byte)(tempModel.get("number") + 32 + 64);
byte TYPE = (byte)(isDM5() ? 0x13 : 0x06);
return new byte[] { (byte)0xF0, 0x00, 0x00, 0x0E, TYPE, (byte)getChannelOut(), NN, (byte)0xF7 };
}
public byte[] requestCurrentDump()
{
byte TYPE = (byte)(isDM5() ? 0x13 : 0x06);
return new byte[] { (byte)0xF0, 0x00, 0x00, 0x0E, TYPE, (byte)getChannelOut(), 65, (byte)0xF7 };
}
    /** Returns the patch name stored in the model, or "Untitled" if none. */
    public String getPatchName(Model model)
        {
        return model.get("name", "Untitled");
        }
public String getPatchLocationName(Model model)
{
int num = model.get("number", 0);
if (num < 10) return "0" + num;
else return "" + num;
}
public Model getNextPatchLocation(Model model)
{
int number = model.get("number");
number++;
if (number >= 21)
{
number = 0;
}
Model newModel = buildModel();
newModel.set("number", number);
return newModel;
}
    /** Milliseconds to wait after sending a program change. */
    public int getPauseAfterChangePatch() { return 200; }
    /** Milliseconds to wait after writing a patch to the unit. */
    public int getPauseAfterWritePatch() { return 2000; }
    /** Sends a program change selecting the drumset number in tempModel. */
    public void changePatch(Model tempModel)
        {
        tryToSendMIDI(buildPC(getChannelOut(), tempModel.get("number")));
        }
public static final String[] D4_DRUMS = new String[]
{
// I note that D4 drums start with 1, whereas DM5 drums start with 0
"K 1 Big \"O\"",
"K 2 Stomp",
"K 3 Industry",
"K 4 Spiked",
"K 5 Spike RM",
"K 6 Spike HL",
"K 7 Pillow",
"K 8 Plated",
"K 9 Hi Foot",
"K 10 Hi Foot RM",
"K 11 Foot",
"K 12 Foot RM",
"K 13 Foot HL",
"K 14 Foot GT",
"K 15 60's R&B",
"K 16 R&B Room",
"K 17 Monster",
"K 18 Mnstr RM",
"K 19 Gtd Mnstr",
"K 20 Dbl Head",
"K 21 Dbl RM",
"K 22 Dbl HL",
"K 23 Thrash",
"K 24 Stab",
"K 25 Brt Stab",
"K 26 Stab RM",
"K 27 Stab HL",
"K 28 Reggae",
"K 29 Kinetic",
"K 30 C&W #1",
"K 31 22\" Power",
"K 32 Amb 22 Pwr",
"K 33 22\" Pwr RM",
"K 34 Solid!!",
"K 35 Solid RM",
"K 36 Tiled",
"K 37 Tiled RM",
"K 38 Chunky",
"K 39 Chunky RM",
"K 40 Chunky HL",
"K 41 Loose One",
"K 42 Amb Loose",
"K 43 Slammin'",
"K 44 Slam Room",
"K 45 22\" Deep",
"K 46 Amb Deep",
"K 47 Deep Room",
"K 48 Movin' Air",
"K 49 Mstr Mash",
"K 50 Swift",
"K 51 Abrupt",
"K 52 Fusion",
"K 53 Muffled",
"K 54 Blaster",
"K 55 24\" Power",
"K 56 Amb 24\"",
"K 57 24\" Room",
"K 58 Deep 24\"",
"K 59 Ballad",
"K 60 Big Foot",
"K 61 8 Foot RM",
"K 62 The Thud",
"K 63 Amb Thud",
"K 64 Thud Hall",
"K 65 C&W #2",
"K 66 Tite",
"K 67 Tite Room",
"K 68 Tite Hall",
"K 69 Chopped",
"K 70 Flanged",
"K 71 Deep Wood",
"K 72 Wood Room",
"K 73 Head Punch",
"K 74 Amb Punch",
"K 75 Garage",
"K 76 Studio",
"K 77 Hanger",
"K 78 Lo Wood",
"K 79 Lo Wood HL",
"K 80 Rap",
"K 81 Rumble",
"K 82 DDL Slap",
"K 83 Trendy",
"K 84 Elec..",
"K 85 ..tronic",
"K 86 Ambnt Elc.",
"K 87 Amb Tronic",
"K 88 Faazed",
"K 89 Faazed RM",
"K 90 Tite Head",
"K 91 Hi Wisp",
"K 92 LoWisp",
"K 93 Hi Wet Wisp",
"K 94 Lo Wet Wisp",
"K 95 Techno",
"K 96 Techno RM",
"K 97 Lo Techno",
"K 98 Lo Tech RM",
"K 99 Coliseum",
"S 1 Raw Hide",
"S 2 Fat City",
"S 3 Ambient",
"S 4 Torqued",
"S 5 Classic",
"S 6 Classic RM",
"S 7 Hi Class",
"S 8 Hi Class RM",
"S 9 MIT Gate",
"S 10 Dry Combo",
"S 11 Combo RM",
"S 12 Combo Plate",
"S 13 Flange",
"S 14 Media Hype",
"S 15 Rim->Center",
"S 16 Center->Rim",
"S 17 Squwank",
"S 18 Standard",
"S 19 Brighter",
"S 20 Darker",
"S 21 Hi Gated",
"S 22 Lo Gated",
"S 23 Deep Dry",
"S 24 Nasty",
"S 25 Piccolo",
"S 26 Wet Piccolo",
"S 27 Hi Piccolo",
"S 28 Amb Hi Picc",
"S 29 Aerolux",
"S 30 Wood",
"S 31 Wood Room",
"S 32 Wood Shed",
"S 33 Rap",
"S 34 Lo Rap",
"S 35 Flanger",
"S 36 Cracker Box",
"S 37 Bigger Box",
"S 38 Art O Fish",
"S 39 Air Burst",
"S 40 Edged In",
"S 41 Edged Out",
"S 42 ..Wet 40",
"S 43 ..Wet 41",
"S 44 Compressed",
"S 45 W/Verb",
"S 46 Wrap",
"S 47 Attak Rap",
"S 48 Brassy",
"S 49 Brassy RM",
"S 50 Brutal",
"S 51 Lo Brutal",
"S 52 Crisp Hit",
"S 53 Crisp RM",
"S 54 Pop 'n Pic",
"S 55 Alloy",
"S 56 Aggressive",
"S 57 WetAggrssv",
"S 58 Clik Clak",
"S 59 Chrome",
"S 60 Reverb",
"S 61 Media",
"S 62 Overtone",
"S 63 Overtone RM",
"S 64 Verbose",
"S 65 Dance!",
"S 66 Grrrated",
"S 67 Dry Punch",
"S 68 Amb Punch",
"S 69 Spiked..",
"S 70 Bad Punch",
"S 71 Pop Shot",
"S 72 Lo Pop Shot",
"S 73 Flng Shot",
"S 74 Studio A",
"S 75 Studio B",
"S 76 Hyper Pic",
"S 77 Hi Elect",
"S 78 Hi Wet Lct",
"S 79 Electric",
"S 80 Wet Electric",
"S 81 Bitchin'",
"S 82 Fringe",
"S 83 Trbo Drive",
"S 84 Play Room",
"S 85 Slap It!",
"S 86 Un Natural",
"S 87 Arena",
"S 88 Brush",
"S 89 Brush Hit",
"S 90 Tote Stik",
"S 91 Stick",
"S 92 Big Stik",
"S 93 Gated",
"S 94 Rim Shot",
"S 95 R-Shot RM",
"S 96 Gunner",
"S 97 Sm Ballad",
"S 98 W/Tmbrne",
"S 99 Bg Ballad",
"C 1 R&B Hat",
"C 2 14\" Thin",
"C 3 Dyno Edge",
"C 4 14\" Med",
"C 5 14\" Tite",
"C 6 Power Hat",
"C 7 Rock Tite",
"C 8 Rock Edge",
"C 9 Tension",
"C 10 Jazzed!",
"C 11 Hard Hat",
"C 12 Rock Tip",
"C 13 Sputt..",
"C 14 Ambient",
"C 15 Wet Hat",
"C 16 Wetter..",
"C 17 Half Open",
"C 18 Wet Half",
"C 19 Rock Half",
"C 20 Clutched",
"C 21 The Edge",
"C 22 Trashy",
"C 23 Open It Up",
"C 24 Rattle",
"C 25 Wet Rattle",
"C 26 Close->Open",
"C 27 Open->Close",
"C 28 Hard Foot",
"C 29 Soft Foot",
"C 30 Wet Foot",
"C 31 Edge->Bell",
"C 32 Bell->Edge",
"C 33 The Swing",
"C 34 Flat Ride",
"C 35 60's Flng",
"C 36 Dark Ride",
"C 37 Ping Ride",
"C 38 Bell Ride",
"C 39 Flng Jazz",
"C 40 Flng Rock",
"C 41 Hi Crash",
"C 42 Lo Crash",
"C 43 10\" Splash",
"C 44 Medium",
"C 45 Lo Medium",
"C 46 20\" China",
"C 47 18\" Crash",
"C 48 Tiny",
"C 49 20\" Bronze",
"C 50 Flng Crsh",
"C 51 Lo Pang",
"C 52 Slo Crash",
"C 53 Flng Pang",
"C 54 Lft & Rght",
"C 55 Big L&R",
"T 1 Hi Power",
"T 2 Med Power",
"T 3 Low Power",
"T 4 Pwr Floor",
"T 5 Hi Thrash",
"T 6 Md Thrash",
"T 7 Low Thrash",
"T 8 Hi Slam",
"T 9 Low Slam",
"T 10 Hi Slam RM",
"T 11 Lo Slam RM",
"T 12 Hi Studio",
"T 13 Md Studio",
"T 14 Low Studio",
"T 15 Flr Studio",
"T 16 Low Flr Std",
"T 17 Ambnt Hi",
"T 18 Ambnt Md",
"T 19 Ambnt Low",
"T 20 Ambnt Flr",
"T 21 Hi Wet",
"T 22 Mid Wet",
"T 23 Low Wet",
"T 24 Floor Wet",
"T 25 Low Flr Wet",
"T 26 Hi Blade",
"T 27 Md Blade",
"T 28 Low Blade",
"T 29 Hi Stereo",
"T 30 Md Stereo",
"T 31 Low Stereo",
"T 32 Hi Return",
"T 33 Md Return",
"T 34 Low Return",
"T 35 10\" Stark",
"T 36 12\" Stark",
"T 37 14\" Stark",
"T 38 16\" Stark",
"T 39 Hi Cannon",
"T 40 Md Cannon",
"T 41 Low Cannon",
"T 42 XLow Cannon",
"T 43 Hi Cannon HL",
"T 44 Md Cann HL",
"T 45 Low Cann HL",
"T 46 XLo Cann HL",
"T 47 Hi Dbl",
"T 48 Md Dbl",
"T 49 Low Dbl",
"T 50 Hi Dbl RM",
"T 51 Md Dbl RM",
"T 52 Low Dbl RM",
"T 53 Hi Clear",
"T 54 Md Clear",
"T 55 Low Clear",
"T 56 Amb Hi Clear",
"T 57 Amb Md Clr",
"T 58 Amb La Clr",
"T 59 Hi Clr Wet",
"T 60 Md Clr Wet",
"T 61 Low Clr Wet",
"T 62 Ol' Hex 1",
"T 63 Ol' Hex 2",
"T 64 Ol' Hex 3",
"T 65 Wet Hex 1",
"T 66 Wet Hex 2",
"T 67 Wet Hex 3",
"T 68 Hi Dynamic",
"T 69 Md Dynamic",
"T 70 Low Dynamic",
"T 71 Hi D Amb",
"T 72 Md D Amb",
"T 73 Low D Amb",
"T 74 Hi Plate",
"T 75 Md Plate",
"T 76 Low Plate",
"T 77 Hi Media",
"T 78 Md Media",
"T 79 Low Media",
"T 80 Hi Flange",
"T 81 Mid Flange",
"T 82 Low Flange",
"T 83 Hi Aggrssv",
"T 84 Md Aggrssv",
"T 85 Low Aggrssv",
"T 86 Hi Ring",
"T 87 Low Ring",
"T 88 Hi Ring RM",
"T 89 Low Ring RM",
"T 90 Hi Phase",
"T 91 Md Phase",
"T 92 Low Phase",
"P 1 Talk Up",
"P 2 Talk Down",
"P 3 Squeezed",
"P 4 Released",
"P 5 Shaker",
"P 6 Hi Timble",
"P 7 Mid Timble",
"P 8 Low Timble",
"P 9 Hi W/Verb",
"P 10 Mid W/Verb",
"P 11 Low W/Verb",
"P 12 Conga",
"P 13 Low Conga",
"P 14 Hi Open",
"P 15 Low Open",
"P 16 Conga Slap",
"P 17 Low Slap",
"P 18 Dynamic A",
"P 19 Dynamic B",
"P 20 Dynamic C",
"P 21 Hi Vibra",
"P 22 Low Vibra",
"P 23 Hi Bongo",
"P 24 Low Bongo",
"P 25 Hi Cow",
"P 26 Med Cow",
"P 27 Low Cow",
"P 28 Heifer",
"P 29 Guernsey",
"P 30 Holstein",
"P 31 Torpedo",
"P 32 Low Torpedo",
"P 33 Hi Agogo",
"P 34 Low Agogo",
"P 35 Hi Muted",
"P 36 Med Muted",
"P 37 Low Muted",
"P 38 Hi Wood",
"P 39 Med Wood",
"P 40 Lo Wood",
"P 41 Hi Block",
"P 42 Med Block",
"P 43 Low Block",
"P 44 Hi Folley",
"P 45 Med Folley",
"P 46 Low Folley",
"P 47 Hi Synth",
"P 48 Mid Synth",
"P 49 Low Synth",
"P 50 Flg Synth",
"P 51 Cabasa",
"P 52 Fast Cabasa",
"P 53 Long Cabasa",
"P 54 Marabasa",
"P 55 Tambrine",
"P 56 Dark Tambrn",
"P 57 Hard Tambrn",
"P 58 Hi Sticks",
"P 59 Med Sticks",
"P 60 Low Sticks",
"P 61 Finger Snaps",
"P 62 Power Snap",
"P 63 Wide Snap",
"P 64 Hand Clap",
"P 65 Gated Claps",
"P 66 Hi Clave",
"P 67 Lo Clave",
"P 68 Triangle",
"P 69 Dinner Bell",
"P 70 Maracas",
"P 71 Low Maracas",
"P 72 Fast Maracas",
"P 73 Far East",
"P 74 Far West",
"P 75 Odd Shake",
"P 76 Bead Bag",
"E 1 Gut Wrench",
"E 2 Xylimbal",
"E 3 Xylimbal 2",
"E 4 Xylimbal 3",
"E 5 Layr Bell",
"E 6 Hi Lip Pop",
"E 7 Lip Pop",
"E 8 Loose Lip",
"E 9 Door Slam",
"E 10 Puh!",
"E 11 Puh-tooy",
"E 12 Scrape It",
"E 13 Broken",
"E 14 Scratch",
"E 15 Cat Scratch",
"E 16 Slow Scratch",
"E 17 Un Bottled",
"E 18 Air Wrench",
"E 19 Trq Wrench",
"E 20 Pwer Wrnch",
"E 21 Fat Frog",
"E 22 Anvil",
"E 23 Trash Lid",
"E 24 Trash Can",
"E 25 Dumpster",
"E 26 Firecracker",
"E 27 China Break",
"E 28 Glass Break",
"E 29 Window Brk",
"E 30 Chopstix",
"E 31 Bottle",
"E 32 Low Bottle",
"E 33 Jug",
"E 34 STorpedo",
"E 35 Hi Whip",
"E 36 Low Whip",
"E 37 Whippit",
"E 38 Tomb Slam",
"E 39 Hollow 1",
"E 40 Hollow 2",
"E 41 Hollow 3",
"E 42 Hollow 4",
"E 43 Hollow 5",
"E 44 Hollow 6",
"E 45 Hollow 7",
"E 46 Hollow 8",
"E 47 Blip",
"E 48 Big Blip",
"E 49 Sour Milk",
"E 50 Hi Thang",
"E 51 Thang",
"E 52 Low Thang",
"E 53 A Squib?",
"E 54 A Squab?",
"E 55 Hi Pipe",
"E 56 Mid Pipe",
"E 57 Low Pipe",
"E 58 Hi Ethnic",
"E 59 Med Ethnic",
"E 60 Low Ethnic",
"E 61 Bent Bongo",
"E 62 Re-Bent",
"E 63 Hi Filter",
"E 64 Low Filter",
"E 65 Ratl Boom",
"E 66 Face Slap",
"E 67 Heavy Metal",
"E 68 Lite Metal",
"E 69 Clatter",
"E 70 Bamboo",
"E 71 Bamb Cmbo",
"E 72 Digital",
"E 73 Tamboo",
"E 74 Schizoid",
"E 75 Thunder",
"E 76 Analouge",
"E 77 Re-Synth",
"E 78 L To R",
"E 79 Saucers?",
"E 80 Silence"
};
public static final String[] DM5_DRUMS = new String[]
{
// I note that D4 drums start with 1, whereas DM5 drums start with 0
"K 0 Arena",
"K 1 Producer",
"K 2 Pwr Rock",
"K 3 Fat Head",
"K 4 Dark Fat",
"K 5 Passion",
"K 6 Holo",
"K 7 WarmKick",
"K 8 SpeedMtl",
"K 9 Plastine",
"K 10 Back Mic",
"K 11 FrontMic",
"K 12 Lite",
"K 13 RubbrBtr",
"K 14 Simple",
"K 15 Basic",
"K 16 Slammin'",
"K 17 Foot",
"K 18 Bch Ball",
"K 19 LowSolid",
"K 20 Feels Gd",
"K 21 Pillow",
"K 22 Fusion",
"K 23 Reggae",
"K 24 Kinetica",
"K 25 Brt Ambi",
"K 26 Hi Gate",
"K 27 Med Room",
"K 28 Lrg Room",
"K 29 Forum",
"K 30 Punchy",
"K 31 InTheKik",
"K 32 Big One",
"K 33 Bonk",
"K 34 RockClub",
"K 35 MyTribe",
"K 36 RoundAmb",
"K 37 RoundAtk",
"K 38 HardAttk",
"K 39 Blitz",
"K 40 9oh9Kik1",
"K 41 9oh9Kik2",
"K 42 9oh9Kik3",
"K 43 Native",
"K 44 AnaKick",
"K 45 Mangler",
"K 46 SuprRave",
"K 47 Spud",
"K 48 Rap Wave",
"K 49 Beat Box",
"K 50 WeR Borg",
"K 51 Indscpln",
"K 52 SonarWav",
"K 53 60Cycles",
"K 54 Motor",
"K 55 Stages",
"K 56 Cybrwave",
"K 57 Cybo",
"K 58 BrainEtr",
"K 59 Squish",
"K 60 Crunch",
"K 61 Thump",
"K 62 CrnchHed",
"K 63 CrnchFlp",
"K 64 Pwr Down",
"K 65 Hardware",
"K 66 JunkDrwr",
"K 67 Junk Man",
"K 68 LooseLug",
"K 69 Carpet",
"K 70 Smoke",
"K 71 Aggresor",
"K 72 BadBreth",
"K 73 King",
"K 74 Xpando",
"K 75 Deep IIx",
"K 76 Dry IIx",
"K 77 Hex Kick",
"K 78 Fat Boy",
"K 79 Techtik",
"K 80 Skool",
"K 81 KidStuff",
"K 82 Scratchr",
"K 83 Afro",
"K 84 Cuban",
"K 85 Tribal",
"K 86 Steak",
"K 87 Hazey",
"K 88 Koosh",
"K 89 Bowels",
"K 90 Obergeil",
"K 91 HiEnergy",
"K 92 Undrwrld",
"K 93 Cruiser",
"K 94 Plumbing",
"S 0 Get Real",
"S 1 Big Rim",
"S 2 Woodclif",
"S 3 Hip Hop",
"S 4 Heartlnd",
"S 5 PwrBalld",
"S 6 Session",
"S 7 Funky",
"S 8 Choked",
"S 9 Crome",
"S 10 ChromRng",
"S 11 ChromeHi",
"S 12 Beauty",
"S 13 Piccolo",
"S 14 Fat Picc",
"S 15 Hi Ambi",
"S 16 MicroPic",
"S 17 PiccRoom",
"S 18 Low Picc",
"S 19 NicePicc",
"S 20 Gun Picc",
"S 21 Dyn Picc",
"S 22 Velo>Rim",
"S 23 Tiny E",
"S 24 Crisp",
"S 25 Clean",
"S 26 Cadence",
"S 27 DryShell",
"S 28 TopBrass",
"S 29 UltraThn",
"S 30 Kamko",
"S 31 Hawaii",
"S 32 BluSprkl",
"S 33 Bronze",
"S 34 Hard Rim",
"S 35 Vintage",
"S 36 Weasel",
"S 37 WetWeasl",
"S 38 Has Edge",
"S 39 WithClap",
"S 40 Raunchy",
"S 41 DeepRoom",
"S 42 SlapRoom",
"S 43 WarmRoom",
"S 44 AnaKick",
"S 45 LongTail",
"S 46 ExtraLrg",
"S 47 Big Hall",
"S 48 BigPlate",
"S 49 Compresd",
"S 50 Solar",
"S 51 Far Away",
"S 52 Postmdrn",
"S 53 Loose",
"S 54 Grinder",
"S 55 Freaky",
"S 56 Woody",
"S 57 ThinSkin",
"S 58 Crank It",
"S 59 Snareo",
"S 60 TightLug",
"S 61 Ibid",
"S 62 Beefrank",
"S 63 SlowFunk",
"S 64 Low Ring",
"S 65 FreakRim",
"S 66 MetlHarm",
"S 67 Groovy",
"S 68 Splat",
"S 69 RatlWood",
"S 70 Trashier",
"S 71 8oh8 Snr",
"S 72 8oh8 Rim",
"S 73 8oh8 Tin",
"S 74 Krafty",
"S 75 MetlPipe",
"S 76 9oh9 Snr",
"S 77 9oh9 Rim",
"S 78 Release",
"S 79 City",
"S 80 U Bahn",
"S 81 Gritty",
"S 82 Fat Grit",
"S 83 Rank",
"S 84 BrikHaus",
"S 85 Overtone",
"S 86 DingoBoy",
"S 87 Wonk",
"S 88 HexSnare",
"S 89 IIxSnare",
"S 90 70'sFunk",
"S 91 Ol Skool",
"S 92 Stutter",
"S 93 ThikGate",
"S 94 MetalGat",
"S 95 Face Beat",
"S 96 Thrasher",
"S 97 Shred",
"S 98 Pipe Bomb",
"S 99 Clanker",
"S 100 Blast",
"S 101 Assault",
"S 102 Speck",
"S 103 Spectral",
"S 104 OrchRoom",
"S 105 OrchHall",
"S 106 OrchRoll",
"S 107 BrushFat",
"S 108 BrushThn",
"S 109 BrushRim",
"S 110 Jazz Hit",
"S 111 Stik>Snr",
"S 112 DryStick",
"S 113 LiveStik",
"S 114 DeepStik",
"S 115 StikRoom",
"S 116 AmbiStik",
"T 0 Hero Hi",
"T 1 Hero Mid",
"T 2 Hero Low",
"T 3 Hero Flr",
"T 4 Open Hi",
"T 5 Open Mid",
"T 6 Open Low",
"T 7 PinstrpH",
"T 8 PinstrpM",
"T 9 PinstrpL",
"T 10 StudioHi",
"T 11 StudioMd",
"T 12 StudioLo",
"T 13 Big O Hi",
"T 14 Big O Lo",
"T 15 Girth Hi",
"T 16 Girth Lo",
"T 17 InsideHi",
"T 18 InsideMd",
"T 19 InsideLo",
"T 20 Jazz Hi",
"T 21 Jazz Low",
"T 22 Hall Hi",
"T 23 Hall Mid",
"T 24 Hall Low",
"T 25 Hall Flr",
"T 26 Psilo Hi",
"T 27 PsiloMid",
"T 28 PsiloLow",
"T 29 PsiloFlr",
"T 30 CannonHi",
"T 31 CannonMd",
"T 32 CannonLo",
"T 33 CannonFl",
"T 34 CanFlngH",
"T 35 CanFlngM",
"T 36 CanFlngL",
"T 37 Ballo Hi",
"T 38 BalloLow",
"T 39 MakRakHi",
"T 40 MakRakMd",
"T 41 MakRakLo",
"T 42 MakRakFl",
"T 43 Omega Hi",
"T 44 Omega Md",
"T 45 Omega Lo",
"T 46 Omega Fl",
"T 47 Salvo Hi",
"T 48 Salvo Md",
"T 49 Salvo Lo",
"T 50 Hex Hi",
"T 51 Hex Mid",
"T 52 Hex Low",
"T 53 HexFloor",
"T 54 ClascHex",
"T 55 Noise Hi",
"T 56 Noise Lo",
"T 57 Exo Hi",
"T 58 Exo Mid",
"T 59 Exo Low",
"T 60 OilCanHi",
"T 61 OilCanLo",
"T 62 8oh8 Hi",
"T 63 8oh8 Mid",
"T 64 8oh8 Low",
"T 65 Bit TomH",
"T 66 Bit TomL",
"T 67 BombTomH",
"T 68 BombTomM",
"T 69 BombTomL",
"T 70 Mad Roto",
"H 0 BrtTite1",
"H 1 BrtTite2",
"H 2 Brt Clsd",
"H 3 Brt Half",
"H 4 BrtLoose",
"H 5 BrtLoosr",
"H 6 DynBrt 1",
"H 7 DynBrt 2",
"H 8 Brt Open",
"H 9 Brt Foot",
"H 10 SR Clsd",
"H 11 SR Half",
"H 12 SR Open",
"H 13 LiteClsd",
"H 14 Lite Dyn",
"H 15 LiteHalf",
"H 16 LiteOpen",
"H 17 FlngClsd",
"H 18 FlngHalf",
"H 19 FlngOpen",
"H 20 Rok Clsd",
"H 21 RokLoose",
"H 22 RokSlosh",
"H 23 Rok Open",
"H 24 Rok Foot",
"H 25 8oh8Clsd",
"H 26 8oh8Open",
"H 27 Rap Clsd",
"H 28 Rap Half",
"H 29 Rap Open",
"H 30 Zip Clsd",
"H 31 Zip Open",
"H 32 Zap Clsd",
"H 33 Zap Open",
"C 0 Ride Cym",
"C 1 VeloRide",
"C 2 PingRide",
"C 3 Exotic",
"C 4 RideBell",
"C 5 TransBel",
"C 6 El Bell",
"C 7 Avantia",
"C 8 CymParts",
"C 9 BrtCrash",
"C 10 Ster Brt",
"C 11 DrkCrash",
"C 12 SterDark",
"C 13 LR Crsh1",
"C 14 LR Crsh2",
"C 15 IceCrash",
"C 16 ZootMute",
"C 17 DrtyMute",
"C 18 Splash",
"C 19 MicroCym",
"C 20 8 Splash",
"C 21 China",
"C 22 SterChna",
"C 23 Woo Han",
"C 24 Doppler",
"C 25 TipShank",
"C 26 SterPhaz",
"C 27 Hammered",
"C 28 EastWest",
"C 29 Orch Cym",
"C 30 8oh8Crsh",
"C 31 8CrashFl",
"C 32 Syn Pang",
"C 33 SynCrash",
"C 34 BlastCym",
"C 35 Noiz Cym",
"P 0 Agogo Hi",
"P 1 Agogo Lo",
"P 2 AgoPitch",
"P 3 Noggin",
"P 4 Reco Hi",
"P 5 Reco Lo",
"P 6 Clay Pot",
"P 7 Triangle",
"P 8 Tri Mute",
"P 9 TriPitch",
"P 10 DrumStix",
"P 11 Cowbell",
"P 12 Tambrine",
"P 13 TamPitch",
"P 14 Sleighbl",
"P 15 Snowjob",
"P 16 Cabasa",
"P 17 SharpShk",
"P 18 TikTak",
"P 19 Maracas",
"P 20 ShakerHi",
"P 21 ShakerLo",
"P 22 Bead Pot",
"P 23 BeadShk1",
"P 24 BeadShk2",
"P 25 BeadShk3",
"P 26 SynShkr1",
"P 27 SynShkr2",
"P 28 SynShkrD",
"P 29 Rattle",
"P 30 CrashrHd",
"P 31 CrashrSf",
"P 32 Rainshak",
"P 33 RainStik",
"P 34 Gravel",
"P 35 RatlBwap",
"P 36 Bongo Hi",
"P 37 BngHiSlp",
"P 38 Bongo Lo",
"P 39 BngLoSlp",
"P 40 Conga Hi",
"P 41 Conga Lo",
"P 42 CongaSlp",
"P 43 Slap Dyn",
"P 44 Screech",
"P 45 Cuica Hi",
"P 46 Cuica Lo",
"P 47 AmIndian",
"P 48 Tatonka",
"P 49 WarPaint",
"P 50 BoLanGoo",
"P 51 BoLanDyn",
"P 52 BreketaH",
"P 53 BreketaL",
"P 54 BrktaDyn",
"P 55 Elephant",
"P 56 GhatamHi",
"P 57 GhatamLo",
"P 58 Udu",
"P 59 Ethnika",
"P 60 Amazon",
"P 61 Nagara",
"P 62 Oobla Hi",
"P 63 Oobla Lo",
"P 64 OoblaDyn",
"P 65 Paah",
"P 66 Ethno",
"P 67 EasternV",
"P 68 TalkngHi",
"P 69 TalkngLo",
"P 70 HandDrum",
"P 71 Tavil Hi",
"P 72 Tavil Lo",
"P 73 Monastic",
"P 74 Tavasa",
"P 75 Tabla",
"P 76 TblaDyn1",
"P 77 TblaDyn2",
"P 78 Ghatabla",
"P 79 Tablchrd",
"P 80 Haji",
"P 81 TimbleHi",
"P 82 TimbleLo",
"P 83 8cwPitch",
"P 84 8oh8 Cow",
"P 85 8oh8 Rim",
"P 86 CongaRap",
"P 87 8oh8Clap",
"P 88 9oh9Clap",
"P 89 Big Clap",
"P 90 LiteSnap",
"P 91 ClscSnap",
"P 92 Pwr Snap",
"P 93 Clave",
"P 94 ClveKord",
"P 95 Castanet",
"P 96 CastRoll",
"P 97 CastDyn1",
"P 98 CastDyn2",
"P 99 Wood Hi",
"P 100 Wood Lo",
"P 101 Block Hi",
"P 102 Block Lo",
"P 103 TempleHi",
"P 104 TempleLo",
"P 105 Vibrslap",
"P 106 Oil Can",
"P 107 OilPitch",
"P 108 MetalTik",
"P 109 Plucky",
"P 110 PopCheek",
"P 111 Rappotab",
"P 112 I'm Clay",
"P 113 BigoBrek",
"P 114 SpacePrc",
"E 0 Anvil",
"E 1 BallPeen",
"E 2 BattyBel",
"E 3 4 Star",
"E 4 Blksmith",
"E 5 Clank",
"E 6 Tank Hit",
"E 7 SunBurst",
"E 8 Industry",
"E 9 Big Shot",
"E 10 Metal",
"E 11 WhtNoiz1",
"E 12 WhtNoiz2",
"E 13 Spectre1",
"E 14 Spectre2",
"E 15 Tesla",
"E 16 Machine",
"E 17 PinkZap1",
"E 18 PinkZap2",
"E 19 PnkBlst1",
"E 20 PnkBlst2",
"E 21 Zap 1",
"E 22 Zap 2",
"E 23 Zap 3",
"E 24 Wood Zap",
"E 25 Dyn Zap",
"E 26 Dual Zap",
"E 27 Residue",
"E 28 WhipCrak",
"E 29 Kung Fu",
"E 30 WhipNoiz",
"E 31 Vinyl 1",
"E 32 Vinyl 2",
"E 33 DynVinyl",
"E 34 PwrGtrHi",
"E 35 PwrGtrLo",
"E 36 Gtr Hit",
"E 37 FlngGtrH",
"E 38 FlngGtrL",
"E 39 Guitrbot",
"E 40 Slippery",
"E 41 Danger!",
"E 42 Screech",
"E 43 FlScreeH",
"E 44 FlScreeL",
"E 45 Mercury",
"E 46 Technoid",
"E 47 Bucket",
"E 48 Grab Bag",
"E 49 Alloys 1",
"E 50 Alloys 2",
"E 51 Velopede",
"E 52 Static",
"E 53 Pole",
"E 54 Froggy",
"E 55 Sun City",
"E 56 InduHit",
"E 57 JetBeads",
"E 58 Plonk",
"E 59 Klonk",
"E 60 Pop",
"E 61 Knock",
"E 62 Metronom",
"E 63 Silence",
"R 0 BrtHatC1",
"R 1 BrtHatC2",
"R 2 RokHatCl",
"R 3 Real Snr",
"R 4 LooseSnr",
"R 5 TinSnare",
"R 6 ValleySn",
"R 7 FreakSnr",
"R 8 Aliens",
"R 9 Zapalog",
"R 10 Blasters",
"R 11 Metalize",
"R 12 ShknBake",
"R 13 Triblism",
"R 14 CngoBngo",
"R 15 RagaBabl"
};
public boolean testVerify(Synth synth2,
String key,
Object obj1, Object obj2)
{
// spaces at end of name are fine
if (key.equals("name")) return true;
return false;
}
    /** Return a list of all patch number names ("0".."20").  Default is { "Main" } */
    public String[] getPatchNumberNames() { return buildIntegerNames(21, 0); }
    /** Return a list whether patches in banks are writeable.  Default is { false } */
    public boolean[] getWriteableBanks() { return new boolean[] { true }; }
    /** Return a list whether individual patches can be written.  Default is FALSE. */
    public boolean getSupportsPatchWrites() { return true; }
    /** Maximum patch-name length.  NOTE(review): parse()/emit() handle only 14 name
        bytes in sysex -- confirm 15 here is intentional. */
    public int getPatchNameLength() { return 15; }
    /** Milliseconds to wait for each patch during a batch download. */
    public int getBatchDownloadWaitTime() { return 1500; }
    /** Flags that the librarian functionality has been verified for this editor. */
    public boolean librarianTested() { return true; }
}
|
edisyn/synth/alesisd4/AlesisD4.java
|
/***
Copyright 2020 by Sean Luke
Licensed under the Apache License version 2.0
*/
package edisyn.synth.alesisd4;
import edisyn.*;
import edisyn.gui.*;
import edisyn.util.*;
import java.awt.*;
import java.awt.geom.*;
import javax.swing.border.*;
import javax.swing.*;
import java.awt.event.*;
import java.util.*;
import java.io.*;
import javax.sound.midi.*;
/**
A patch editor for the Alesis D4 and DM5.
@author Sean Luke
*/
public class AlesisD4 extends Synth
{
// Display strings for the 7-position pan dial (left ... center ... right)
public static final String[] PANS = { "<3", "<2", "<1", "--", "1>", "2", "3" };
// Display strings for the mute-group setting of each drum
public static final String[] GROUPS = { "Multi", "Single", "Group 1", "Group 2" };
// Note names used to render MIDI note numbers
public static final String[] KEYS = { "C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B" };
// These are NRPN parameter settings for various stuff we emit for individual parameters
public static final int NRPN_ROOT = 0x10;
public static final int NRPN_NOTE = 0x19;
public static final int NRPN_BANK = 0x08;
public static final int NRPN_NUMBER = 0x09;
public static final int NRPN_COARSE = 0x0A;
public static final int NRPN_FINE = 0x0B;
public static final int NRPN_VOLUME = 0x0C;
public static final int NRPN_PAN = 0x0D;
public static final int NRPN_OUTPUT = 0x0E;
public static final int NRPN_GROUP = 0x0F;
// Milliseconds to pause after each NRPN message so the unit keeps up
public static final int PAUSE_NRPN = 50;
// All the drum sounds are lumped together (see end of file). These are the numbers
// of each drum sound in each group -- I need this to break them out to send/receive sysex
public static final int[] D4_BANKS = { 99, 99, 55, 92, 76, 80 };
public static final int[] DM5_BANKS = { 95, 117, 71, 34, 36, 114, 65, 16 };
// MIDI pitch played by the preview button; updated as drum parameters are edited
public int testNote = 60;
// choosers updated when changing D4 <--> D5M or changing the root note
Chooser[] drumChoosers = new Chooser[61];
// the "DM5" checkbox in the global category
JCheckBox check;
// true if we are editing a DM5 rather than a D4
boolean dm5;
// preferences key under which the (inverted) D4/DM5 choice is stored
public static final String DM5_KEY = "DM5";
public boolean isDM5() { return dm5; }
// guards setDM5 against re-entry (the checkbox listener calls back into it)
boolean reenntrantBlock = false;
/** Switch the editor between D4 (false) and DM5 (true) mode, updating the
    title, the drum-voice choosers, and the checkbox.  If STORE is true the
    choice is saved to preferences.  NOTE the stored value is INVERTED
    ("" + (!val)); the constructor inverts it again when reading it back. */
public void setDM5(boolean val, boolean store)
    {
    // the checkbox listener calls setDM5, which updates the checkbox --
    // the guard breaks that cycle
    if (reenntrantBlock) return;
    reenntrantBlock = true;
    if (store) setLastX("" + (!val), DM5_KEY, getSynthClassName(), false);
    dm5 = val;
    updateTitle();
    updateChoosers();
    if (check != null) check.setSelected(dm5);
    reenntrantBlock = false;
    }
public static String getSynthName() { return "Alesis D4/DM5"; }
/** Build the editor: a General tab (name, triggers, drums 0-6) followed by
    three tabs of 18 drums each (7-24, 25-42, 43-60). */
public AlesisD4()
    {
    model.set("number", 0);
    // restore the D4/DM5 choice; setDM5 stores it inverted, so invert it back here
    String m = getLastX(DM5_KEY, getSynthClassName());
    dm5 = (m == null ? false : !Boolean.parseBoolean(m));
    VBox vbox = new VBox();
    HBox hbox = new HBox();
    JComponent sourcePanel = new SynthPanel(this);
    vbox.add(addNameGlobal(Style.COLOR_GLOBAL()));
    vbox.add(addTriggers(Style.COLOR_A()));
    vbox.add(addDrums(0, 7, Style.COLOR_B()));
    sourcePanel.add(vbox, BorderLayout.CENTER);
    addTab("General and Drums 0-6", sourcePanel);
    // remaining 54 drums in three tabs of 18, alternating colors
    boolean primary = true;
    for(int i = 7; i <= 60; i+= 18)
        {
        sourcePanel = new SynthPanel(this);
        vbox = new VBox();
        vbox.add(addDrums(i, i+18, primary ? Style.COLOR_A() : Style.COLOR_B()));
        sourcePanel.add(vbox, BorderLayout.CENTER);
        addTab("Drums " + i + "-" + (i + 17), sourcePanel);
        primary = !primary;
        }
    model.set("name", "Untitled");
    loadDefaults();
    // choosers were built with the D4 voice list; swap in DM5 voices if needed
    updateChoosers();
    }
/** Pick the init-patch resource matching the currently selected hardware model. */
public String getDefaultResourceFileName()
    {
    return isDM5() ? "AlesisDM5.init" : "AlesisD4.init";
    }
public String getHTMLResourceFileName() { return "AlesisD4.html"; }
/** Prompt the user for a patch number until a valid integer in 0...20 is
    entered (stored in CHANGE) or the dialog is cancelled (returns false). */
public boolean gatherPatchInfo(String title, Model change, boolean writing)
    {
    JTextField number = new SelectedTextField("" + model.get("number"), 3);
    while(true)
        {
        if (!showMultiOption(this, new String[] { "Patch Number"},
                new JComponent[] { number }, title, "Enter the Patch number"))
            return false;
        int n;
        try
            {
            n = Integer.parseInt(number.getText());
            }
        catch (NumberFormatException e)
            {
            n = -1;     // not an integer: force the range check below to fail
            }
        if (n >= 0 && n <= 20)
            {
            change.set("number", n);
            return true;
            }
        showSimpleError(title, "The Patch Number must be an integer 0...20");
        }
    }
/** Add the global patch category (name, id, number, etc.) */
public JComponent addNameGlobal(Color color)
    {
    Category globalCategory = new Category(this, getSynthName(), color);
    //globalCategory.makeUnresettable();
    JComponent comp;
    String[] params;
    HBox hbox = new HBox();
    VBox vbox = new VBox();
    HBox hbox2 = new HBox();
    // patch-number display
    comp = new PatchDisplay(this, 9, false);
    hbox2.add(comp);
    // D4 <-> DM5 mode toggle; routes through setDM5 so the voice lists get swapped
    check = new JCheckBox("DM5");
    check.setSelected(dm5);
    check.addActionListener(new ActionListener()
        {
        public void actionPerformed(ActionEvent e)
            {
            setDM5(check.isSelected(), true);
            }
        });
    check.setFont(Style.SMALL_FONT());
    check.setOpaque(false);
    check.setForeground(Style.TEXT_COLOR());
    hbox2.addLast(check);
    vbox.add(hbox2);
    // patch-name field; keeps the window title in sync
    comp = new StringComponent("Patch Name", this, "name", 15, "Name must be up to 15 ASCII characters.")
        {
        public String replace(String val)
            {
            return revisePatchName(val);
            }
        public void update(String key, Model model)
            {
            super.update(key, model);
            updateTitle();
            }
        };
    vbox.add(comp);
    hbox.add(vbox);
    // root note: shifting it relabels all 61 drum choosers
    comp = new LabelledDial("Root Note", this, "drumsetnoteroot", color, 0, 67)
        {
        public void update(String key, Model model)
            {
            super.update(key, model);
            updateDrumNoteLabels(); // gonna be expensive
            }
        public String map(int val)
            {
            return KEYS[val % 12] + (val / 12 - 2);
            }
        };
    hbox.add(comp);
    globalCategory.add(hbox, BorderLayout.WEST);
    return globalCategory;
    }
/** Restrict the patch name to printable ASCII: after the superclass's own
    revision, every character outside 32...127 is replaced with a space. */
public String revisePatchName(String name)
    {
    String revised = super.revisePatchName(name);
    if (revised == null) revised = "";
    StringBuilder buf = new StringBuilder(revised.length());
    for(int i = 0; i < revised.length(); i++)
        {
        char c = revised.charAt(i);
        buf.append((c < 32 || c > 127) ? ' ' : c);
        }
    return buf.toString();
    }
/** Build the chooser label for drum slot NOTE given the drumset ROOT:
    " Drum <slot> ( <midi>: <name> )", where MIDI note 60 reads "Middle C".
    The default root is 36. */
public String getDrumNoteLabel(int note, int root)
    {
    int n = note + root;
    String keyName = (n == 60) ? "Middle C" : (KEYS[n % 12] + (n / 12 - 2));
    return " Drum " + note + " ( " + n + ": " + keyName + " )";
    }
/** Relabel all 61 drum choosers after the root note has changed. */
public void updateDrumNoteLabels()
    {
    int root = model.get("drumsetnoteroot", 36);
    for(int drum = 0; drum < drumChoosers.length; drum++)
        {
        Chooser chooser = drumChoosers[drum];
        if (chooser == null) continue;      // panels may not all be built yet
        chooser.getLabel().setText(getDrumNoteLabel(drum, root));
        }
    repaint();
    }
/** Swap every drum chooser's element list between the D4 and DM5 voice
    tables, preserving each chooser's selected index where possible.
    Listeners and undo are suspended so the swap doesn't emit MIDI or
    pollute the undo stack. */
public void updateChoosers()
    {
    // disable listeners
    boolean li = model.getUpdateListeners();
    model.setUpdateListeners(false);
    boolean un = undo.getWillPush();
    undo.setWillPush(false);
    for(int i = 0; i < 61; i++)
        {
        if (drumChoosers[i] != null)
            {
            int j = drumChoosers[i].getIndex();
            // a DM5 voice index may not exist in the shorter D4 table
            if (!isDM5() && j >= D4_DRUMS.length) // too high
                {
                j = 0;
                }
            drumChoosers[i].setElements(drumChoosers[i].getLabelText(), isDM5() ? DM5_DRUMS : D4_DRUMS);
            drumChoosers[i].setIndex(j);
            }
        }
    // reenable listeners
    model.setUpdateListeners(li);
    undo.setWillPush(un);
    repaint();
    }
/** Build the category holding drums START ... END-1, laid out two drums per
    row.  The per-drum widgets are identical for the left and right drum of a
    row, so they are built by addOneDrum(); only the right drum's final dial
    uses addLast so the row stretches. */
public JComponent addDrums(int start, int end, Color color)
    {
    Category category = new Category(this, "Drums " + start + " - " + (end - 1), color);
    VBox vbox = new VBox();
    int root = model.get("drumsetnoteroot", 36);
    for(int i = start; i < end; i+=2)
        {
        HBox hbox = new HBox();
        // left drum of the pair
        addOneDrum(hbox, i, color, root, false);
        if (i < end - 1)
            {
            hbox.add(Strut.makeHorizontalStrut(30));
            // right drum of the pair
            addOneDrum(hbox, i + 1, color, root, true);
            }
        vbox.add(hbox);
        }
    category.add(vbox, BorderLayout.CENTER);
    return category;
    }

/** Append the full widget set for a single drum (voice chooser, aux-out
    checkbox, group chooser, volume/pan/coarse/fine dials) to HBOX.
    The voice chooser is registered in drumChoosers[] so updateChoosers()
    can swap the D4/DM5 voice lists later; it is initially built with the
    D4 list.  If LAST is true the Fine dial is added with addLast. */
void addOneDrum(HBox hbox, int drum, Color color, int root, boolean last)
    {
    VBox vbox2 = new VBox();
    JComponent comp = new Chooser(getDrumNoteLabel(drum, root), this, "drum" + drum + "voice", D4_DRUMS);
    vbox2.add(comp);
    drumChoosers[drum] = ((Chooser)comp);
    HBox hbox2 = new HBox();
    comp = new CheckBox("Aux Out", this, "drum" + drum + "output");
    hbox2.add(comp);
    comp = new Chooser("Groups", this, "drum" + drum + "groups", GROUPS);
    hbox2.add(comp);
    vbox2.add(hbox2);
    hbox.add(vbox2);
    comp = new LabelledDial("Volume", this, "drum" + drum + "volume", color, 0, 99);
    hbox.add(comp);
    comp = new LabelledDial("Pan", this, "drum" + drum + "pan", color, 0, 6)
        {
        public String map(int val)
            {
            return PANS[val];
            }
        public boolean isSymmetric() { return true; }
        };
    hbox.add(comp);
    comp = new LabelledDial("Coarse", this, "drum" + drum + "coarse", color, 0, 7, 4)
        {
        public int getDefaultValue() { return 4; }
        public double getStartAngle()
            {
            return 245;
            }
        };
    hbox.add(comp);
    comp = new LabelledDial("Fine", this, "drum" + drum + "fine", color, 0, 99);
    if (last) hbox.addLast(comp);
    else hbox.add(comp);
    }
/** Build the Triggers category: two footswitch note dials plus twelve
    trigger note dials.  Every dial shows its value as a note name offset
    from the root note, so they are all built by makeNoteDial(). */
public JComponent addTriggers(Color color)
    {
    Category category = new Category(this, "Triggers", color);
    HBox hbox = new HBox();
    LabelledDial dial = makeNoteDial("Footswitch", "footswitchclosing", color);
    dial.addAdditionalLabel("Closing");
    hbox.add(dial);
    dial = makeNoteDial("Footswitch", "footswitchheld", color);
    dial.addAdditionalLabel("Held");
    hbox.add(dial);
    for(int i = 1; i <= 12; i++)
        {
        hbox.add(makeNoteDial("Trigger " + i + " ", "trigger" + i, color));
        }
    category.add(hbox, BorderLayout.CENTER);
    return category;
    }

/** Build one 0...60 dial for KEY whose display adds the current root note
    and renders the result as a note name.  The dial is registered on
    "drumsetnoteroot" so it redraws when the root changes. */
LabelledDial makeNoteDial(String label, String key, Color color)
    {
    LabelledDial dial = new LabelledDial(label, this, key, color, 0, 60)
        {
        public String map(int val)
            {
            val += model.get("drumsetnoteroot", 0);
            return KEYS[val % 12] + (val / 12 - 2);
            }
        };
    getModel().register("drumsetnoteroot", dial);
    return dial;
    }
public int getTestNotePitch() { return testNote; }
/** Parse a D4 or DM5 drumset sysex dump into the model.  Byte 4 identifies
    the family (0x06 = D4), byte 6 is the opcode (0x01 = edit buffer,
    32...52 = stored drumset 0...20), and the payload starts at byte 7:
    14-byte name, root note, two footswitch notes, 12 trigger notes, then
    one 5-byte bit-packed packet per drum.
    NOTE(review): an opcode that is neither 0x01 nor in 32...63 leaves
    drumset at 0 and is parsed as patch 0 -- presumably callers only hand
    us real dumps; confirm upstream filtering. */
public int parse(byte[] data, boolean fromFile)
    {
    boolean d4 = (data[4] == 0x06);
    int opcode = data[6];
    int drumset = 0;
    if (opcode == 0x01) // edit buffer
        {
        drumset = -1; // we'll say that's the edit buffer
        }
    if (opcode < 64 && opcode >= 32) // Drumset. Stuff >= 64 are requests.
        {
        drumset = (opcode - 32);
        }
    if (drumset <= 21) // we got one
        {
        // byte 4 tells us which machine sent the dump; switch mode without storing
        setDM5(!d4, false);
        if (drumset != -1)
            model.set("number", drumset);
        int pos = 7; // start of data
        // get name
        char[] name = new char[14];
        for(int i = 0; i < 14; i++)
            name[i] = (char)(data[pos++]);
        model.set("name", new String(name));
        model.set("drumsetnoteroot", data[pos++]);
        model.set("footswitchclosing", data[pos++]);
        model.set("footswitchheld", data[pos++]);
        for(int i = 1; i <= 12; i++)
            {
            model.set("trigger" + i, data[pos++]);
            }
        // packets: 5 bytes per drum -- volume, pan/output/bank, number, fine, groups/coarse
        for(int i = 0; i < 61; i++)
            {
            model.set("drum" + i + "volume", data[pos++]);
            byte b = data[pos++];
            // bits 4-6 = pan, bit 3 = aux output, bits 0-2 = voice bank
            model.set("drum" + i + "pan", b >>> 4);
            model.set("drum" + i + "output", (b >>> 3) & 1);
            int bank = (b & 7);
            int number = data[pos++];
            // flatten (bank, number) into the single voice index used by the choosers;
            // out-of-range banks fall back to bank 0's size
            int banksum = 0;
            for(int bb = 0; bb < bank; bb++)
                banksum += (d4 ? D4_BANKS[bb > 5 ? 0 : bb] : DM5_BANKS[bb > 7 ? 0 : bb]);
            // assemble bank and number into voice
            model.set("drum" + i + "voice", banksum + number);
            model.set("drum" + i + "fine", data[pos++]);
            b = data[pos++];
            // bits 3-5 = mute group, bits 0-2 = coarse tune
            model.set("drum" + i + "groups", (b >>> 3) & 7);
            model.set("drum" + i + "coarse", b & 7);
            }
        return PARSE_SUCCEEDED;
        }
    else return PARSE_FAILED;
    }
/** Map a parameter value I in 0...MAX onto the 0...127 NRPN MSB range,
    rounding up and clamping at 127.  See the note in emitAll(): this
    formula was found empirically and differs from the (wrong) formula
    in Alesis's service manual. */
public int map(int i, int max)
    {
    return Math.min(127, (i * 127 + max) / max);
    }
/** Emit the NRPN messages for a single parameter change.  Each message is
    followed by a PAUSE_NRPN delay (see addNRPN).  For drum parameters we
    first emit NRPN_NOTE to select the drum being edited (which also
    retargets the preview note), then the parameter itself.

    IMPORTANT NOTE

    True to form, the NRPN documentation in Alesis's D4 service manual is completely wrong.
    The manual states that to compute the NRPN MSB value, you take the current value, multiply by 127,
    then divide by the maximum value. They even give a (wrong) example: to compute the value for
    volume = 50 (volume goes 0...99), you do 50 * 127 / 99 = 64. WRONG WRONG WRONG.

    The correct formula is unknown. However for all parameters except for COARSE TUNING
    I have had success with the equation shown in map(i, max) above. For COARSE TUNING
    I have a custom equation which seems to work right.

    Absolutely nowhere on the internet does a correction appear for this. Apparently nobody noticed.

    I can only test on the D4, so I am not certain if these equations will work properly for the drum
    voices for the DM5. I need someone else to test for me. */
public Object[] emitAll(String key)
    {
    if (key.equals("drumsetnoteroot"))
        {
        ArrayList<Object> data = new ArrayList<Object>();
        final int total = 68;
        addNRPN(data, NRPN_ROOT, 128 * map(model.get(key), total - 1));
        return data.toArray(new Object[0]);
        }
    else if (key.startsWith("drum"))        // an individual drum parameter
        {
        int drum = StringUtility.getFirstInt(key);
        // split the combined voice index into its bank and in-bank number
        int voice = model.get("drum" + drum + "voice");
        int bank = -1;
        int number = -1;
        int numBanks = (isDM5() ? DM5_BANKS.length : D4_BANKS.length);
        for(int j = 0; j < numBanks; j++)
            {
            int bankSize = (isDM5() ? DM5_BANKS[j] : D4_BANKS[j]);
            if (voice < bankSize)
                {
                bank = j;
                number = voice;
                break;
                }
            else
                voice -= bankSize;
            }
        if (bank == -1) // error, should never happen
            {
            System.err.println("ERROR (AlesisD4.emit): bank and voice are bad for " + isDM5() + " " + model.get("drum" + drum + "voice"));
            return new Object[0];
            }
        ArrayList<Object> data = new ArrayList<Object>();
        // Always select the drum's note first: this (I believe) sets the note the
        // preview plays, and makes the later NRPNs edit that particular drum.
        testNote = drum + model.get("drumsetnoteroot");
        addNRPN(data, NRPN_NOTE, 128 * map(drum, 60));
        if (key.endsWith("voice"))
            {
            addNRPN(data, NRPN_BANK, 128 * ((bank + 1) * 127 / numBanks));
            int bankSize = (isDM5() ? DM5_BANKS[bank] : D4_BANKS[bank]);
            addNRPN(data, NRPN_NUMBER, 128 * map(number, bankSize - 1));
            }
        else if (key.endsWith("coarse"))
            {
            final int total = 8;
            // map() doesn't work for coarse tuning; this alternate formula does
            addNRPN(data, NRPN_COARSE, 128 * ((model.get(key) + 1) * 127 / total));
            }
        else if (key.endsWith("fine"))
            {
            addNRPN(data, NRPN_FINE, 128 * map(model.get(key), 99));       // range 0...99
            }
        else if (key.endsWith("volume"))
            {
            addNRPN(data, NRPN_VOLUME, 128 * map(model.get(key), 99));     // range 0...99
            }
        else if (key.endsWith("pan"))
            {
            addNRPN(data, NRPN_PAN, 128 * map(model.get(key), 6));         // range 0...6
            }
        else if (key.endsWith("output"))
            {
            addNRPN(data, NRPN_OUTPUT, 128 * map(model.get(key), 1));      // range 0...1
            }
        else if (key.endsWith("groups"))
            {
            addNRPN(data, NRPN_GROUP, 128 * map(model.get(key), 3));       // range 0...3
            }
        else
            {
            System.err.println("ERROR (AlesisD4.emit): unknown key " + key + ", should never happen.");
            return new Object[0];
            }
        return data.toArray(new Object[0]);
        }
    else
        {
        return new Object[0];
        }
    }

/** Append one NRPN message for PARAM/VALUE, followed by the standard
    inter-message pause, to DATA. */
void addNRPN(ArrayList<Object> data, int param, int value)
    {
    Object[] nrpn = buildNRPN(getChannelOut(), param, value);
    for(int i = 0; i < nrpn.length; i++) data.add(nrpn[i]);
    data.add(Integer.valueOf(PAUSE_NRPN));
    }
/** Emit the whole drumset as a 343-byte sysex dump:
    7 header bytes + 14 name + root + 2 footswitches + 12 triggers
    + 61 drums x 5 bytes + checksum + F7. */
public byte[] emit(Model tempModel, boolean toWorkingMemory, boolean toFile)
    {
    if (tempModel == null)
        tempModel = getModel();
    boolean d4 = !isDM5();
    byte[] data = new byte[343];
    data[0] = (byte)0xF0;
    data[1] = (byte)0x00;                       // Alesis manufacturer ID 00 00 0E
    data[2] = (byte)0x00;
    data[3] = (byte)0x0E;
    data[4] = (byte)(d4 ? 0x06 : 0x13);         // family: 0x06 = D4, 0x13 = DM5
    data[5] = (byte)getChannelOut();
    data[6] = (byte)(toWorkingMemory ? 0x01 : 32 + tempModel.get("number"));  // opcode
    int pos = 7; // start of data
    // The name field is exactly 14 bytes.  Pad with 14 spaces so a short or
    // empty name can't make charAt(i) throw StringIndexOutOfBoundsException.
    String name = model.get("name", " ") + "              ";
    for(int i = 0; i < 14; i++)
        data[pos++] = (byte)(name.charAt(i));
    data[pos++] = (byte)model.get("drumsetnoteroot");
    data[pos++] = (byte)model.get("footswitchclosing");
    data[pos++] = (byte)model.get("footswitchheld");
    for(int i = 1; i <= 12; i++)
        {
        data[pos++] = (byte)model.get("trigger" + i);
        }
    // packets: 5 bytes per drum, mirroring the packing decoded in parse()
    for(int i = 0; i < 61; i++)
        {
        // split the combined voice index into bank and in-bank number
        int voice = model.get("drum" + i + "voice");
        int bank = -1;
        int number = -1;
        for(int j = 0; j < (isDM5() ? DM5_BANKS.length : D4_BANKS.length); j++)
            {
            if (voice < (isDM5() ? DM5_BANKS[j] : D4_BANKS[j]))
                {
                bank = j;
                number = voice;
                break;
                }
            else
                voice -= (isDM5() ? DM5_BANKS[j] : D4_BANKS[j]);
            }
        if (bank == -1) // error, should never happen
            {
            System.err.println("ERROR (AlesisD4.emit): bank and voice are bad for " + isDM5() + " " + model.get("drum" + i + "voice"));
            return new byte[0];
            }
        data[pos++] = (byte)model.get("drum" + i + "volume");
        // bits 4-6 = pan, bit 3 = aux output, bits 0-2 = bank
        data[pos++] = (byte)((model.get("drum" + i + "pan") << 4) |
            (model.get("drum" + i + "output") << 3) |
            bank);
        data[pos++] = (byte)number;
        data[pos++] = (byte)model.get("drum" + i + "fine");
        // bits 3-5 = mute group, bits 0-2 = coarse tune
        data[pos++] = (byte)((model.get("drum" + i + "groups") << 3) |
            model.get("drum" + i + "coarse"));
        }
    // checksum: low 7 bits of the sum of the payload (excludes header, checksum, F7)
    int checksum = 0;
    for(int i = 7; i < data.length - 2; i++)
        {
        checksum += data[i];
        }
    checksum = checksum & 127;
    data[data.length - 2] = (byte)checksum;
    data[data.length - 1] = (byte)0xF7;
    return data;
    }
/** Build a sysex dump request for a stored patch: the opcode is the patch
    number plus 32, with 64 added to mark it as a request. */
public byte[] requestDump(Model tempModel)
    {
    Model m = (tempModel == null ? getModel() : tempModel);
    byte opcode = (byte)(m.get("number") + 32 + 64);
    byte type = (byte)(isDM5() ? 0x13 : 0x06);
    return new byte[] { (byte)0xF0, 0x00, 0x00, 0x0E, type, (byte)getChannelOut(), opcode, (byte)0xF7 };
    }
/** Build a sysex request for the edit buffer (opcode 65, i.e. edit buffer 1 + request bit 64). */
public byte[] requestCurrentDump()
    {
    byte type = (byte)(isDM5() ? 0x13 : 0x06);
    byte[] request = { (byte)0xF0, 0x00, 0x00, 0x0E, type, (byte)getChannelOut(), 65, (byte)0xF7 };
    return request;
    }
/** Return the patch name stored in MODEL, defaulting to "Untitled". */
public String getPatchName(Model model)
    {
    return model.get("name", "Untitled");
    }
/** Render the patch number as a two-digit, zero-padded string: "00" ... "20". */
public String getPatchLocationName(Model model)
    {
    int num = model.get("number", 0);
    return (num < 10 ? "0" : "") + num;
    }
/** Return a fresh model holding the next patch number, wrapping 20 -> 0. */
public Model getNextPatchLocation(Model model)
    {
    int next = model.get("number") + 1;
    if (next >= 21)
        next = 0;
    Model newModel = buildModel();
    newModel.set("number", next);
    return newModel;
    }
/** Milliseconds to wait after a program change before sending more data. */
public int getPauseAfterChangePatch() { return 200; }
/** Milliseconds to wait after writing a patch to the unit. */
public int getPauseAfterWritePatch() { return 2000; }
/** Switch the unit to TEMPMODEL's patch via a standard MIDI program change. */
public void changePatch(Model tempModel)
    {
    tryToSendMIDI(buildPC(getChannelOut(), tempModel.get("number")));
    }
public static final String[] D4_DRUMS = new String[]
{
// I note that D4 drums start with 1, whereas DM5 drums start with 0
"K 1 Big \"O\"",
"K 2 Stomp",
"K 3 Industry",
"K 4 Spiked",
"K 5 Spike RM",
"K 6 Spike HL",
"K 7 Pillow",
"K 8 Plated",
"K 9 Hi Foot",
"K 10 Hi Foot RM",
"K 11 Foot",
"K 12 Foot RM",
"K 13 Foot HL",
"K 14 Foot GT",
"K 15 6O's R&B",
"K 16 R&B Room",
"K 17 Monster",
"K 18 Mnstr RM",
"K 19 Gtd Mnstr",
"K 20 Dbl Head",
"K 21 Dbl RM",
"K 22 Dbl HL",
"K 23 Thrash",
"K 24 Stab",
"K 25 Brt Stab",
"K 26 Stab RM",
"K 27 Stab HL",
"K 28 Reggae",
"K 29 Kinetic",
"K 30 C&W #1",
"K 31 22\" Power",
"K 32 Amb 22 Pwr",
"K 33 22\" Pwr RM",
"K 34 Solid!!",
"K 95 Solid RM",
"K 36 Tiled",
"K 37 Tiled RM",
"K 38 Chunky",
"K 39 Chunky RM",
"K 40 Chunky HL",
"K 41 Loose One",
"K 42 Amb Loose",
"K 43 Slammin'",
"K 44 Slam Room",
"K 45 22\" Deep",
"K 46 Amb Deep",
"K 47 Deep Room",
"K 48 Movin' Air",
"K 49 Mstr Mash",
"K 50 Swift",
"K 51 Abrupt",
"K 52 Fusion",
"K 53 Muffled",
"K 54 Blaster",
"K 55 24\" Power",
"K 56 Amb 24\"",
"K 57 24\" Room",
"K 58 Deep 24\"",
"K 59 Ballad",
"K 60 Big Foot",
"K 61 8 Foot RM",
"K 62 The Thud",
"K 63 Amb Thud",
"K 64 Thud Hall",
"K 65 C&W #2",
"K 66 Tite",
"K 67 Tite Room",
"K 68 Tite Hall",
"K 69 Chopped",
"K 70 Flanged",
"K 71 Deep Wood",
"K 72 Wood Room",
"K 73 Head Punch",
"K 74 Amb Punch",
"K 75 Garage",
"K 76 Studio",
"K 77 Hanger",
"K 78 Lo Wood",
"K 79 Lo Wood HL",
"K 80 Rap",
"K 81 Rumble",
"K 82 DDL Slap",
"K 83 Trendy",
"K 84 Elec..",
"K 85 ..tronic",
"K 86 Ambnt Elc.",
"K 87 Amb Tronic",
"K 88 Faazed",
"K 89 Faazed RM",
"K 90 Tite Head",
"K 91 Hi Wisp",
"K 92 LoWisp",
"K 93 Hi Wet Wisp",
"K 94 Lo Wet Wisp",
"K 95 Techno",
"K 96 Techno RM",
"K 97 Lo Techno",
"K 98 Lo Tech RM",
"K 99 Coliseum",
"S 1 Raw Hide",
"S 2 Fat City",
"S 3 Ambient",
"S 4 Torqued",
"S 5 Classic",
"S 6 Classic RM",
"S 7 Hi Class",
"S 8 Hi Class RM",
"S 9 MIT Gate",
"S 10 Dry Combo",
"S 11 Combo RM",
"S 12 Combo Plate",
"S 13 Flange",
"S 14 Media Hype",
"S 15 Rim->Center",
"S 16 Center->Rim",
"S 17 Squwank",
"S 18 Standard",
"S 19 Brighter",
"S 20 Darker",
"S 21 Hi Gated",
"S 22 Lo Gated",
"S 23 Deep Dry",
"S 24 Nasty",
"S 25 Piccolo",
"S 26 Wet Piccolo",
"S 27 Hi Piccolo",
"S 28 Amb Hi Picc",
"S 29 Aerolux",
"S 30 Wood",
"S 31 Wood Room",
"S 32 Wood Shed",
"S 33 Rap",
"S 34 Lo Rap",
"S 35 Flanger",
"S 36 Cracker Box",
"S 37 Bigger Box",
"S 38 Art O Fish",
"S 39 Air Burst",
"S 40 Edged In",
"S 41 Edged Out",
"S 42 ..Wet 40",
"S 43 ..Wet 41",
"S 44 Compressed",
"S 45 W/Verb",
"S 46 Wrap",
"S 47 Attak Rap",
"S 48 Brassy",
"S 49 Brassy RM",
"S 50 Brutal",
"S 51 Lo Brutal",
"S 52 Crisp Hit",
"S 53 Crisp RM",
"S 54 Pop 'n Pic",
"S 55 Alloy",
"S 56 Aggressive",
"S 57 WetAggrssv",
"S 58 Clik Clak",
"S 59 Chrome",
"S 60 Reverb",
"S 61 Media",
"S 62 Overtone",
"S 63 Overtone RM",
"S 64 Verbose",
"S 65 Dance!",
"S 66 Grrrated",
"S 67 Dry Punch",
"S 68 Amb Punch",
"S 69 Spiked..",
"S 70 Bad Punch",
"S 71 Pop Shot",
"S 72 Lo Pop Shot",
"S 73 Flng Shot",
"S 74 Studio A",
"S 75 Studio B",
"S 76 Hyper Pic",
"S 77 Hi Elect",
"S 78 Hi Wet Lct",
"S 79 Electric",
"S 80 Wet Electric",
"S 81 Bitchin'",
"S 82 Fringe",
"S 83 Trbo Drive",
"S 84 Play Room",
"S 85 Slap It!",
"S 86 Un Natural",
"S 87 Arena",
"S 88 Brush",
"S 89 Brush Hit",
"S 90 Tote Stik",
"S 91 Stick",
"S 92 Big Stik",
"S 93 Gated",
"S 94 Rim Shot",
"S 95 R-Shot RM",
"S 96 Gunner",
"S 97 Sm Ballad",
"S 98 W/Tmbrne",
"S 99 Bg Ballad",
"C 1 R&B Hat",
"C 2 14\" Thin",
"C 3 Dyno Edge",
"C 4 14\" Med",
"C 5 14\" Tite",
"C 6 Power Hat",
"C 7 Rock Tite",
"C 8 Rock Edge",
"C 9 Tension",
"C 10 Jazzed!",
"C 11 Hard Hat",
"C 12 Rock Tip",
"C 13 Sputt..",
"C 14 Ambient",
"C 15 Wet Hat",
"C 16 Wetter..",
"C 17 Half Open",
"C 18 Wet Half",
"C 19 Rock Half",
"C 20 Clutched",
"C 21 The Edge",
"C 22 Trashy",
"C 23 Open It Up",
"C 24 Rattle",
"C 25 Wet Rattle",
"C 26 Close->Open",
"C 27 Open->Close",
"C 28 Hard Foot",
"C 29 Soft Foot",
"C 30 Wet Foot",
"C 31 Edge->Bell",
"C 32 Bell->Edge",
"C 33 The Swing",
"C 34 Flat Ride",
"C 35 60's Flng",
"C 36 Dark Ride",
"C 37 Ping Ride",
"C 38 Bell Ride",
"C 39 Flng Jazz",
"C 40 Flng Rock",
"C 41 Hi Crash",
"C 42 Lo Crash",
"C 43 10\" Splash",
"C 44 Medium",
"C 45 Lo Medium",
"C 46 20\" China",
"C 47 18\" Crash",
"C 48 Tiny",
"C 49 20\" Bronze",
"C 50 Flng Crsh",
"C 51 Lo Pang",
"C 52 Slo Crash",
"C 53 Flng Pang",
"C 54 Lft & Rght",
"C 55 Big L&R",
"T 1 Hi Power",
"T 2 Med Power",
"T 3 Low Power",
"T 4 Pwr Floor",
"T 5 Hi Thrash",
"T 6 Md Thrash",
"T 7 Low Thrash",
"T 8 Hi Slam",
"T 9 Low Slam",
"T 10 Hi Slam RM",
"T 11 Lo Slam RM",
"T 12 Hi Studio",
"T 13 Md Studio",
"T 14 Low Studio",
"T 15 Flr Studio",
"T 16 Low Flr Std",
"T 17 Ambnt Hi",
"T 18 Ambnt Md",
"T 19 Ambnt Low",
"T 20 Ambnt Flr",
"T 21 Hi Wet",
"T 22 Mid Wet",
"T 23 Low Wet",
"T 24 Floor Wet",
"T 25 Low Flr Wet",
"T 26 Hi Blade",
"T 27 Md Blade",
"T 28 Low Blade",
"T 29 Hi Stereo",
"T 30 Md Stereo",
"T 31 Low Stereo",
"T 32 Hi Return",
"T 33 Md Return",
"T 34 Low Return",
"T 35 10\" Stark",
"T 36 12\" Stark",
"T 37 14\" Stark",
"T 38 16\" Stark",
"T 39 Hi Cannon",
"T 40 Md Cannon",
"T 41 Low Cannon",
"T 42 XLow Cannon",
"T 43 Hi Cannon HL",
"T 44 Md Cann HL",
"T 45 Low Cann HL",
"T 46 XLo Cann HL",
"T 47 Hi Dbl",
"T 48 Md Dbl",
"T 49 Low Dbl",
"T 50 Hi Dbl RM",
"T 51 Md Dbl RM",
"T 52 Low Dbl RM",
"T 53 Hi Clear",
"T 54 Md Clear",
"T 55 Low Clear",
"T 56 Amb Hi Clear",
"T 57 Amb Md Clr",
"T 58 Amb La Clr",
"T 59 Hi Clr Wet",
"T 60 Md Clr Wet",
"T 61 Low Clr Wet",
"T 62 Ol' Hex 1",
"T 63 Ol' Hex 2",
"T 64 Ol' Hex 3",
"T 65 Wet Hex 1",
"T 66 Wet Hex 2",
"T 67 Wet Hex 3",
"T 68 Hi Dynamic",
"T 69 Md Dynamic",
"T 70 Low Dynamic",
"T 71 Hi D Amb",
"T 72 Md D Amb",
"T 73 Low D Amb",
"T 74 Hi Plate",
"T 75 Md Plate",
"T 76 Low Plate",
"T 77 Hi Media",
"T 78 Md Media",
"T 79 Low Media",
"T 80 Hi Flange",
"T 81 Mid Flange",
"T 82 Low Flange",
"T B3 Hi Aggrssv",
"T 84 Md Aggrssv",
"T 85 Low Aggrssv",
"T 86 Hi Ring",
"T 87 Low Ring",
"T 88 Hi Ring RM",
"T 89 Low Ring RM",
"T 90 Hi Phase",
"T 91 Md Phase",
"T 92 Low Phase",
"P 1 Talk Up",
"P 2 Talk Down",
"P 3 Squeezed",
"P 4 Released",
"P 5 Shaker",
"P 6 Hi Timble",
"P 7 Mid Timble",
"P 8 Low Timble",
"P 9 Hi W/Verb",
"P 10 Mid W/Verb",
"P 11 Low W/Verb",
"P 12 Conga",
"P 13 Low Conga",
"P 14 Hi Open",
"P 15 Low Open",
"P 16 Conga Slap",
"P 17 Low Slap",
"P 18 Dynamic A",
"P 19 Dynamic B",
"P 20 Dynamic C",
"P 21 Hi Vibra",
"P 22 Low Vibra",
"P 23 Hi Bongo",
"P 24 Low Bongo",
"P 25 Hi Cow",
"P 26 Med Cow",
"P 27 Low Cow",
"P 28 Heifer",
"P 29 Guernsey",
"P 30 Holstein",
"P 31 Torpedo",
"P 32 Low Torpedo",
"P 33 Hi Agogo",
"P 34 Low Agogo",
"P 35 Hi Muted",
"P 36 Med Muted",
"P 37 Low Muted",
"P 38 Hi Wood",
"P 39 Med Wood",
"P 40 Lo Wood",
"P 41 Hi Block",
"P 42 Med Block",
"P 43 Low Block",
"P 44 Hi Folley",
"P 45 Med Folley",
"P 46 Low Folley",
"P 47 Hi Synth",
"P 48 Mid Synth",
"P 49 Low Synth",
"P 50 Flg Synth",
"P 51 Cabasa",
"P 52 Fast Cabasa",
"P 53 Long Cabasa",
"P 54 Marabasa",
"P 55 Tambrine",
"P 56 Dark Tambrn",
"P 57 Hard Tambrn",
"P 58 Hi Sticks",
"P 59 Med Sticks",
"P 60 Low Sticks",
"P 61 Finger Snaps",
"P 62 Power Snap",
"P 63 Wide Snap",
"P 64 Hand Clap",
"P 65 Gated Claps",
"P 66 Hi Clave",
"P 67 Lo Clave",
"P 68 Triangle",
"P 69 Dinner Bell",
"P 70 Maracas",
"P 71 Low Maracas",
"P 72 Fast Maracas",
"P 73 Far East",
"P 74 Far West",
"P 75 Odd Shake",
"P 76 Bead Bag",
"E 1 Gut Wrench",
"E 2 Xylimbal",
"E 3 Xylimbal 2",
"E 4 Xylimbal 3",
"E 5 Layr Bell",
"E 6 Hi Lip Pop",
"E 7 Lip Pop",
"E 8 Loose Lip",
"E 9 Door Slam",
"E 10 Puh!",
"E 11 Puh-tooy",
"E 12 Scrape It",
"E 13 Broken",
"E 14 Scratch",
"E 15 Cat Scratch",
"E 16 Slow Scratch",
"E 17 Un Bottled",
"E 18 Air Wrench",
"E 19 Trq Wrench",
"E 20 Pwer Wrnch",
"E 21 Fat Frog",
"E 22 Anvil",
"E 23 Trash Lid",
"E 24 Trash Can",
"E 25 Dumpster",
"E 26 Firecracker",
"E 27 China Break",
"E 28 Glass Break",
"E 29 Window Brk",
"E 30 Chopstix",
"E 31 Bottle",
"E 32 Low Bottle",
"E 33 Jug",
"E 34 STorpedo",
"E 35 Hi Whip",
"E 36 Low Whip",
"E 37 Whippit",
"E 38 Tomb Slam",
"E 39 Hollow 1",
"E 40 Hollow 2",
"E 41 Hollow 3",
"E 42 Hollow 4",
"E 43 Hollow 5",
"E 44 Hollow 6",
"E 45 Hollow 7",
"E 46 Hollow 8",
"E 47 Blip",
"E 48 Big Blip",
"E 49 Sour Milk",
"E 50 Hi Thang",
"E 51 Thang",
"E 52 Low Thang",
"E 53 A Squib?",
"E 54 A Squab?",
"E 55 Hi Pipe",
"E 56 Mid Pipe",
"E 57 Low Pipe",
"E 58 Hi Ethnic",
"E 59 Med Ethnic",
"E 60 Low Ethnic",
"E 61 Bent Bongo",
"E 62 Re-Bent",
"E 63 Hi Filter",
"E 64 Low Filter",
"E 65 Ratl Boom",
"E 66 Face Slap",
"E 67 Heavy Metal",
"E 68 Lite Metal",
"E 69 Clatter",
"E 70 Bamboo",
"E 71 Bamb Cmbo",
"E 72 Digital",
"E 73 Tamboo",
"E 74 Schizoid",
"E 75 Thunder",
"E 76 Analouge",
"E 77 Re-Synth",
"E 78 L To R",
"E 79 Saucers?",
"E 80 Silence"
};
public static final String[] DM5_DRUMS = new String[]
{
// I note that D4 drums start with 1, whereas DM5 drums start with 0
"K 0 Arena",
"K 1 Producer",
"K 2 Pwr Rock",
"K 3 Fat Head",
"K 4 Dark Fat",
"K 5 Passion",
"K 6 Holo",
"K 7 WarmKick",
"K 8 SpeedMtl",
"K 9 Plastine",
"K 10 Back Mic",
"K 11 FrontMic",
"K 12 Lite",
"K 13 RubbrBtr",
"K 14 Simple",
"K 15 Basic",
"K 16 Slammin'",
"K 17 Foot",
"K 18 Bch Ball",
"K 19 LowSolid",
"K 20 Feels Gd",
"K 21 Pillow",
"K 22 Fusion",
"K 23 Reggae",
"K 24 Kinetica",
"K 25 Brt Ambi",
"K 26 Hi Gate",
"K 27 Med Room",
"K 28 Lrg Room",
"K 29 Forum",
"K 30 Punchy",
"K 31 InTheKik",
"K 32 Big One",
"K 33 Bonk",
"K 34 RockClub",
"K 35 MyTribe",
"K 36 RoundAmb",
"K 37 RoundAtk",
"K 38 HardAttk",
"K 39 Blitz",
"K 40 9oh9Kik1",
"K 41 9oh9Kik2",
"K 42 9oh9Kik3",
"K 43 Native",
"K 44 AnaKick",
"K 45 Mangler",
"K 46 SuprRave",
"K 47 Spud",
"K 48 Rap Wave",
"K 49 Beat Box",
"K 50 WeR Borg",
"K 51 Indscpln",
"K 52 SonarWav",
"K 53 60Cycles",
"K 54 Motor",
"K 55 Stages",
"K 56 Cybrwave",
"K 57 Cybo",
"K 58 BrainEtr",
"K 59 Squish",
"K 60 Crunch",
"K 61 Thump",
"K 62 CrnchHed",
"K 63 CrnchFlp",
"K 64 Pwr Down",
"K 65 Hardware",
"K 66 JunkDrwr",
"K 67 Junk Man",
"K 68 LooseLug",
"K 69 Carpet",
"K 70 Smoke",
"K 71 Aggresor",
"K 72 BadBreth",
"K 73 King",
"K 74 Xpando",
"K 75 Deep IIx",
"K 76 Dry IIx",
"K 77 Hex Kick",
"K 78 Fat Boy",
"K 79 Techtik",
"K 80 Skool",
"K 81 KidStuff",
"K 82 Scratchr",
"K 83 Afro",
"K 84 Cuban",
"K 85 Tribal",
"K 86 Steak",
"K 87 Hazey",
"K 88 Koosh",
"K 89 Bowels",
"K 90 Obergeil",
"K 91 HiEnergy",
"K 92 Undrwrld",
"K 93 Cruiser",
"K 94 Plumbing",
"S 0 Get Real",
"S 1 Big Rim",
"S 2 Woodclif",
"S 3 Hip Hop",
"S 4 Heartlnd",
"S 5 PwrBalld",
"S 6 Session",
"S 7 Funky",
"S 8 Choked",
"S 9 Crome",
"S 10 ChromRng",
"S 11 ChromeHi",
"S 12 Beauty",
"S 13 Piccolo",
"S 14 Fat Picc",
"S 15 Hi Ambi",
"S 16 MicroPic",
"S 17 PiccRoom",
"S 18 Low Picc",
"S 19 NicePicc",
"S 20 Gun Picc",
"S 21 Dyn Picc",
"S 22 Velo>Rim",
"S 23 Tiny E",
"S 24 Crisp",
"S 25 Clean",
"S 26 Cadence",
"S 27 DryShell",
"S 28 TopBrass",
"S 29 UltraThn",
"S 30 Kamko",
"S 31 Hawaii",
"S 32 BluSprkl",
"S 33 Bronze",
"S 34 Hard Rim",
"S 35 Vintage",
"S 36 Weasel",
"S 37 WetWeasl",
"S 38 Has Edge",
"S 39 WithClap",
"S 40 Raunchy",
"S 41 DeepRoom",
"S 42 SlapRoom",
"S 43 WarmRoom",
"S 44 AnaKick",
"S 45 LongTail",
"S 46 ExtraLrg",
"S 47 Big Hall",
"S 48 BigPlate",
"S 49 Compresd",
"S 50 Solar",
"S 51 Far Away",
"S 52 Postmdrn",
"S 53 Loose",
"S 54 Grinder",
"S 55 Freaky",
"S 56 Woody",
"S 57 ThinSkin",
"S 58 Crank It",
"S 59 Snareo",
"S 60 TightLug",
"S 61 Ibid",
"S 62 Beefrank",
"S 63 SlowFunk",
"S 64 Low Ring",
"S 65 FreakRim",
"S 66 MetlHarm",
"S 67 Groovy",
"S 68 Splat",
"S 69 RatlWood",
"S 70 Trashier",
"S 71 8oh8 Snr",
"S 72 8oh8 Rim",
"S 73 8oh8 Tin",
"S 74 Krafty",
"S 75 MetlPipe",
"S 76 9oh9 Snr",
"S 77 9oh9 Rim",
"S 78 Release",
"S 79 City",
"S 80 U Bahn",
"S 81 Gritty",
"S 82 Fat Grit",
"S 83 Rank",
"S 84 BrikHaus",
"S 85 Overtone",
"S 86 DingoBoy",
"S 87 Wonk",
"S 88 HexSnare",
"S 89 IIxSnare",
"S 90 70'sFunk",
"S 91 Ol Skool",
"S 92 Stutter",
"S 93 ThikGate",
"S 94 MetalGat",
"S 95 Face Beat",
"S 96 Thrasher",
"S 97 Shred",
"S 98 Pipe Bomb",
"S 99 Clanker",
"S 100 Blast",
"S 101 Assault",
"S 102 Speck",
"S 103 Spectral",
"S 104 OrchRoom",
"S 105 OrchHall",
"S 106 OrchRoll",
"S 107 BrushFat",
"S 108 BrushThn",
"S 109 BrushRim",
"S 110 Jazz Hit",
"S 111 Stik>Snr",
"S 112 DryStick",
"S 113 LiveStik",
"S 114 DeepStik",
"S 115 StikRoom",
"S 116 AmbiStik",
"T 0 Hero Hi",
"T 1 Hero Mid",
"T 2 Hero Low",
"T 3 Hero Flr",
"T 4 Open Hi",
"T 5 Open Mid",
"T 6 Open Low",
"T 7 PinstrpH",
"T 8 PinstrpM",
"T 9 PinstrpL",
"T 10 StudioHi",
"T 11 StudioMd",
"T 12 StudioLo",
"T 13 Big O Hi",
"T 14 Big O Lo",
"T 15 Girth Hi",
"T 16 Girth Lo",
"T 17 InsideHi",
"T 18 InsideMd",
"T 19 InsideLo",
"T 20 Jazz Hi",
"T 21 Jazz Low",
"T 22 Hall Hi",
"T 23 Hall Mid",
"T 24 Hall Low",
"T 25 Hall Flr",
"T 26 Psilo Hi",
"T 27 PsiloMid",
"T 28 PsiloLow",
"T 29 PsiloFlr",
"T 30 CannonHi",
"T 31 CannonMd",
"T 32 CannonLo",
"T 33 CannonFl",
"T 34 CanFlngH",
"T 35 CanFlngM",
"T 36 CanFlngL",
"T 37 Ballo Hi",
"T 38 BalloLow",
"T 39 MakRakHi",
"T 40 MakRakMd",
"T 41 MakRakLo",
"T 42 MakRakFl",
"T 43 Omega Hi",
"T 44 Omega Md",
"T 45 Omega Lo",
"T 46 Omega Fl",
"T 47 Salvo Hi",
"T 48 Salvo Md",
"T 49 Salvo Lo",
"T 50 Hex Hi",
"T 51 Hex Mid",
"T 52 Hex Low",
"T 53 HexFloor",
"T 54 ClascHex",
"T 55 Noise Hi",
"T 56 Noise Lo",
"T 57 Exo Hi",
"T 58 Exo Mid",
"T 59 Exo Low",
"T 60 OilCanHi",
"T 61 OilCanLo",
"T 62 8oh8 Hi",
"T 63 8oh8 Mid",
"T 64 8oh8 Low",
"T 65 Bit TomH",
"T 66 Bit TomL",
"T 67 BombTomH",
"T 68 BombTomM",
"T 69 BombTomL",
"T 70 Mad Roto",
"H 0 BrtTite1",
"H 1 BrtTite2",
"H 2 Brt Clsd",
"H 3 Brt Half",
"H 4 BrtLoose",
"H 5 BrtLoosr",
"H 6 DynBrt 1",
"H 7 DynBrt 2",
"H 8 Brt Open",
"H 9 Brt Foot",
"H 10 SR Clsd",
"H 11 SR Half",
"H 12 SR Open",
"H 13 LiteClsd",
"H 14 Lite Dyn",
"H 15 LiteHalf",
"H 16 LiteOpen",
"H 17 FlngClsd",
"H 18 FlngHalf",
"H 19 FlngOpen",
"H 20 Rok Clsd",
"H 21 RokLoose",
"H 22 RokSlosh",
"H 23 Rok Open",
"H 24 Rok Foot",
"H 25 8oh8Clsd",
"H 26 8oh8Open",
"H 27 Rap Clsd",
"H 28 Rap Half",
"H 29 Rap Open",
"H 30 Zip Clsd",
"H 31 Zip Open",
"H 32 Zap Clsd",
"H 33 Zap Open",
"C 0 Ride Cym",
"C 1 VeloRide",
"C 2 PingRide",
"C 3 Exotic",
"C 4 RideBell",
"C 5 TransBel",
"C 6 El Bell",
"C 7 Avantia",
"C 8 CymParts",
"C 9 BrtCrash",
"C 10 Ster Brt",
"C 11 DrkCrash",
"C 12 SterDark",
"C 13 LR Crsh1",
"C 14 LR Crsh2",
"C 15 IceCrash",
"C 16 ZootMute",
"C 17 DrtyMute",
"C 18 Splash",
"C 19 MicroCym",
"C 20 8 Splash",
"C 21 China",
"C 22 SterChna",
"C 23 Woo Han",
"C 24 Doppler",
"C 25 TipShank",
"C 26 SterPhaz",
"C 27 Hammered",
"C 28 EastWest",
"C 29 Orch Cym",
"C 30 8oh8Crsh",
"C 31 8CrashFl",
"C 32 Syn Pang",
"C 33 SynCrash",
"C 34 BlastCym",
"C 35 Noiz Cym",
"P 0 Agogo Hi",
"P 1 Agogo Lo",
"P 2 AgoPitch",
"P 3 Noggin",
"P 4 Reco Hi",
"P 5 Reco Lo",
"P 6 Clay Pot",
"P 7 Triangle",
"P 8 Tri Mute",
"P 9 TriPitch",
"P 10 DrumStix",
"P 11 Cowbell",
"P 12 Tambrine",
"P 13 TamPitch",
"P 14 Sleighbl",
"P 15 Snowjob",
"P 16 Cabasa",
"P 17 SharpShk",
"P 18 TikTak",
"P 19 Maracas",
"P 20 ShakerHi",
"P 21 ShakerLo",
"P 22 Bead Pot",
"P 23 BeadShk1",
"P 24 BeadShk2",
"P 25 BeadShk3",
"P 26 SynShkr1",
"P 27 SynShkr2",
"P 28 SynShkrD",
"P 29 Rattle",
"P 30 CrashrHd",
"P 31 CrashrSf",
"P 32 Rainshak",
"P 33 RainStik",
"P 34 Gravel",
"P 35 RatlBwap",
"P 36 Bongo Hi",
"P 37 BngHiSlp",
"P 38 Bongo Lo",
"P 39 BngLoSlp",
"P 40 Conga Hi",
"P 41 Conga Lo",
"P 42 CongaSlp",
"P 43 Slap Dyn",
"P 44 Screech",
"P 45 Cuica Hi",
"P 46 Cuica Lo",
"P 47 AmIndian",
"P 48 Tatonka",
"P 49 WarPaint",
"P 50 BoLanGoo",
"P 51 BoLanDyn",
"P 52 BreketaH",
"P 53 BreketaL",
"P 54 BrktaDyn",
"P 55 Elephant",
"P 56 GhatamHi",
"P 57 GhatamLo",
"P 58 Udu",
"P 59 Ethnika",
"P 60 Amazon",
"P 61 Nagara",
"P 62 Oobla Hi",
"P 63 Oobla Lo",
"P 64 OoblaDyn",
"P 65 Paah",
"P 66 Ethno",
"P 67 EasternV",
"P 68 TalkngHi",
"P 69 TalkngLo",
"P 70 HandDrum",
"P 71 Tavil Hi",
"P 72 Tavil Lo",
"P 73 Monastic",
"P 74 Tavasa",
"P 75 Tabla",
"P 76 TblaDyn1",
"P 77 TblaDyn2",
"P 78 Ghatabla",
"P 79 Tablchrd",
"P 80 Haji",
"P 81 TimbleHi",
"P 82 TimbleLo",
"P 83 8cwPitch",
"P 84 8oh8 Cow",
"P 85 8oh8 Rim",
"P 86 CongaRap",
"P 87 8oh8Clap",
"P 88 9oh9Clap",
"P 89 Big Clap",
"P 90 LiteSnap",
"P 91 ClscSnap",
"P 92 Pwr Snap",
"P 93 Clave",
"P 94 ClveKord",
"P 95 Castanet",
"P 96 CastRoll",
"P 97 CastDyn1",
"P 98 CastDyn2",
"P 99 Wood Hi",
"P 100 Wood Lo",
"P 101 Block Hi",
"P 102 Block Lo",
"P 103 TempleHi",
"P 104 TempleLo",
"P 105 Vibrslap",
"P 106 Oil Can",
"P 107 OilPitch",
"P 108 MetalTik",
"P 109 Plucky",
"P 110 PopCheek",
"P 111 Rappotab",
"P 112 I'm Clay",
"P 113 BigoBrek",
"P 114 SpacePrc",
"E 0 Anvil",
"E 1 BallPeen",
"E 2 BattyBel",
"E 3 4 Star",
"E 4 Blksmith",
"E 5 Clank",
"E 6 Tank Hit",
"E 7 SunBurst",
"E 8 Industry",
"E 9 Big Shot",
"E 10 Metal",
"E 11 WhtNoiz1",
"E 12 WhtNoiz2",
"E 13 Spectre1",
"E 14 Spectre2",
"E 15 Tesla",
"E 16 Machine",
"E 17 PinkZap1",
"E 18 PinkZap2",
"E 19 PnkBlst1",
"E 20 PnkBlst2",
"E 21 Zap 1",
"E 22 Zap 2",
"E 23 Zap 3",
"E 24 Wood Zap",
"E 25 Dyn Zap",
"E 26 Dual Zap",
"E 27 Residue",
"E 28 WhipCrak",
"E 29 Kung Fu",
"E 30 WhipNoiz",
"E 31 Vinyl 1",
"E 32 Vinyl 2",
"E 33 DynVinyl",
"E 34 PwrGtrHi",
"E 35 PwrGtrLo",
"E 36 Gtr Hit",
"E 37 FlngGtrH",
"E 38 FlngGtrL",
"E 39 Guitrbot",
"E 40 Slippery",
"E 41 Danger!",
"E 42 Screech",
"E 43 FlScreeH",
"E 44 FlScreeL",
"E 45 Mercury",
"E 46 Technoid",
"E 47 Bucket",
"E 48 Grab Bag",
"E 49 Alloys 1",
"E 50 Alloys 2",
"E 51 Velopede",
"E 52 Static",
"E 53 Pole",
"E 54 Froggy",
"E 55 Sun City",
"E 56 InduHit",
"E 57 JetBeads",
"E 58 Plonk",
"E 59 Klonk",
"E 60 Pop",
"E 61 Knock",
"E 62 Metronom",
"E 63 Silence",
"R 0 BrtHatC1",
"R 1 BrtHatC2",
"R 2 RokHatCl",
"R 3 Real Snr",
"R 4 LooseSnr",
"R 5 TinSnare",
"R 6 ValleySn",
"R 7 FreakSnr",
"R 8 Aliens",
"R 9 Zapalog",
"R 10 Blasters",
"R 11 Metalize",
"R 12 ShknBake",
"R 13 Triblism",
"R 14 CngoBngo",
"R 15 RagaBabl"
};
public boolean testVerify(Synth synth2,
String key,
Object obj1, Object obj2)
{
// spaces at end of name are fine
if (key.equals("name")) return true;
return false;
}
/** Return a list of all patch number names. Default is { "Main" } */
public String[] getPatchNumberNames() { return buildIntegerNames(21, 0); }
/** Return a list whether patches in banks are writeable. Default is { false } */
public boolean[] getWriteableBanks() { return new boolean[] { true }; }
/** Return a list whether individual patches can be written. Default is FALSE. */
public boolean getSupportsPatchWrites() { return true; }
public int getPatchNameLength() { return 15; }
public boolean librarianTested() { return true; }
}
|
extended download time
|
edisyn/synth/alesisd4/AlesisD4.java
|
extended download time
|
|
Java
|
apache-2.0
|
f7db7c2c8a2b911fe93cde170f285100bb1a2c1d
| 0
|
reportportal/service-api,reportportal/service-api,reportportal/service-api,reportportal/service-api,reportportal/service-api
|
/*
* Copyright 2019 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.ta.reportportal.ws.controller;
import com.epam.reportportal.commons.ContentTypeResolver;
import com.epam.ta.reportportal.commons.EntityUtils;
import com.epam.ta.reportportal.commons.ReportPortalUser;
import com.epam.ta.reportportal.core.file.GetFileHandler;
import com.epam.ta.reportportal.core.user.EditUserHandler;
import com.epam.ta.reportportal.exception.ReportPortalException;
import com.epam.ta.reportportal.ws.model.OperationCompletionRS;
import io.swagger.annotations.ApiOperation;
import org.apache.commons.io.IOUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.security.core.annotation.AuthenticationPrincipal;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.InputStream;
/**
* @author Dzianis_Shybeka
*/
@RestController
@RequestMapping("/v1/data")
public class FileStorageController {
private final EditUserHandler editUserHandler;
private final GetFileHandler getFileHandler;
private final ContentTypeResolver contentTypeResolver;
@Autowired
public FileStorageController(EditUserHandler editUserHandler, GetFileHandler getFileHandler, ContentTypeResolver contentTypeResolver) {
this.editUserHandler = editUserHandler;
this.getFileHandler = getFileHandler;
this.contentTypeResolver = contentTypeResolver;
}
@Transactional(readOnly = true)
@GetMapping(value = "/{dataId}")
public void getFile(@PathVariable("dataId") String dataId, HttpServletResponse response,
@AuthenticationPrincipal ReportPortalUser user) {
toResponse(response, getFileHandler.loadFileById(dataId));
}
/**
* (non-Javadoc)
*/
@Transactional(readOnly = true)
@GetMapping(value = "/photo")
@ApiOperation("Get photo of current user")
public void getMyPhoto(@AuthenticationPrincipal ReportPortalUser user, HttpServletResponse response) {
toResponse(response, getFileHandler.getUserPhoto(user));
}
/**
* (non-Javadoc)
*/
@Transactional(readOnly = true)
@GetMapping(value = "/userphoto")
@ApiOperation("Get user's photo")
public void getUserPhoto(@RequestParam(value = "id") String username, HttpServletResponse response,
@AuthenticationPrincipal ReportPortalUser user) {
toResponse(response, getFileHandler.getUserPhoto(EntityUtils.normalizeId(username), user));
}
@Transactional
@PostMapping(value = "/photo", consumes = { MediaType.MULTIPART_FORM_DATA_VALUE })
@ApiOperation("Upload user's photo")
public OperationCompletionRS uploadPhoto(@RequestParam("file") MultipartFile file, @AuthenticationPrincipal ReportPortalUser user) {
return editUserHandler.uploadPhoto(EntityUtils.normalizeId(user.getUsername()), file);
}
@Transactional
@DeleteMapping(value = "/photo")
@ApiOperation("Delete user's photo")
public OperationCompletionRS deletePhoto(@AuthenticationPrincipal ReportPortalUser user) {
return editUserHandler.deletePhoto(EntityUtils.normalizeId(user.getUsername()));
}
/**
* Copies data from provided {@link InputStream} to Response
*
* @param response Response
* @param inputStream Stored data
*/
private void toResponse(HttpServletResponse response, InputStream inputStream) {
if (inputStream != null) {
try {
IOUtils.copy(inputStream, response.getOutputStream());
response.setContentType(contentTypeResolver.detectContentType(inputStream));
} catch (IOException e) {
throw new ReportPortalException("Unable to retrieve binary data from data storage", e);
}
} else {
response.setStatus(HttpStatus.NO_CONTENT.value());
}
}
}
|
src/main/java/com/epam/ta/reportportal/ws/controller/FileStorageController.java
|
/*
* Copyright 2019 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.ta.reportportal.ws.controller;
import com.epam.reportportal.commons.ContentTypeResolver;
import com.epam.ta.reportportal.commons.EntityUtils;
import com.epam.ta.reportportal.commons.ReportPortalUser;
import com.epam.ta.reportportal.core.file.GetFileHandler;
import com.epam.ta.reportportal.core.user.EditUserHandler;
import com.epam.ta.reportportal.exception.ReportPortalException;
import com.epam.ta.reportportal.ws.model.OperationCompletionRS;
import io.swagger.annotations.ApiOperation;
import org.apache.commons.io.IOUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.security.core.annotation.AuthenticationPrincipal;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.InputStream;
/**
* @author Dzianis_Shybeka
*/
@RestController
@RequestMapping("/v1/data")
public class FileStorageController {
private final EditUserHandler editUserHandler;
private final GetFileHandler getFileHandler;
private final ContentTypeResolver contentTypeResolver;
@Autowired
public FileStorageController(EditUserHandler editUserHandler, GetFileHandler getFileHandler, ContentTypeResolver contentTypeResolver) {
this.editUserHandler = editUserHandler;
this.getFileHandler = getFileHandler;
this.contentTypeResolver = contentTypeResolver;
}
@Transactional(readOnly = true)
@GetMapping(value = "/{dataId}")
public void getFile(@PathVariable("dataId") String dataId, HttpServletResponse response,
@AuthenticationPrincipal ReportPortalUser user) {
toResponse(response, getFileHandler.loadFileById(dataId));
}
/**
* (non-Javadoc)
*/
@Transactional(readOnly = true)
@GetMapping(value = "/photo")
@ApiOperation("Get photo of current user")
public void getMyPhoto(@AuthenticationPrincipal ReportPortalUser user, HttpServletResponse response) {
toResponse(response, getFileHandler.getUserPhoto(user));
}
/**
* (non-Javadoc)
*/
@Transactional(readOnly = true)
@GetMapping(value = "/userphoto")
@ApiOperation("Get user's photo")
public void getUserPhoto(@RequestParam(value = "id") String username, HttpServletResponse response,
@AuthenticationPrincipal ReportPortalUser user) {
toResponse(response, getFileHandler.getUserPhoto(EntityUtils.normalizeId(username), user));
}
@Transactional
@PostMapping(value = "/photo", consumes = { MediaType.MULTIPART_FORM_DATA_VALUE })
@ApiOperation("Upload user's photo")
public OperationCompletionRS uploadPhoto(@RequestParam("file") MultipartFile file, @AuthenticationPrincipal ReportPortalUser user) {
return editUserHandler.uploadPhoto(EntityUtils.normalizeId(user.getUsername()), file);
}
@Transactional
@DeleteMapping(value = "/photo")
@ApiOperation("Delete user's photo")
public OperationCompletionRS deletePhoto(@AuthenticationPrincipal ReportPortalUser user) {
return editUserHandler.deletePhoto(EntityUtils.normalizeId(user.getUsername()));
}
/**
* Copies data from provided {@link InputStream} to Response
*
* @param response Response
* @param inputStream Stored data
*/
private void toResponse(HttpServletResponse response, InputStream inputStream) {
if (inputStream != null) {
try {
response.setContentType(contentTypeResolver.detectContentType(inputStream));
IOUtils.copy(inputStream, response.getOutputStream());
} catch (IOException e) {
throw new ReportPortalException("Unable to retrieve binary data from data storage", e);
}
} else {
response.setStatus(HttpStatus.NO_CONTENT.value());
}
}
}
|
Attachment type resolving logic moved after copying (#927)
|
src/main/java/com/epam/ta/reportportal/ws/controller/FileStorageController.java
|
Attachment type resolving logic moved after copying (#927)
|
|
Java
|
apache-2.0
|
15949ec6e623e98e3a53c2ac95ebb2643b24b3c4
| 0
|
romanoid/buck,Addepar/buck,Addepar/buck,SeleniumHQ/buck,JoelMarcey/buck,Addepar/buck,JoelMarcey/buck,facebook/buck,zpao/buck,romanoid/buck,kageiit/buck,JoelMarcey/buck,JoelMarcey/buck,facebook/buck,SeleniumHQ/buck,nguyentruongtho/buck,zpao/buck,Addepar/buck,facebook/buck,nguyentruongtho/buck,zpao/buck,JoelMarcey/buck,Addepar/buck,kageiit/buck,facebook/buck,Addepar/buck,nguyentruongtho/buck,SeleniumHQ/buck,SeleniumHQ/buck,JoelMarcey/buck,romanoid/buck,JoelMarcey/buck,Addepar/buck,SeleniumHQ/buck,nguyentruongtho/buck,JoelMarcey/buck,romanoid/buck,romanoid/buck,SeleniumHQ/buck,kageiit/buck,nguyentruongtho/buck,Addepar/buck,SeleniumHQ/buck,Addepar/buck,JoelMarcey/buck,JoelMarcey/buck,romanoid/buck,facebook/buck,romanoid/buck,kageiit/buck,zpao/buck,nguyentruongtho/buck,zpao/buck,JoelMarcey/buck,romanoid/buck,kageiit/buck,romanoid/buck,Addepar/buck,zpao/buck,JoelMarcey/buck,SeleniumHQ/buck,romanoid/buck,SeleniumHQ/buck,SeleniumHQ/buck,Addepar/buck,romanoid/buck,Addepar/buck,JoelMarcey/buck,SeleniumHQ/buck,Addepar/buck,facebook/buck,romanoid/buck,zpao/buck,facebook/buck,kageiit/buck,romanoid/buck,nguyentruongtho/buck,kageiit/buck,SeleniumHQ/buck,SeleniumHQ/buck
|
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.parser;
import com.facebook.buck.core.cell.Cell;
import com.facebook.buck.core.config.BuckConfig;
import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.model.EmptyTargetConfiguration;
import com.facebook.buck.core.model.platform.ConstraintBasedPlatform;
import com.facebook.buck.core.model.platform.ConstraintResolver;
import com.facebook.buck.core.model.platform.ConstraintValue;
import com.facebook.buck.core.model.platform.Platform;
import com.facebook.buck.core.model.targetgraph.RawTargetNode;
import com.facebook.buck.core.model.targetgraph.TargetNode;
import com.facebook.buck.core.model.targetgraph.impl.TargetNodeFactory;
import com.facebook.buck.core.parser.buildtargetparser.UnconfiguredBuildTargetFactory;
import com.facebook.buck.core.resources.ResourcesConfig;
import com.facebook.buck.core.rules.config.ConfigurationRule;
import com.facebook.buck.core.rules.config.ConfigurationRuleResolver;
import com.facebook.buck.core.rules.config.impl.ConfigurationRuleSelectableResolver;
import com.facebook.buck.core.rules.config.impl.SameThreadConfigurationRuleResolver;
import com.facebook.buck.core.rules.knowntypes.KnownRuleTypesProvider;
import com.facebook.buck.core.rules.platform.PlatformRule;
import com.facebook.buck.core.rules.platform.RuleBasedConstraintResolver;
import com.facebook.buck.core.select.SelectableResolver;
import com.facebook.buck.core.select.SelectorListResolver;
import com.facebook.buck.core.select.impl.DefaultSelectorListResolver;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.io.watchman.Watchman;
import com.facebook.buck.log.GlobalStateManager;
import com.facebook.buck.manifestservice.ManifestService;
import com.facebook.buck.rules.coercer.ConstructorArgMarshaller;
import com.facebook.buck.rules.coercer.TypeCoercerFactory;
import com.facebook.buck.util.ThrowingCloseableMemoizedSupplier;
import com.facebook.buck.util.cache.FileHashCache;
import com.facebook.buck.util.concurrent.CommandThreadFactory;
import com.facebook.buck.util.concurrent.ConcurrencyLimit;
import com.facebook.buck.util.concurrent.MostExecutors;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.io.IOException;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;
/** Version of {@link PerBuildStateFactory} that supports configurable attributes. */
class PerBuildStateFactoryWithConfigurableAttributes extends PerBuildStateFactory {
private final TypeCoercerFactory typeCoercerFactory;
private final ConstructorArgMarshaller marshaller;
private final KnownRuleTypesProvider knownRuleTypesProvider;
private final ParserPythonInterpreterProvider parserPythonInterpreterProvider;
private final Watchman watchman;
private final BuckEventBus eventBus;
private final UnconfiguredBuildTargetFactory unconfiguredBuildTargetFactory;
PerBuildStateFactoryWithConfigurableAttributes(
TypeCoercerFactory typeCoercerFactory,
ConstructorArgMarshaller marshaller,
KnownRuleTypesProvider knownRuleTypesProvider,
ParserPythonInterpreterProvider parserPythonInterpreterProvider,
Watchman watchman,
BuckEventBus eventBus,
ThrowingCloseableMemoizedSupplier<ManifestService, IOException> manifestServiceSupplier,
FileHashCache fileHashCache,
UnconfiguredBuildTargetFactory unconfiguredBuildTargetFactory) {
super(manifestServiceSupplier, fileHashCache);
this.typeCoercerFactory = typeCoercerFactory;
this.marshaller = marshaller;
this.knownRuleTypesProvider = knownRuleTypesProvider;
this.parserPythonInterpreterProvider = parserPythonInterpreterProvider;
this.watchman = watchman;
this.eventBus = eventBus;
this.unconfiguredBuildTargetFactory = unconfiguredBuildTargetFactory;
}
@Override
protected PerBuildStateWithConfigurableAttributes create(
ParsingContext parsingContext,
DaemonicParserState daemonicParserState,
ImmutableList<String> targetPlatforms,
Optional<AtomicLong> parseProcessedBytes) {
Cell rootCell = parsingContext.getCell();
ListeningExecutorService executorService = parsingContext.getExecutor();
SymlinkCache symlinkCache = new SymlinkCache(eventBus, daemonicParserState);
CellManager cellManager = new CellManager(symlinkCache);
TargetNodeListener<TargetNode<?>> symlinkCheckers = cellManager::registerInputsUnderSymlinks;
ParserConfig parserConfig = rootCell.getBuckConfig().getView(ParserConfig.class);
int numParsingThreads = parserConfig.getNumParsingThreads();
DefaultProjectBuildFileParserFactory projectBuildFileParserFactory =
new DefaultProjectBuildFileParserFactory(
typeCoercerFactory,
parserPythonInterpreterProvider,
parsingContext.isProfilingEnabled(),
parseProcessedBytes,
knownRuleTypesProvider,
manifestServiceSupplier,
fileHashCache);
ProjectBuildFileParserPool projectBuildFileParserPool =
new ProjectBuildFileParserPool(
numParsingThreads, // Max parsers to create per cell.
projectBuildFileParserFactory,
parsingContext.isProfilingEnabled());
TargetNodeFactory targetNodeFactory = new TargetNodeFactory(typeCoercerFactory);
BuildFileRawNodeParsePipeline buildFileRawNodeParsePipeline =
new BuildFileRawNodeParsePipeline(
new PipelineNodeCache<>(daemonicParserState.getRawNodeCache()),
projectBuildFileParserPool,
executorService,
eventBus,
watchman);
BuildTargetRawNodeParsePipeline buildTargetRawNodeParsePipeline =
new BuildTargetRawNodeParsePipeline(executorService, buildFileRawNodeParsePipeline);
ListeningExecutorService pipelineExecutorService =
parserConfig.getEnableParallelParsing()
? executorService
: MoreExecutors.newDirectExecutorService();
boolean enableSpeculativeParsing =
parserConfig.getEnableParallelParsing()
&& parsingContext.getSpeculativeParsing() == SpeculativeParsing.ENABLED;
RawTargetNodePipeline rawTargetNodePipeline =
new RawTargetNodePipeline(
pipelineExecutorService,
daemonicParserState.getOrCreateNodeCache(RawTargetNode.class),
eventBus,
buildFileRawNodeParsePipeline,
buildTargetRawNodeParsePipeline,
new DefaultRawTargetNodeFactory(knownRuleTypesProvider, new BuiltTargetVerifier()));
PackageBoundaryChecker packageBoundaryChecker =
new ThrowingPackageBoundaryChecker(daemonicParserState.getBuildFileTrees());
ParserTargetNodeFactory<RawTargetNode> nonResolvingRawTargetNodeToTargetNodeFactory =
new NonResolvingRawTargetNodeToTargetNodeFactory(
DefaultParserTargetNodeFactory.createForParser(
knownRuleTypesProvider,
marshaller,
daemonicParserState.getBuildFileTrees(),
symlinkCheckers,
targetNodeFactory));
// This pipeline uses a direct executor instead of pipelineExecutorService to avoid
// deadlocks happening when too many node are requested from targetNodeParsePipeline.
// That pipeline does blocking calls to get nodes from nonResolvingTargetNodeParsePipeline
// which can lead to deadlocks.
ParsePipeline<TargetNode<?>> nonResolvingTargetNodeParsePipeline =
new RawTargetNodeToTargetNodeParsePipeline(
daemonicParserState.getOrCreateNodeCache(TargetNode.class),
MoreExecutors.newDirectExecutorService(),
rawTargetNodePipeline,
eventBus,
"nonresolving_raw_target_node_parse_pipeline",
enableSpeculativeParsing,
nonResolvingRawTargetNodeToTargetNodeFactory);
ConfigurationRuleResolver configurationRuleResolver =
new SameThreadConfigurationRuleResolver(
cellManager::getCell, nonResolvingTargetNodeParsePipeline::getNode);
SelectableResolver selectableResolver =
new ConfigurationRuleSelectableResolver(configurationRuleResolver);
SelectorListResolver selectorListResolver = new DefaultSelectorListResolver(selectableResolver);
ConstraintResolver constraintResolver =
new RuleBasedConstraintResolver(configurationRuleResolver);
Supplier<Platform> targetPlatform =
Suppliers.memoize(
() ->
getTargetPlatform(
configurationRuleResolver, constraintResolver, rootCell, targetPlatforms));
RawTargetNodeToTargetNodeFactory rawTargetNodeToTargetNodeFactory =
new RawTargetNodeToTargetNodeFactory(
knownRuleTypesProvider,
marshaller,
targetNodeFactory,
packageBoundaryChecker,
symlinkCheckers,
selectorListResolver,
constraintResolver,
targetPlatform);
ListeningExecutorService configuredPipelineExecutor =
MoreExecutors.listeningDecorator(
createExecutorService(rootCell.getBuckConfig(), "configured-pipeline"));
ParsePipeline<TargetNode<?>> targetNodeParsePipeline =
new RawTargetNodeToTargetNodeParsePipeline(
daemonicParserState.getOrCreateNodeCache(TargetNode.class),
configuredPipelineExecutor,
rawTargetNodePipeline,
eventBus,
"configured_raw_target_node_parse_pipeline",
enableSpeculativeParsing,
rawTargetNodeToTargetNodeFactory) {
@Override
public void close() {
super.close();
nonResolvingTargetNodeParsePipeline.close();
rawTargetNodePipeline.close();
try {
MostExecutors.shutdown(configuredPipelineExecutor, 1, TimeUnit.MINUTES);
} catch (InterruptedException e) {
}
}
};
cellManager.register(rootCell);
return new PerBuildStateWithConfigurableAttributes(
cellManager,
buildFileRawNodeParsePipeline,
targetNodeParsePipeline,
parsingContext,
constraintResolver,
selectorListResolver,
targetPlatform);
}
@SuppressWarnings("PMD.AvoidThreadGroup")
private static ExecutorService createExecutorService(BuckConfig buckConfig, String name) {
ConcurrencyLimit concurrencyLimit =
buckConfig.getView(ResourcesConfig.class).getConcurrencyLimit();
return MostExecutors.newMultiThreadExecutor(
new ThreadFactoryBuilder()
.setNameFormat(name + "-%d")
.setThreadFactory(
new CommandThreadFactory(
r -> new Thread(new ThreadGroup(name), r),
GlobalStateManager.singleton().getThreadToCommandRegister()))
.build(),
concurrencyLimit.managedThreadCount);
}
private Platform getTargetPlatform(
ConfigurationRuleResolver configurationRuleResolver,
ConstraintResolver constraintResolver,
Cell rootCell,
ImmutableList<String> targetPlatforms) {
if (targetPlatforms.isEmpty()) {
return new ConstraintBasedPlatform("", ImmutableSet.of());
}
String targetPlatformName = targetPlatforms.get(0);
ConfigurationRule configurationRule =
configurationRuleResolver.getRule(
unconfiguredBuildTargetFactory
.create(rootCell.getCellPathResolver(), targetPlatformName)
.configure(EmptyTargetConfiguration.INSTANCE));
if (!(configurationRule instanceof PlatformRule)) {
throw new HumanReadableException(
"%s is used as a target platform, but not declared using `platform` rule",
targetPlatformName);
}
PlatformRule platformRule = (PlatformRule) configurationRule;
ImmutableSet<ConstraintValue> constraintValues =
platformRule
.getConstrainValues()
.stream()
.map(constraintResolver::getConstraintValue)
.collect(ImmutableSet.toImmutableSet());
return new ConstraintBasedPlatform(targetPlatformName, constraintValues);
}
}
|
src/com/facebook/buck/parser/PerBuildStateFactoryWithConfigurableAttributes.java
|
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.parser;
import com.facebook.buck.core.cell.Cell;
import com.facebook.buck.core.config.BuckConfig;
import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.model.EmptyTargetConfiguration;
import com.facebook.buck.core.model.platform.ConstraintBasedPlatform;
import com.facebook.buck.core.model.platform.ConstraintResolver;
import com.facebook.buck.core.model.platform.ConstraintValue;
import com.facebook.buck.core.model.platform.Platform;
import com.facebook.buck.core.model.targetgraph.RawTargetNode;
import com.facebook.buck.core.model.targetgraph.TargetNode;
import com.facebook.buck.core.model.targetgraph.impl.TargetNodeFactory;
import com.facebook.buck.core.parser.buildtargetparser.UnconfiguredBuildTargetFactory;
import com.facebook.buck.core.resources.ResourcesConfig;
import com.facebook.buck.core.rules.config.ConfigurationRule;
import com.facebook.buck.core.rules.config.ConfigurationRuleResolver;
import com.facebook.buck.core.rules.config.impl.ConfigurationRuleSelectableResolver;
import com.facebook.buck.core.rules.config.impl.SameThreadConfigurationRuleResolver;
import com.facebook.buck.core.rules.knowntypes.KnownRuleTypesProvider;
import com.facebook.buck.core.rules.platform.PlatformRule;
import com.facebook.buck.core.rules.platform.RuleBasedConstraintResolver;
import com.facebook.buck.core.select.SelectableResolver;
import com.facebook.buck.core.select.SelectorListResolver;
import com.facebook.buck.core.select.impl.DefaultSelectorListResolver;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.io.watchman.Watchman;
import com.facebook.buck.log.GlobalStateManager;
import com.facebook.buck.manifestservice.ManifestService;
import com.facebook.buck.rules.coercer.ConstructorArgMarshaller;
import com.facebook.buck.rules.coercer.TypeCoercerFactory;
import com.facebook.buck.util.ThrowingCloseableMemoizedSupplier;
import com.facebook.buck.util.cache.FileHashCache;
import com.facebook.buck.util.concurrent.CommandThreadFactory;
import com.facebook.buck.util.concurrent.ConcurrencyLimit;
import com.facebook.buck.util.concurrent.MostExecutors;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.io.IOException;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;
/** Version of {@link PerBuildStateFactory} that supports configurable attributes. */
class PerBuildStateFactoryWithConfigurableAttributes extends PerBuildStateFactory {

  private final TypeCoercerFactory typeCoercerFactory;
  private final ConstructorArgMarshaller marshaller;
  private final KnownRuleTypesProvider knownRuleTypesProvider;
  private final ParserPythonInterpreterProvider parserPythonInterpreterProvider;
  private final Watchman watchman;
  private final BuckEventBus eventBus;
  private final UnconfiguredBuildTargetFactory unconfiguredBuildTargetFactory;

  PerBuildStateFactoryWithConfigurableAttributes(
      TypeCoercerFactory typeCoercerFactory,
      ConstructorArgMarshaller marshaller,
      KnownRuleTypesProvider knownRuleTypesProvider,
      ParserPythonInterpreterProvider parserPythonInterpreterProvider,
      Watchman watchman,
      BuckEventBus eventBus,
      ThrowingCloseableMemoizedSupplier<ManifestService, IOException> manifestServiceSupplier,
      FileHashCache fileHashCache,
      UnconfiguredBuildTargetFactory unconfiguredBuildTargetFactory) {
    super(manifestServiceSupplier, fileHashCache);
    this.typeCoercerFactory = typeCoercerFactory;
    this.marshaller = marshaller;
    this.knownRuleTypesProvider = knownRuleTypesProvider;
    this.parserPythonInterpreterProvider = parserPythonInterpreterProvider;
    this.watchman = watchman;
    this.eventBus = eventBus;
    this.unconfiguredBuildTargetFactory = unconfiguredBuildTargetFactory;
  }

  /**
   * Wires up the full parse-pipeline stack (raw build file parsing, raw target nodes,
   * configured target nodes) for one build and returns the per-build state holding it.
   */
  @Override
  protected PerBuildStateWithConfigurableAttributes create(
      ParsingContext parsingContext,
      DaemonicParserState daemonicParserState,
      ImmutableList<String> targetPlatforms,
      Optional<AtomicLong> parseProcessedBytes) {

    Cell rootCell = parsingContext.getCell();
    ListeningExecutorService executorService = parsingContext.getExecutor();
    SymlinkCache symlinkCache = new SymlinkCache(eventBus, daemonicParserState);
    CellManager cellManager = new CellManager(symlinkCache);

    TargetNodeListener<TargetNode<?>> symlinkCheckers = cellManager::registerInputsUnderSymlinks;
    ParserConfig parserConfig = rootCell.getBuckConfig().getView(ParserConfig.class);
    int numParsingThreads = parserConfig.getNumParsingThreads();
    DefaultProjectBuildFileParserFactory projectBuildFileParserFactory =
        new DefaultProjectBuildFileParserFactory(
            typeCoercerFactory,
            parserPythonInterpreterProvider,
            parsingContext.isProfilingEnabled(),
            parseProcessedBytes,
            knownRuleTypesProvider,
            manifestServiceSupplier,
            fileHashCache);
    ProjectBuildFileParserPool projectBuildFileParserPool =
        new ProjectBuildFileParserPool(
            numParsingThreads, // Max parsers to create per cell.
            projectBuildFileParserFactory,
            parsingContext.isProfilingEnabled());

    TargetNodeFactory targetNodeFactory = new TargetNodeFactory(typeCoercerFactory);

    BuildFileRawNodeParsePipeline buildFileRawNodeParsePipeline =
        new BuildFileRawNodeParsePipeline(
            new PipelineNodeCache<>(daemonicParserState.getRawNodeCache()),
            projectBuildFileParserPool,
            executorService,
            eventBus,
            watchman);

    BuildTargetRawNodeParsePipeline buildTargetRawNodeParsePipeline =
        new BuildTargetRawNodeParsePipeline(executorService, buildFileRawNodeParsePipeline);

    ListeningExecutorService pipelineExecutorService =
        parserConfig.getEnableParallelParsing()
            ? executorService
            : MoreExecutors.newDirectExecutorService();
    boolean enableSpeculativeParsing =
        parserConfig.getEnableParallelParsing()
            && parsingContext.getSpeculativeParsing() == SpeculativeParsing.ENABLED;
    RawTargetNodePipeline rawTargetNodePipeline =
        new RawTargetNodePipeline(
            pipelineExecutorService,
            daemonicParserState.getOrCreateNodeCache(RawTargetNode.class),
            eventBus,
            buildFileRawNodeParsePipeline,
            buildTargetRawNodeParsePipeline,
            new DefaultRawTargetNodeFactory(knownRuleTypesProvider, new BuiltTargetVerifier()));

    PackageBoundaryChecker packageBoundaryChecker =
        new ThrowingPackageBoundaryChecker(daemonicParserState.getBuildFileTrees());

    ParserTargetNodeFactory<RawTargetNode> nonResolvingRawTargetNodeToTargetNodeFactory =
        new NonResolvingRawTargetNodeToTargetNodeFactory(
            DefaultParserTargetNodeFactory.createForParser(
                knownRuleTypesProvider,
                marshaller,
                daemonicParserState.getBuildFileTrees(),
                symlinkCheckers,
                targetNodeFactory));

    // This pipeline uses a direct executor instead of pipelineExecutorService to avoid
    // deadlocks happening when too many node are requested from targetNodeParsePipeline.
    // That pipeline does blocking calls to get nodes from nonResolvingTargetNodeParsePipeline
    // which can lead to deadlocks.
    ParsePipeline<TargetNode<?>> nonResolvingTargetNodeParsePipeline =
        new RawTargetNodeToTargetNodeParsePipeline(
            daemonicParserState.getOrCreateNodeCache(TargetNode.class),
            MoreExecutors.newDirectExecutorService(),
            rawTargetNodePipeline,
            eventBus,
            "nonresolving_raw_target_node_parse_pipeline",
            enableSpeculativeParsing,
            nonResolvingRawTargetNodeToTargetNodeFactory);

    ConfigurationRuleResolver configurationRuleResolver =
        new SameThreadConfigurationRuleResolver(
            cellManager::getCell, nonResolvingTargetNodeParsePipeline::getNode);

    SelectableResolver selectableResolver =
        new ConfigurationRuleSelectableResolver(configurationRuleResolver);

    SelectorListResolver selectorListResolver = new DefaultSelectorListResolver(selectableResolver);

    ConstraintResolver constraintResolver =
        new RuleBasedConstraintResolver(configurationRuleResolver);

    Supplier<Platform> targetPlatform =
        Suppliers.memoize(
            () ->
                getTargetPlatform(
                    configurationRuleResolver, constraintResolver, rootCell, targetPlatforms));

    RawTargetNodeToTargetNodeFactory rawTargetNodeToTargetNodeFactory =
        new RawTargetNodeToTargetNodeFactory(
            knownRuleTypesProvider,
            marshaller,
            targetNodeFactory,
            packageBoundaryChecker,
            symlinkCheckers,
            selectorListResolver,
            constraintResolver,
            targetPlatform);

    ListeningExecutorService configuredPipelineExecutor =
        MoreExecutors.listeningDecorator(
            createExecutorService(rootCell.getBuckConfig(), "configured-pipeline"));

    ParsePipeline<TargetNode<?>> targetNodeParsePipeline =
        new RawTargetNodeToTargetNodeParsePipeline(
            daemonicParserState.getOrCreateNodeCache(TargetNode.class),
            configuredPipelineExecutor,
            rawTargetNodePipeline,
            eventBus,
            "configured_raw_target_node_parse_pipeline",
            enableSpeculativeParsing,
            rawTargetNodeToTargetNodeFactory) {
          @Override
          public void close() {
            // Close the wrapping pipelines first, then the shared raw pipeline
            // they both read from. rawTargetNodePipeline is not closed by either
            // wrapper, so it must be closed explicitly here to avoid leaking it.
            super.close();
            nonResolvingTargetNodeParsePipeline.close();
            rawTargetNodePipeline.close();
            try {
              MostExecutors.shutdown(configuredPipelineExecutor, 1, TimeUnit.MINUTES);
            } catch (InterruptedException e) {
              // Restore the interrupt flag so callers can observe the interruption.
              Thread.currentThread().interrupt();
            }
          }
        };

    cellManager.register(rootCell);

    return new PerBuildStateWithConfigurableAttributes(
        cellManager,
        buildFileRawNodeParsePipeline,
        targetNodeParsePipeline,
        parsingContext,
        constraintResolver,
        selectorListResolver,
        targetPlatform);
  }

  /**
   * Creates the executor used by the configured-target-node pipeline, sized by the
   * project's resource configuration and named for debuggability.
   */
  @SuppressWarnings("PMD.AvoidThreadGroup")
  private static ExecutorService createExecutorService(BuckConfig buckConfig, String name) {
    ConcurrencyLimit concurrencyLimit =
        buckConfig.getView(ResourcesConfig.class).getConcurrencyLimit();

    return MostExecutors.newMultiThreadExecutor(
        new ThreadFactoryBuilder()
            .setNameFormat(name + "-%d")
            .setThreadFactory(
                new CommandThreadFactory(
                    r -> new Thread(new ThreadGroup(name), r),
                    GlobalStateManager.singleton().getThreadToCommandRegister()))
            .build(),
        concurrencyLimit.managedThreadCount);
  }

  /**
   * Resolves the first requested target platform name into a {@link Platform}.
   * Returns an empty constraint-based platform when no platform was requested.
   *
   * @throws HumanReadableException when the named target is not a `platform` rule
   */
  private Platform getTargetPlatform(
      ConfigurationRuleResolver configurationRuleResolver,
      ConstraintResolver constraintResolver,
      Cell rootCell,
      ImmutableList<String> targetPlatforms) {
    if (targetPlatforms.isEmpty()) {
      return new ConstraintBasedPlatform("", ImmutableSet.of());
    }

    // Only the first platform name is honored; additional entries are ignored.
    String targetPlatformName = targetPlatforms.get(0);
    ConfigurationRule configurationRule =
        configurationRuleResolver.getRule(
            unconfiguredBuildTargetFactory
                .create(rootCell.getCellPathResolver(), targetPlatformName)
                .configure(EmptyTargetConfiguration.INSTANCE));

    if (!(configurationRule instanceof PlatformRule)) {
      throw new HumanReadableException(
          "%s is used as a target platform, but not declared using `platform` rule",
          targetPlatformName);
    }

    PlatformRule platformRule = (PlatformRule) configurationRule;

    ImmutableSet<ConstraintValue> constraintValues =
        platformRule
            .getConstrainValues()
            .stream()
            .map(constraintResolver::getConstraintValue)
            .collect(ImmutableSet.toImmutableSet());

    return new ConstraintBasedPlatform(targetPlatformName, constraintValues);
  }
}
|
Close rawTargetNodePipeline in PerBuildStateFactoryWithConfigurableAttributes
Summary: This needs to be closed.
Reviewed By: sbalabanov
fbshipit-source-id: 146c4ab767
|
src/com/facebook/buck/parser/PerBuildStateFactoryWithConfigurableAttributes.java
|
Close rawTargetNodePipeline in PerBuildStateFactoryWithConfigurableAttributes
|
|
Java
|
bsd-3-clause
|
eefe35a14564a3284d80b73e691f0f5fb1cc4d18
| 0
|
abego/treelayout,abego/treelayout,abego/treelayout
|
/*
* [The "BSD license"]
* Copyright (c) 2011, abego Software GmbH, Germany (http://www.abego.org)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of the abego Software GmbH nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.abego.treelayout;
import static org.abego.treelayout.internal.util.Contract.checkArg;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.abego.treelayout.Configuration.AlignmentInLevel;
import org.abego.treelayout.Configuration.Location;
/**
* Implements the actual tree layout algorithm.
* <p>
* The nodes with their final layout can be retrieved through
* {@link #getNodeBounds()}.
* <p>
* See <a href="package-summary.html">this summary</a> to get an overview how to
* use TreeLayout.
*
*
* @author Udo Borkowski (ub@abego.org)
*
* @param <TreeNode>
*/
public class TreeLayout<TreeNode> {
    /*
     * Differences between this implementation and original algorithm
     * --------------------------------------------------------------
     *
     * For easier reference the same names (or at least similar names) as in the
     * paper of Buchheim, Jünger, and Leipert are used in this
     * implementation. However in the external interface "first" and "last" are
     * used instead of "left most" and "right most". The implementation also
     * supports tree layouts with the root at the left (or right) side. In that
     * case using "left most" would refer to the "top" child, i.e. using "first"
     * is less confusing.
     *
     * Also the y coordinate is not the level but directly refers the y
     * coordinate of a level, taking node's height and gapBetweenLevels into
     * account. When the root is at the left or right side the y coordinate
     * actually becomes an x coordinate.
     *
     * Instead of just using a constant "distance" to calculate the position to
     * the next node we refer to the "size" (width or height) of the node and a
     * "gapBetweenNodes".
     */

    // ------------------------------------------------------------------------
    // tree

    private final TreeForTreeLayout<TreeNode> tree;

    /**
     * Returns the Tree the layout is created for.
     *
     * @return the tree passed to the constructor
     */
    public TreeForTreeLayout<TreeNode> getTree() {
        return tree;
    }

    // ------------------------------------------------------------------------
    // nodeExtentProvider

    // Supplies the width and height of each node; consulted throughout the
    // layout computation.
    private final NodeExtentProvider<TreeNode> nodeExtentProvider;

    /**
     * Returns the {@link NodeExtentProvider} used by this {@link TreeLayout}.
     *
     * @return the extent provider passed to the constructor
     */
    public NodeExtentProvider<TreeNode> getNodeExtentProvider() {
        return nodeExtentProvider;
    }

    // Shorthand for the extent provider's height of a node.
    private double getNodeHeight(TreeNode node) {
        return nodeExtentProvider.getHeight(node);
    }

    // Shorthand for the extent provider's width of a node.
    private double getNodeWidth(TreeNode node) {
        return nodeExtentProvider.getWidth(node);
    }

    // Selects either the width or the height of the node, depending on the flag.
    private double getWidthOrHeightOfNode(TreeNode treeNode, boolean returnWidth) {
        return returnWidth ? getNodeWidth(treeNode) : getNodeHeight(treeNode);
    }

    /**
     * When the level changes in Y-axis (i.e. root location Top or Bottom) the
     * height of a node is its thickness, otherwise the node's width is its
     * thickness.
     * <p>
     * The thickness of a node is used when calculating the locations of the
     * levels.
     *
     * @param treeNode the node whose thickness is requested
     * @return the node's extent perpendicular to its level
     */
    private double getNodeThickness(TreeNode treeNode) {
        return getWidthOrHeightOfNode(treeNode, !isLevelChangeInYAxis());
    }

    /**
     * When the level changes in Y-axis (i.e. root location Top or Bottom) the
     * width of a node is its size, otherwise the node's height is its size.
     * <p>
     * The size of a node is used when calculating the distance between two
     * nodes.
     *
     * @param treeNode the node whose size is requested
     * @return the node's extent along its level
     */
    private double getNodeSize(TreeNode treeNode) {
        return getWidthOrHeightOfNode(treeNode, isLevelChangeInYAxis());
    }
    // ------------------------------------------------------------------------
    // configuration

    // Layout options: root location, gaps between nodes/levels, alignment.
    private final Configuration<TreeNode> configuration;

    /**
     * Returns the Configuration used by this {@link TreeLayout}.
     *
     * @return the configuration passed to the constructor
     */
    public Configuration<TreeNode> getConfiguration() {
        return configuration;
    }

    // True when levels advance along the y axis, i.e. the root is located at
    // the top or the bottom of the drawing.
    private boolean isLevelChangeInYAxis() {
        Location rootLocation = configuration.getRootLocation();
        return rootLocation == Location.Top || rootLocation == Location.Bottom;
    }

    // Direction in which level coordinates grow: -1 when the root is at the
    // bottom or the right, otherwise +1.
    private int getLevelChangeSign() {
        Location rootLocation = configuration.getRootLocation();
        return rootLocation == Location.Bottom
                || rootLocation == Location.Right ? -1 : 1;
    }
// ------------------------------------------------------------------------
// bounds
private double boundsLeft = Double.MAX_VALUE;
private double boundsRight = Double.MIN_VALUE;
private double boundsTop = Double.MAX_VALUE;
private double boundsBottom = Double.MIN_VALUE;
private void updateBounds(TreeNode node, double centerX, double centerY) {
double width = getNodeWidth(node);
double height = getNodeHeight(node);
double left = centerX - width / 2;
double right = centerX + width / 2;
double top = centerY - height / 2;
double bottom = centerY + height / 2;
if (boundsLeft > left) {
boundsLeft = left;
}
if (boundsRight < right) {
boundsRight = right;
}
if (boundsTop > top) {
boundsTop = top;
}
if (boundsBottom < bottom) {
boundsBottom = bottom;
}
}
/**
* Returns the bounds of the tree layout.
* <p>
* The bounds of a TreeLayout is the smallest rectangle containing the
* bounds of all nodes in the layout. It always starts at (0,0).
*
* @return the bounds of the tree layout
*/
public Rectangle2D getBounds() {
return new Rectangle2D.Double(0, 0, boundsRight - boundsLeft,
boundsBottom - boundsTop);
}
    // ------------------------------------------------------------------------
    // size of level

    // sizeOfLevel.get(i) holds the maximal node thickness seen on level i.
    private final List<Double> sizeOfLevel = new ArrayList<Double>();

    // Recursively walks the tree and records, per level, the largest node
    // thickness encountered. Must be called before getSizeOfLevel is used.
    private void calcSizeOfLevels(TreeNode node, int level) {
        double oldSize;
        if (sizeOfLevel.size() <= level) {
            // first node visited on this level: create its slot
            sizeOfLevel.add(Double.valueOf(0));
            oldSize = 0;
        } else {
            oldSize = sizeOfLevel.get(level);
        }

        double size = getNodeThickness(node);
        // size = nodeExtentProvider.getHeight(node);
        if (oldSize < size) {
            sizeOfLevel.set(level, size);
        }

        if (!tree.isLeaf(node)) {
            for (TreeNode child : tree.getChildren(node)) {
                calcSizeOfLevels(child, level + 1);
            }
        }
    }

    /**
     * Returns the number of levels of the tree.
     *
     * @return [level > 0]
     */
    public int getLevelCount() {
        return sizeOfLevel.size();
    }

    /**
     * Returns the size of a level.
     * <p>
     * When the root is located at the top or bottom the size of a level is the
     * maximal height of the nodes of that level. When the root is located at
     * the left or right the size of a level is the maximal width of the nodes
     * of that level.
     *
     * @param level the level to query
     * @return the size of the level [level >= 0 && level < levelCount]
     */
    public double getSizeOfLevel(int level) {
        checkArg(level >= 0, "level must be >= 0");
        checkArg(level < getLevelCount(), "level must be < levelCount");

        return sizeOfLevel.get(level);
    }
    // ------------------------------------------------------------------------
    // NormalizedPosition

    /**
     * The algorithm calculates the position starting with the root at 0. I.e.
     * the left children will get negative positions. However we want the result
     * to be normalized to (0,0).
     * <p>
     * {@link NormalizedPosition} will normalize the position (given relative to
     * the root position), taking the current bounds into account. This way the
     * left most node bounds will start at x = 0, the top most node bounds at y
     * = 0.
     */
    private class NormalizedPosition extends Point2D {
        // Coordinates relative to the root. Normalization happens lazily in
        // getX()/getY() by subtracting the bounds known at read time, so
        // positions stored early are still correct once all nodes are placed.
        private double x_relativeToRoot;
        private double y_relativeToRoot;

        public NormalizedPosition(double x_relativeToRoot,
                double y_relativeToRoot) {
            setLocation(x_relativeToRoot, y_relativeToRoot);
        }

        @Override
        public double getX() {
            return x_relativeToRoot - boundsLeft;
        }

        @Override
        public double getY() {
            return y_relativeToRoot - boundsTop;
        }

        @Override
        // never called from outside
        public void setLocation(double x_relativeToRoot, double y_relativeToRoot) {
            this.x_relativeToRoot = x_relativeToRoot;
            this.y_relativeToRoot = y_relativeToRoot;
        }

    }
// ------------------------------------------------------------------------
// The Algorithm
private final Map<TreeNode, Double> mod = new HashMap<TreeNode, Double>();
private final Map<TreeNode, TreeNode> thread = new HashMap<TreeNode, TreeNode>();
private final Map<TreeNode, Double> prelim = new HashMap<TreeNode, Double>();
private final Map<TreeNode, Double> change = new HashMap<TreeNode, Double>();
private final Map<TreeNode, Double> shift = new HashMap<TreeNode, Double>();
private final Map<TreeNode, TreeNode> ancestor = new HashMap<TreeNode, TreeNode>();
private final Map<TreeNode, Integer> number = new HashMap<TreeNode, Integer>();
private final Map<TreeNode, Point2D> positions = new HashMap<TreeNode, Point2D>();
private double getMod(TreeNode node) {
Double d = mod.get(node);
return d != null ? d.doubleValue() : 0;
}
private void setMod(TreeNode node, double d) {
mod.put(node, d);
}
private TreeNode getThread(TreeNode node) {
TreeNode n = thread.get(node);
return n != null ? n : null;
}
private void setThread(TreeNode node, TreeNode thread) {
this.thread.put(node, thread);
}
private TreeNode getAncestor(TreeNode node) {
TreeNode n = ancestor.get(node);
return n != null ? n : node;
}
private void setAncestor(TreeNode node, TreeNode ancestor) {
this.ancestor.put(node, ancestor);
}
private double getPrelim(TreeNode node) {
Double d = prelim.get(node);
return d != null ? d.doubleValue() : 0;
}
private void setPrelim(TreeNode node, double d) {
prelim.put(node, d);
}
private double getChange(TreeNode node) {
Double d = change.get(node);
return d != null ? d.doubleValue() : 0;
}
private void setChange(TreeNode node, double d) {
change.put(node, d);
}
private double getShift(TreeNode node) {
Double d = shift.get(node);
return d != null ? d.doubleValue() : 0;
}
private void setShift(TreeNode node, double d) {
shift.put(node, d);
}
/**
* The distance of two nodes is the distance of the centers of both noded.
* <p>
* I.e. the distance includes the gap between the nodes and half of the
* sizes of the nodes.
*
* @param v
* @param w
* @return the distance between node v and w
*/
private double getDistance(TreeNode v, TreeNode w) {
double sizeOfNodes = getNodeSize(v) + getNodeSize(w);
double distance = sizeOfNodes / 2
+ configuration.getGapBetweenNodes(v, w);
return distance;
}
private TreeNode nextLeft(TreeNode v) {
return tree.isLeaf(v) ? getThread(v) : tree.getFirstChild(v);
}
private TreeNode nextRight(TreeNode v) {
return tree.isLeaf(v) ? getThread(v) : tree.getLastChild(v);
}
/**
*
* @param node
* [tree.isChildOfParent(node, parentNode)]
* @param parentNode
* parent of node
* @return
*/
private int getNumber(TreeNode node, TreeNode parentNode) {
Integer n = number.get(node);
if (n == null) {
int i = 1;
for (TreeNode child : tree.getChildren(parentNode)) {
number.put(child, i++);
}
n = number.get(node);
}
return n.intValue();
}
    /**
     * Returns the greatest distinct ancestor of vIMinus and its right
     * neighbor v.
     *
     * @param vIMinus the node on the left contour
     * @param v the node currently being placed
     * @param parentOfV the parent of v
     * @param defaultAncestor fallback when no distinct ancestor is found
     * @return the greatest distinct ancestor of vIMinus and its right neighbor
     *         v
     */
    private TreeNode ancestor(TreeNode vIMinus, TreeNode v, TreeNode parentOfV,
            TreeNode defaultAncestor) {
        TreeNode ancestor = getAncestor(vIMinus);

        // when the ancestor of vIMinus is a sibling of v (i.e. has the same
        // parent as v) it is also the greatest distinct ancestor vIMinus and
        // v. Otherwise it is the defaultAncestor
        return tree.isChildOfParent(ancestor, parentOfV) ? ancestor
                : defaultAncestor;
    }

    // Moves the subtree rooted at wPlus to the right by "shift", recording
    // change/shift amounts on wMinus and wPlus so executeShifts can later
    // distribute the movement evenly over the subtrees between them.
    private void moveSubtree(TreeNode wMinus, TreeNode wPlus, TreeNode parent,
            double shift) {
        int subtrees = getNumber(wPlus, parent) - getNumber(wMinus, parent);
        setChange(wPlus, getChange(wPlus) - shift / subtrees);
        setShift(wPlus, getShift(wPlus) + shift);
        setChange(wMinus, getChange(wMinus) + shift / subtrees);
        setPrelim(wPlus, getPrelim(wPlus) + shift);
        setMod(wPlus, getMod(wPlus) + shift);
    }
    /**
     * Resolves overlaps between the subtree rooted at v and its left
     * siblings by walking both contours and shifting subtrees as needed.
     * <p>
     * In difference to the original algorithm we also pass in the leftSibling
     * and the parent of v.
     * <p>
     * <b>Why adding the parameter 'parent of v' (parentOfV) ?</b>
     * <p>
     * In this method we need access to the parent of v. Not every tree
     * implementation may support efficient (i.e. constant time) access to it.
     * On the other hand the (only) caller of this method can provide this
     * information with only constant extra time.
     * <p>
     * Also we need access to the "left most sibling" of v. Not every tree
     * implementation may support efficient (i.e. constant time) access to it.
     * On the other hand the "left most sibling" of v is also the "first child"
     * of the parent of v. The first child of a parent node we can get in
     * constant time. As we got the parent of v we can so also get the
     * "left most sibling" of v in constant time.
     * <p>
     * <b>Why adding the parameter 'leftSibling' ?</b>
     * <p>
     * In this method we need access to the "left sibling" of v. Not every tree
     * implementation may support efficient (i.e. constant time) access to it.
     * However it is easy for the caller of this method to provide this
     * information with only constant extra time.
     * <p>
     * <p>
     * <p>
     * In addition these extra parameters avoid the need for
     * {@link TreeForTreeLayout} to include extra methods "getParent",
     * "getLeftSibling", or "getLeftMostSibling". This keeps the interface
     * {@link TreeForTreeLayout} small and avoids redundant implementations.
     *
     * @param v the node being placed
     * @param defaultAncestor the current default ancestor
     * @param leftSibling
     *            [nullable] the left sibling v, if there is any
     * @param parentOfV
     *            the parent of v
     * @return the (possibly changes) defaultAncestor
     */
    private TreeNode apportion(TreeNode v, TreeNode defaultAncestor,
            TreeNode leftSibling, TreeNode parentOfV) {
        TreeNode w = leftSibling;
        if (w == null) {
            // v has no left sibling
            return defaultAncestor;
        }
        // v has left sibling w

        // The following variables "v..." are used to traverse the contours to
        // the subtrees. "Minus" refers to the left, "Plus" to the right
        // subtree. "I" refers to the "inside" and "O" to the outside contour.
        TreeNode vOPlus = v;
        TreeNode vIPlus = v;
        TreeNode vIMinus = w;
        // get leftmost sibling of vIPlus, i.e. get the leftmost sibling of
        // v, i.e. the leftmost child of the parent of v (which is passed
        // in)
        TreeNode vOMinus = tree.getFirstChild(parentOfV);

        // s... accumulate the mod values along each contour.
        Double sIPlus = getMod(vIPlus);
        Double sOPlus = getMod(vOPlus);
        Double sIMinus = getMod(vIMinus);
        Double sOMinus = getMod(vOMinus);

        TreeNode nextRightVIMinus = nextRight(vIMinus);
        TreeNode nextLeftVIPlus = nextLeft(vIPlus);

        // Descend level by level as long as both inside contours continue.
        while (nextRightVIMinus != null && nextLeftVIPlus != null) {
            vIMinus = nextRightVIMinus;
            vIPlus = nextLeftVIPlus;
            vOMinus = nextLeft(vOMinus);
            vOPlus = nextRight(vOPlus);
            setAncestor(vOPlus, v);
            // How far do the two inside contours overlap on this level?
            double shift = (getPrelim(vIMinus) + sIMinus)
                    - (getPrelim(vIPlus) + sIPlus)
                    + getDistance(vIMinus, vIPlus);

            if (shift > 0) {
                moveSubtree(ancestor(vIMinus, v, parentOfV, defaultAncestor),
                        v, parentOfV, shift);
                sIPlus = sIPlus + shift;
                sOPlus = sOPlus + shift;
            }
            sIMinus = sIMinus + getMod(vIMinus);
            sIPlus = sIPlus + getMod(vIPlus);
            sOMinus = sOMinus + getMod(vOMinus);
            sOPlus = sOPlus + getMod(vOPlus);

            nextRightVIMinus = nextRight(vIMinus);
            nextLeftVIPlus = nextLeft(vIPlus);
        }

        // One contour ended before the other: set threads so later contour
        // walks can continue into the deeper subtree.
        if (nextRightVIMinus != null && nextRight(vOPlus) == null) {
            setThread(vOPlus, nextRightVIMinus);
            setMod(vOPlus, getMod(vOPlus) + sIMinus - sOPlus);
        }
        if (nextLeftVIPlus != null && nextLeft(vOMinus) == null) {
            setThread(vOMinus, nextLeftVIPlus);
            setMod(vOMinus, getMod(vOMinus) + sIPlus - sOMinus);
            defaultAncestor = v;
        }
        return defaultAncestor;
    }
    /**
     * Applies the shift/change amounts recorded by moveSubtree to the
     * children of v, traversing them right to left so the accumulated shift
     * grows towards the left siblings.
     *
     * @param v
     *            [!tree.isLeaf(v)]
     */
    private void executeShifts(TreeNode v) {
        double shift = 0;
        double change = 0;
        for (TreeNode w : tree.getChildrenReverse(v)) {
            change = change + getChange(w);
            setPrelim(w, getPrelim(w) + shift);
            setMod(w, getMod(w) + shift);
            shift = shift + getShift(w) + change;
        }
    }
    /**
     * First (bottom-up) walk of the algorithm: computes a preliminary
     * position (prelim) and a subtree offset (mod) for every node.
     * <p>
     * In difference to the original algorithm we also pass in the leftSibling
     * (see {@link #apportion(Object, Object, Object, Object)} for a
     * motivation).
     *
     * @param v the node to place
     * @param leftSibling
     *            [nullable] the left sibling v, if there is any
     */
    private void firstWalk(TreeNode v, TreeNode leftSibling) {
        if (tree.isLeaf(v)) {
            // No need to set prelim(v) to 0 as the getter takes care of this.

            TreeNode w = leftSibling;
            if (w != null) {
                // v has left sibling

                setPrelim(v, getPrelim(w) + getDistance(v, w));
            }

        } else {
            // v is not a leaf

            TreeNode defaultAncestor = tree.getFirstChild(v);
            TreeNode previousChild = null;
            for (TreeNode w : tree.getChildren(v)) {
                firstWalk(w, previousChild);
                defaultAncestor = apportion(w, defaultAncestor, previousChild,
                        v);
                previousChild = w;
            }
            executeShifts(v);
            // center the parent above its first and last child
            double midpoint = (getPrelim(tree.getFirstChild(v)) + getPrelim(tree
                    .getLastChild(v))) / 2.0;
            TreeNode w = leftSibling;
            if (w != null) {
                // v has left sibling

                setPrelim(v, getPrelim(w) + getDistance(v, w));
                setMod(v, getPrelim(v) - midpoint);

            } else {
                // v has no left sibling

                setPrelim(v, midpoint);
            }
        }
    }
    /**
     * Second (top-down) walk: combines prelim and the accumulated mod values
     * into final positions, stores them and updates the overall bounds.
     * <p>
     * In difference to the original algorithm we also pass in extra level
     * information.
     *
     * @param v the node to position
     * @param m the accumulated mod (offset) of all ancestors of v
     * @param level the level (depth) of v, root is level 0
     * @param levelStart coordinate at which the current level starts
     */
    private void secondWalk(TreeNode v, double m, int level, double levelStart) {
        // construct the position from the prelim and the level information

        // The rootLocation affects the way how x and y are changed and in what
        // direction.
        double levelChangeSign = getLevelChangeSign();
        boolean levelChangeOnYAxis = isLevelChangeInYAxis();
        double levelSize = getSizeOfLevel(level);

        double x = getPrelim(v) + m;

        double y;
        AlignmentInLevel alignment = configuration.getAlignmentInLevel();
        if (alignment == AlignmentInLevel.Center) {
            y = levelStart + levelChangeSign * (levelSize / 2);
        } else if (alignment == AlignmentInLevel.TowardsRoot) {
            y = levelStart + levelChangeSign * (getNodeThickness(v) / 2);
        } else {
            // AwayFromRoot
            y = levelStart + levelSize - levelChangeSign
                    * (getNodeThickness(v) / 2);
        }

        if (!levelChangeOnYAxis) {
            // root at the left or right: swap the axes
            double t = x;
            x = y;
            y = t;
        }

        positions.put(v, new NormalizedPosition(x, y));

        // update the bounds
        updateBounds(v, x, y);

        // recurse
        if (!tree.isLeaf(v)) {
            double nextLevelStart = levelStart
                    + (levelSize + configuration.getGapBetweenLevels(level + 1))
                    * levelChangeSign;
            for (TreeNode w : tree.getChildren(v)) {
                secondWalk(w, m + getMod(v), level + 1, nextLevelStart);
            }
        }
    }
    // ------------------------------------------------------------------------
    // nodeBounds

    // Lazily built cache for getNodeBounds(); null until first requested.
    private Map<TreeNode, Rectangle2D.Double> nodeBounds;

    /**
     * Returns the layout of the tree nodes by mapping each node of the tree to
     * its bounds (position and size).
     * <p>
     * For each rectangle x and y will be >= 0. At least one rectangle will have
     * an x == 0 and at least one rectangle will have an y == 0.
     *
     * @return maps each node of the tree to its bounds (position and size).
     */
    public Map<TreeNode, Rectangle2D.Double> getNodeBounds() {
        if (nodeBounds == null) {
            nodeBounds = new HashMap<TreeNode, Rectangle2D.Double>();
            for (Entry<TreeNode, Point2D> entry : positions.entrySet()) {
                TreeNode node = entry.getKey();
                Point2D pos = entry.getValue();
                double w = getNodeWidth(node);
                double h = getNodeHeight(node);
                // pos is the (normalized) center of the node
                double x = pos.getX() - w / 2;
                double y = pos.getY() - h / 2;
                nodeBounds.put(node, new Rectangle2D.Double(x, y, w, h));
            }
        }
        return nodeBounds;
    }
    // ------------------------------------------------------------------------
    // constructor

    /**
     * Creates a TreeLayout for a given tree; the full layout is computed
     * eagerly in this constructor.
     * <p>
     * In addition to the tree the {@link NodeExtentProvider} and the
     * {@link Configuration} must be given.
     *
     * @param tree the tree to lay out
     * @param nodeExtentProvider supplies the width/height of each node
     * @param configuration layout options (root location, gaps, alignment)
     */
    public TreeLayout(TreeForTreeLayout<TreeNode> tree,
            NodeExtentProvider<TreeNode> nodeExtentProvider,
            Configuration<TreeNode> configuration) {
        this.tree = tree;
        this.nodeExtentProvider = nodeExtentProvider;
        this.configuration = configuration;

        // No need to explicitly set mod, thread and ancestor as their getters
        // are taking care of the initial values. This avoids a full tree walk
        // through and saves some memory as no entries are added for
        // "initial values".

        TreeNode r = tree.getRoot();
        firstWalk(r, null);
        calcSizeOfLevels(r, 0);
        // pass -prelim(root) so the root's final x becomes 0 (pre-normalization)
        secondWalk(r, -getPrelim(r), 0, 0);
    }
private void addUniqueNodes(Set<TreeNode> nodes, TreeNode newNode) {
if (!nodes.add(newNode)) {
throw new RuntimeException(String.format(
"Node used more than once in tree: %s", newNode));
}
for (TreeNode n : tree.getChildren(newNode)) {
addUniqueNodes(nodes,n);
}
}
/**
* Check if the tree is a "valid" tree.
* <p>
* Typically you will use this method during development when you get an
* unexpected layout from your trees.
* <p>
* The following checks are performed:
* <ul>
* <li>Each node must only occur once in the tree.</li>
* </ul>
*/
public void checkTree() {
Set<TreeNode> nodes = new HashSet<TreeNode>();
// Traverse the tree and check if each node is only used once.
addUniqueNodes(nodes,tree.getRoot());
}
    // Writes one line for the given node (indented by its depth) and recurses
    // into its children with an increased indent.
    private void dumpTree(PrintStream output, TreeNode node, int indent,
            boolean includeNodeSize) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < indent; i++) {
            sb.append(" ");
        }
        sb.append(node);
        if (includeNodeSize) {
            sb.append(" (size: ");
            sb.append(getNodeWidth(node));
            sb.append("x");
            sb.append(getNodeHeight(node));
            sb.append(")");
        }
        output.println(sb.toString());

        for (TreeNode n : tree.getChildren(node)) {
            dumpTree(output, n, indent + 1, includeNodeSize);
        }
    }

    /**
     * Prints a dump of the tree to the given printStream, using the node's
     * "toString" method.
     *
     * @param printStream the stream the dump is written to
     * @param includeNodeSize
     *            when true the dump also includes the size of each node,
     *            otherwise not.
     */
    public void dumpTree(PrintStream printStream, boolean includeNodeSize) {
        dumpTree(printStream, tree.getRoot(), 0, includeNodeSize);
    }
}
|
org.abego.treelayout/src/main/java/org/abego/treelayout/TreeLayout.java
|
/*
* [The "BSD license"]
* Copyright (c) 2011, abego Software GmbH, Germany (http://www.abego.org)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of the abego Software GmbH nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.abego.treelayout;
import static org.abego.treelayout.internal.util.Contract.checkArg;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.abego.treelayout.Configuration.AlignmentInLevel;
import org.abego.treelayout.Configuration.Location;
/**
* Implements the actual tree layout algorithm.
* <p>
* The nodes with their final layout can be retrieved through
* {@link #getNodeBounds()}.
* <p>
* See <a href="package-summary.html">this summary</a> to get an overview how to
* use TreeLayout.
*
*
* @author Udo Borkowski (ub@abego.org)
*
* @param <TreeNode>
*/
public class TreeLayout<TreeNode> {
    /*
     * Differences between this implementation and original algorithm
     * --------------------------------------------------------------
     *
     * For easier reference the same names (or at least similar names) as in the
     * paper of Buchheim, Jünger, and Leipert are used in this
     * implementation. However in the external interface "first" and "last" are
     * used instead of "left most" and "right most". The implementation also
     * supports tree layouts with the root at the left (or right) side. In that
     * case using "left most" would refer to the "top" child, i.e. using "first"
     * is less confusing.
     *
     * Also the y coordinate is not the level but directly refers the y
     * coordinate of a level, taking node's height and gapBetweenLevels into
     * account. When the root is at the left or right side the y coordinate
     * actually becomes an x coordinate.
     *
     * Instead of just using a constant "distance" to calculate the position to
     * the next node we refer to the "size" (width or height) of the node and a
     * "gapBetweenNodes".
     */

    // ------------------------------------------------------------------------
    // tree

    private final TreeForTreeLayout<TreeNode> tree;

    /**
     * Returns the Tree the layout is created for.
     */
    public TreeForTreeLayout<TreeNode> getTree() {
        return tree;
    }

    // ------------------------------------------------------------------------
    // nodeExtentProvider

    private final NodeExtentProvider<TreeNode> nodeExtentProvider;

    /**
     * Returns the {@link NodeExtentProvider} used by this {@link TreeLayout}.
     */
    public NodeExtentProvider<TreeNode> getNodeExtentProvider() {
        return nodeExtentProvider;
    }

    private double getNodeHeight(TreeNode node) {
        return nodeExtentProvider.getHeight(node);
    }

    private double getNodeWidth(TreeNode node) {
        return nodeExtentProvider.getWidth(node);
    }

    private double getWidthOrHeightOfNode(TreeNode treeNode, boolean returnWidth) {
        return returnWidth ? getNodeWidth(treeNode) : getNodeHeight(treeNode);
    }

    /**
     * When the level changes in Y-axis (i.e. root location Top or Bottom) the
     * height of a node is its thickness, otherwise the node's width is its
     * thickness.
     * <p>
     * The thickness of a node is used when calculating the locations of the
     * levels.
     *
     * @param treeNode the node to measure
     * @return the thickness of the node
     */
    private double getNodeThickness(TreeNode treeNode) {
        return getWidthOrHeightOfNode(treeNode, !isLevelChangeInYAxis());
    }

    /**
     * When the level changes in Y-axis (i.e. root location Top or Bottom) the
     * width of a node is its size, otherwise the node's height is its size.
     * <p>
     * The size of a node is used when calculating the distance between two
     * nodes.
     *
     * @param treeNode the node to measure
     * @return the size of the node
     */
    private double getNodeSize(TreeNode treeNode) {
        return getWidthOrHeightOfNode(treeNode, isLevelChangeInYAxis());
    }

    // ------------------------------------------------------------------------
    // configuration

    private final Configuration<TreeNode> configuration;

    /**
     * Returns the Configuration used by this {@link TreeLayout}.
     */
    public Configuration<TreeNode> getConfiguration() {
        return configuration;
    }

    /** True when levels are stacked along the y axis (root at Top or Bottom). */
    private boolean isLevelChangeInYAxis() {
        Location rootLocation = configuration.getRootLocation();
        return rootLocation == Location.Top || rootLocation == Location.Bottom;
    }

    /**
     * Returns -1 when levels grow towards smaller coordinates (root at Bottom
     * or Right), otherwise 1.
     */
    private int getLevelChangeSign() {
        Location rootLocation = configuration.getRootLocation();
        return rootLocation == Location.Bottom
                || rootLocation == Location.Right ? -1 : 1;
    }

    // ------------------------------------------------------------------------
    // bounds

    private double boundsLeft = Double.MAX_VALUE;
    // FIX: the original seeded boundsRight/boundsBottom with Double.MIN_VALUE.
    // Double.MIN_VALUE is the smallest POSITIVE double (~4.9e-324), not the
    // most negative value, so a running maximum seeded with it is never
    // updated by coordinates <= 0. -Double.MAX_VALUE is the correct
    // "smaller than everything" sentinel.
    private double boundsRight = -Double.MAX_VALUE;
    private double boundsTop = Double.MAX_VALUE;
    private double boundsBottom = -Double.MAX_VALUE;

    /**
     * Extends the accumulated layout bounds so they include the given node,
     * placed with its center at (centerX, centerY).
     */
    private void updateBounds(TreeNode node, double centerX, double centerY) {
        double width = getNodeWidth(node);
        double height = getNodeHeight(node);
        double left = centerX - width / 2;
        double right = centerX + width / 2;
        double top = centerY - height / 2;
        double bottom = centerY + height / 2;
        if (boundsLeft > left) {
            boundsLeft = left;
        }
        if (boundsRight < right) {
            boundsRight = right;
        }
        if (boundsTop > top) {
            boundsTop = top;
        }
        if (boundsBottom < bottom) {
            boundsBottom = bottom;
        }
    }

    /**
     * Returns the bounds of the tree layout.
     * <p>
     * The bounds of a TreeLayout is the smallest rectangle containing the
     * bounds of all nodes in the layout. It always starts at (0,0).
     *
     * @return the bounds of the tree layout
     */
    public Rectangle2D getBounds() {
        return new Rectangle2D.Double(0, 0, boundsRight - boundsLeft,
                boundsBottom - boundsTop);
    }

    // ------------------------------------------------------------------------
    // size of level

    private final List<Double> sizeOfLevel = new ArrayList<Double>();

    /**
     * Recursively records, per level, the maximum node thickness seen so far.
     */
    private void calcSizeOfLevels(TreeNode node, int level) {
        double oldSize;
        if (sizeOfLevel.size() <= level) {
            // first node seen on this level
            sizeOfLevel.add(Double.valueOf(0));
            oldSize = 0;
        } else {
            oldSize = sizeOfLevel.get(level);
        }
        double size = getNodeThickness(node);
        if (oldSize < size) {
            sizeOfLevel.set(level, size);
        }
        if (!tree.isLeaf(node)) {
            for (TreeNode child : tree.getChildren(node)) {
                calcSizeOfLevels(child, level + 1);
            }
        }
    }

    /**
     * Returns the number of levels of the tree.
     *
     * @return [level > 0]
     */
    public int getLevelCount() {
        return sizeOfLevel.size();
    }

    /**
     * Returns the size of a level.
     * <p>
     * When the root is located at the top or bottom the size of a level is the
     * maximal height of the nodes of that level. When the root is located at
     * the left or right the size of a level is the maximal width of the nodes
     * of that level.
     *
     * @param level the level to query
     * @return the size of the level [level >= 0 && level < levelCount]
     */
    public double getSizeOfLevel(int level) {
        checkArg(level >= 0, "level must be >= 0");
        checkArg(level < getLevelCount(), "level must be < levelCount");
        return sizeOfLevel.get(level);
    }

    // ------------------------------------------------------------------------
    // NormalizedPosition

    /**
     * The algorithm calculates the position starting with the root at 0. I.e.
     * the left children will get negative positions. However we want the result
     * to be normalized to (0,0).
     * <p>
     * {@link NormalizedPosition} will normalize the position (given relative to
     * the root position), taking the current bounds into account. This way the
     * left most node bounds will start at x = 0, the top most node bounds at y
     * = 0.
     */
    private class NormalizedPosition extends Point2D {
        private double x_relativeToRoot;
        private double y_relativeToRoot;

        public NormalizedPosition(double x_relativeToRoot,
                double y_relativeToRoot) {
            setLocation(x_relativeToRoot, y_relativeToRoot);
        }

        @Override
        public double getX() {
            // normalization happens lazily, so positions created before the
            // bounds are final still end up correct
            return x_relativeToRoot - boundsLeft;
        }

        @Override
        public double getY() {
            return y_relativeToRoot - boundsTop;
        }

        @Override
        // never called from outside
        public void setLocation(double x_relativeToRoot, double y_relativeToRoot) {
            this.x_relativeToRoot = x_relativeToRoot;
            this.y_relativeToRoot = y_relativeToRoot;
        }
    }

    // ------------------------------------------------------------------------
    // The Algorithm

    // Per-node attributes of the Buchheim/Jünger/Leipert algorithm. Entries
    // are created lazily; the getters below return the algorithm's defined
    // initial value when no entry exists yet, which avoids a full tree walk
    // for initialization.
    private final Map<TreeNode, Double> mod = new HashMap<TreeNode, Double>();
    private final Map<TreeNode, TreeNode> thread = new HashMap<TreeNode, TreeNode>();
    private final Map<TreeNode, Double> prelim = new HashMap<TreeNode, Double>();
    private final Map<TreeNode, Double> change = new HashMap<TreeNode, Double>();
    private final Map<TreeNode, Double> shift = new HashMap<TreeNode, Double>();
    private final Map<TreeNode, TreeNode> ancestor = new HashMap<TreeNode, TreeNode>();
    private final Map<TreeNode, Integer> number = new HashMap<TreeNode, Integer>();
    private final Map<TreeNode, Point2D> positions = new HashMap<TreeNode, Point2D>();

    private double getMod(TreeNode node) {
        Double d = mod.get(node);
        return d != null ? d.doubleValue() : 0;
    }

    private void setMod(TreeNode node, double d) {
        mod.put(node, d);
    }

    private TreeNode getThread(TreeNode node) {
        // FIX: the original wrote "n != null ? n : null", a no-op ternary;
        // a plain map lookup already returns null for a missing entry.
        return thread.get(node);
    }

    private void setThread(TreeNode node, TreeNode thread) {
        this.thread.put(node, thread);
    }

    private TreeNode getAncestor(TreeNode node) {
        // a node with no recorded ancestor is its own ancestor
        TreeNode n = ancestor.get(node);
        return n != null ? n : node;
    }

    private void setAncestor(TreeNode node, TreeNode ancestor) {
        this.ancestor.put(node, ancestor);
    }

    private double getPrelim(TreeNode node) {
        Double d = prelim.get(node);
        return d != null ? d.doubleValue() : 0;
    }

    private void setPrelim(TreeNode node, double d) {
        prelim.put(node, d);
    }

    private double getChange(TreeNode node) {
        Double d = change.get(node);
        return d != null ? d.doubleValue() : 0;
    }

    private void setChange(TreeNode node, double d) {
        change.put(node, d);
    }

    private double getShift(TreeNode node) {
        Double d = shift.get(node);
        return d != null ? d.doubleValue() : 0;
    }

    private void setShift(TreeNode node, double d) {
        shift.put(node, d);
    }

    /**
     * The distance of two nodes is the distance of the centers of both nodes.
     * <p>
     * I.e. the distance includes the gap between the nodes and half of the
     * sizes of the nodes.
     *
     * @param v
     * @param w
     * @return the distance between node v and w
     */
    private double getDistance(TreeNode v, TreeNode w) {
        double sizeOfNodes = getNodeSize(v) + getNodeSize(w);
        double distance = sizeOfNodes / 2
                + configuration.getGapBetweenNodes(v, w);
        return distance;
    }

    /** Next node on the left contour: first child, or the thread for a leaf. */
    private TreeNode nextLeft(TreeNode v) {
        return tree.isLeaf(v) ? getThread(v) : tree.getFirstChild(v);
    }

    /** Next node on the right contour: last child, or the thread for a leaf. */
    private TreeNode nextRight(TreeNode v) {
        return tree.isLeaf(v) ? getThread(v) : tree.getLastChild(v);
    }

    /**
     * Returns the 1-based index of the node among its siblings. The numbers of
     * all children of parentNode are cached on first access.
     *
     * @param node
     *            [tree.isChildOfParent(node, parentNode)]
     * @param parentNode
     *            parent of node
     * @return the 1-based sibling index of node
     */
    private int getNumber(TreeNode node, TreeNode parentNode) {
        Integer n = number.get(node);
        if (n == null) {
            int i = 1;
            for (TreeNode child : tree.getChildren(parentNode)) {
                number.put(child, i++);
            }
            n = number.get(node);
        }
        return n.intValue();
    }

    /**
     *
     * @param vIMinus
     * @param v
     * @param parentOfV
     * @param defaultAncestor
     * @return the greatest distinct ancestor of vIMinus and its right neighbor
     *         v
     */
    private TreeNode ancestor(TreeNode vIMinus, TreeNode v, TreeNode parentOfV,
            TreeNode defaultAncestor) {
        TreeNode ancestor = getAncestor(vIMinus);
        // when the ancestor of vIMinus is a sibling of v (i.e. has the same
        // parent as v) it is also the greatest distinct ancestor vIMinus and
        // v. Otherwise it is the defaultAncestor
        return tree.isChildOfParent(ancestor, parentOfV) ? ancestor
                : defaultAncestor;
    }

    /**
     * Moves the subtree rooted at wPlus by "shift" and distributes the shift
     * over the subtrees between wMinus and wPlus via change/shift records.
     */
    private void moveSubtree(TreeNode wMinus, TreeNode wPlus, TreeNode parent,
            double shift) {
        int subtrees = getNumber(wPlus, parent) - getNumber(wMinus, parent);
        setChange(wPlus, getChange(wPlus) - shift / subtrees);
        setShift(wPlus, getShift(wPlus) + shift);
        setChange(wMinus, getChange(wMinus) + shift / subtrees);
        setPrelim(wPlus, getPrelim(wPlus) + shift);
        setMod(wPlus, getMod(wPlus) + shift);
    }

    /**
     * In difference to the original algorithm we also pass in the leftSibling
     * and the parent of v.
     * <p>
     * <b>Why adding the parameter 'parent of v' (parentOfV) ?</b>
     * <p>
     * In this method we need access to the parent of v. Not every tree
     * implementation may support efficient (i.e. constant time) access to it.
     * On the other hand the (only) caller of this method can provide this
     * information with only constant extra time.
     * <p>
     * Also we need access to the "left most sibling" of v. Not every tree
     * implementation may support efficient (i.e. constant time) access to it.
     * On the other hand the "left most sibling" of v is also the "first child"
     * of the parent of v. The first child of a parent node we can get in
     * constant time. As we got the parent of v we can so also get the
     * "left most sibling" of v in constant time.
     * <p>
     * <b>Why adding the parameter 'leftSibling' ?</b>
     * <p>
     * In this method we need access to the "left sibling" of v. Not every tree
     * implementation may support efficient (i.e. constant time) access to it.
     * However it is easy for the caller of this method to provide this
     * information with only constant extra time.
     * <p>
     * In addition these extra parameters avoid the need for
     * {@link TreeForTreeLayout} to include extra methods "getParent",
     * "getLeftSibling", or "getLeftMostSibling". This keeps the interface
     * {@link TreeForTreeLayout} small and avoids redundant implementations.
     *
     * @param v
     * @param defaultAncestor
     * @param leftSibling
     *            [nullable] the left sibling v, if there is any
     * @param parentOfV
     *            the parent of v
     * @return the (possibly changes) defaultAncestor
     */
    private TreeNode apportion(TreeNode v, TreeNode defaultAncestor,
            TreeNode leftSibling, TreeNode parentOfV) {
        TreeNode w = leftSibling;
        if (w == null) {
            // v has no left sibling
            return defaultAncestor;
        }
        // v has left sibling w

        // The following variables "v..." are used to traverse the contours to
        // the subtrees. "Minus" refers to the left, "Plus" to the right
        // subtree. "I" refers to the "inside" and "O" to the outside contour.
        TreeNode vOPlus = v;
        TreeNode vIPlus = v;
        TreeNode vIMinus = w;
        // get leftmost sibling of vIPlus, i.e. get the leftmost sibling of
        // v, i.e. the leftmost child of the parent of v (which is passed
        // in)
        TreeNode vOMinus = tree.getFirstChild(parentOfV);

        // FIX: use primitive doubles; the original declared these as boxed
        // Double, causing a needless box/unbox on every loop iteration.
        double sIPlus = getMod(vIPlus);
        double sOPlus = getMod(vOPlus);
        double sIMinus = getMod(vIMinus);
        double sOMinus = getMod(vOMinus);

        TreeNode nextRightVIMinus = nextRight(vIMinus);
        TreeNode nextLeftVIPlus = nextLeft(vIPlus);
        while (nextRightVIMinus != null && nextLeftVIPlus != null) {
            vIMinus = nextRightVIMinus;
            vIPlus = nextLeftVIPlus;
            vOMinus = nextLeft(vOMinus);
            vOPlus = nextRight(vOPlus);
            setAncestor(vOPlus, v);
            double shift = (getPrelim(vIMinus) + sIMinus)
                    - (getPrelim(vIPlus) + sIPlus)
                    + getDistance(vIMinus, vIPlus);
            if (shift > 0) {
                moveSubtree(ancestor(vIMinus, v, parentOfV, defaultAncestor),
                        v, parentOfV, shift);
                sIPlus = sIPlus + shift;
                sOPlus = sOPlus + shift;
            }
            sIMinus = sIMinus + getMod(vIMinus);
            sIPlus = sIPlus + getMod(vIPlus);
            sOMinus = sOMinus + getMod(vOMinus);
            sOPlus = sOPlus + getMod(vOPlus);
            nextRightVIMinus = nextRight(vIMinus);
            nextLeftVIPlus = nextLeft(vIPlus);
        }
        if (nextRightVIMinus != null && nextRight(vOPlus) == null) {
            setThread(vOPlus, nextRightVIMinus);
            setMod(vOPlus, getMod(vOPlus) + sIMinus - sOPlus);
        }
        if (nextLeftVIPlus != null && nextLeft(vOMinus) == null) {
            setThread(vOMinus, nextLeftVIPlus);
            setMod(vOMinus, getMod(vOMinus) + sIPlus - sOMinus);
            defaultAncestor = v;
        }
        return defaultAncestor;
    }

    /**
     * Applies the shift/change records accumulated by moveSubtree to the
     * children of v (right to left).
     *
     * @param v
     *            [!tree.isLeaf(v)]
     */
    private void executeShifts(TreeNode v) {
        double shift = 0;
        double change = 0;
        for (TreeNode w : tree.getChildrenReverse(v)) {
            change = change + getChange(w);
            setPrelim(w, getPrelim(w) + shift);
            setMod(w, getMod(w) + shift);
            shift = shift + getShift(w) + change;
        }
    }

    /**
     * Bottom-up pass computing preliminary x positions. In difference to the
     * original algorithm we also pass in the leftSibling (see
     * {@link #apportion(Object, Object, Object, Object)} for a motivation).
     *
     * @param v
     * @param leftSibling
     *            [nullable] the left sibling v, if there is any
     */
    private void firstWalk(TreeNode v, TreeNode leftSibling) {
        if (tree.isLeaf(v)) {
            // No need to set prelim(v) to 0 as the getter takes care of this.
            TreeNode w = leftSibling;
            if (w != null) {
                // v has left sibling
                setPrelim(v, getPrelim(w) + getDistance(v, w));
            }
        } else {
            // v is not a leaf
            TreeNode defaultAncestor = tree.getFirstChild(v);
            TreeNode previousChild = null;
            for (TreeNode w : tree.getChildren(v)) {
                firstWalk(w, previousChild);
                defaultAncestor = apportion(w, defaultAncestor, previousChild,
                        v);
                previousChild = w;
            }
            executeShifts(v);
            double midpoint = (getPrelim(tree.getFirstChild(v)) + getPrelim(tree
                    .getLastChild(v))) / 2.0;
            TreeNode w = leftSibling;
            if (w != null) {
                // v has left sibling
                setPrelim(v, getPrelim(w) + getDistance(v, w));
                setMod(v, getPrelim(v) - midpoint);
            } else {
                // v has no left sibling
                setPrelim(v, midpoint);
            }
        }
    }

    /**
     * Top-down pass computing final positions. In difference to the original
     * algorithm we also pass in extra level information.
     *
     * @param v
     * @param m
     *            accumulated modifier along the path from the root
     * @param level
     * @param levelStart
     *            coordinate where this level begins (along the level axis)
     */
    private void secondWalk(TreeNode v, double m, int level, double levelStart) {
        // construct the position from the prelim and the level information
        // The rootLocation affects the way how x and y are changed and in what
        // direction.
        double levelChangeSign = getLevelChangeSign();
        boolean levelChangeOnYAxis = isLevelChangeInYAxis();
        double levelSize = getSizeOfLevel(level);
        double x = getPrelim(v) + m;
        double y;
        AlignmentInLevel alignment = configuration.getAlignmentInLevel();
        if (alignment == AlignmentInLevel.Center) {
            y = levelStart + levelChangeSign * (levelSize / 2);
        } else if (alignment == AlignmentInLevel.TowardsRoot) {
            y = levelStart + levelChangeSign * (getNodeThickness(v) / 2);
        } else {
            y = levelStart + levelSize - levelChangeSign
                    * (getNodeThickness(v) / 2);
        }
        if (!levelChangeOnYAxis) {
            // root at Left/Right: the level axis is x, so swap coordinates
            double t = x;
            x = y;
            y = t;
        }
        positions.put(v, new NormalizedPosition(x, y));
        // update the bounds
        updateBounds(v, x, y);
        // recurse
        if (!tree.isLeaf(v)) {
            double nextLevelStart = levelStart
                    + (levelSize + configuration.getGapBetweenLevels(level + 1))
                    * levelChangeSign;
            for (TreeNode w : tree.getChildren(v)) {
                secondWalk(w, m + getMod(v), level + 1, nextLevelStart);
            }
        }
    }

    // ------------------------------------------------------------------------
    // nodeBounds

    private Map<TreeNode, Rectangle2D.Double> nodeBounds;

    /**
     * Returns the layout of the tree nodes by mapping each node of the tree to
     * its bounds (position and size).
     * <p>
     * For each rectangle x and y will be >= 0. At least one rectangle will have
     * an x == 0 and at least one rectangle will have an y == 0.
     *
     * @return maps each node of the tree to its bounds (position and size).
     */
    public Map<TreeNode, Rectangle2D.Double> getNodeBounds() {
        if (nodeBounds == null) {
            // computed lazily and cached; positions are final after the
            // constructor ran, so the cache never goes stale
            nodeBounds = new HashMap<TreeNode, Rectangle2D.Double>();
            for (Entry<TreeNode, Point2D> entry : positions.entrySet()) {
                TreeNode node = entry.getKey();
                Point2D pos = entry.getValue();
                double w = getNodeWidth(node);
                double h = getNodeHeight(node);
                double x = pos.getX() - w / 2;
                double y = pos.getY() - h / 2;
                nodeBounds.put(node, new Rectangle2D.Double(x, y, w, h));
            }
        }
        return nodeBounds;
    }

    // ------------------------------------------------------------------------
    // constructor

    /**
     * Creates a TreeLayout for a given tree.
     * <p>
     * In addition to the tree the {@link NodeExtentProvider} and the
     * {@link Configuration} must be given.
     */
    public TreeLayout(TreeForTreeLayout<TreeNode> tree,
            NodeExtentProvider<TreeNode> nodeExtentProvider,
            Configuration<TreeNode> configuration) {
        this.tree = tree;
        this.nodeExtentProvider = nodeExtentProvider;
        this.configuration = configuration;

        // No need to explicitly set mod, thread and ancestor as their getters
        // are taking care of the initial values. This avoids a full tree walk
        // through and saves some memory as no entries are added for
        // "initial values".

        TreeNode r = tree.getRoot();
        firstWalk(r, null);
        calcSizeOfLevels(r, 0);
        secondWalk(r, -getPrelim(r), 0, 0);
    }
}
|
add checkTree and dumpTree to TreeLayout, to support debugging
|
org.abego.treelayout/src/main/java/org/abego/treelayout/TreeLayout.java
|
add checkTree and dumpTree to TreeLayout, to support debugging
|
|
Java
|
bsd-3-clause
|
dd091e5e88c1c322a4fb0227df359263f153d448
| 0
|
liry/gooddata-java,martiner/gooddata-java,standevgd/gooddata-java
|
/*
* Copyright (C) 2007-2015, GoodData(R) Corporation. All rights reserved.
*/
package com.gooddata.featureflag;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import org.springframework.web.util.UriTemplate;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import static com.gooddata.util.Validate.notEmpty;
import static org.springframework.util.Assert.notNull;
@JsonTypeInfo(include = JsonTypeInfo.As.WRAPPER_OBJECT, use = JsonTypeInfo.Id.NAME)
@JsonTypeName("featureFlags")
@JsonIgnoreProperties(ignoreUnknown = true)
public class FeatureFlags implements Iterable<FeatureFlag> {

    public static final String AGGREGATED_FEATURE_FLAGS_URI = "/gdc/internal/projects/{projectId}/featureFlags";
    public static final UriTemplate AGGREGATED_FEATURE_FLAGS_TEMPLATE = new UriTemplate(AGGREGATED_FEATURE_FLAGS_URI);

    // final and initialized inline, therefore never null
    private final List<FeatureFlag> featureFlags = new LinkedList<>();

    /* protected helper method for JSON deserialization */
    @JsonAnySetter
    protected void addFlag(final String name, final boolean enabled) {
        notNull(name);
        featureFlags.add(new FeatureFlag(name, enabled));
    }

    @Override
    public Iterator<FeatureFlag> iterator() {
        return featureFlags.iterator();
    }

    /**
     * Returns true if the feature flag with given name exists and is enabled, false otherwise.
     *
     * @param flagName the name of feature flag
     * @return true if the feature flag with given name exists and is enabled, false otherwise
     */
    public boolean isEnabled(final String flagName) {
        notEmpty(flagName, "flagName");
        for (final FeatureFlag flag : featureFlags) {
            if (flagName.equalsIgnoreCase(flag.getName())) {
                return flag.isEnabled();
            }
        }
        return false;
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        final FeatureFlags that = (FeatureFlags) o;
        // FIX: featureFlags is final and always initialized, so the former
        // double-negated null-check expression was dead code; delegate to
        // List.equals directly.
        return featureFlags.equals(that.featureFlags);
    }

    @Override
    public int hashCode() {
        // featureFlags is never null (see equals), so no null check is needed
        return featureFlags.hashCode();
    }

    @Override
    public String toString() {
        return "FeatureFlags{" + featureFlags + "}";
    }
}
|
src/main/java/com/gooddata/featureflag/FeatureFlags.java
|
/*
* Copyright (C) 2007-2015, GoodData(R) Corporation. All rights reserved.
*/
package com.gooddata.featureflag;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import org.springframework.web.util.UriTemplate;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import static com.gooddata.util.Validate.notEmpty;
import static org.springframework.util.Assert.notNull;
/**
 * Aggregated feature flags for a project, deserialized from the
 * "featureFlags" JSON wrapper object. Iterating this object yields the
 * individual {@link FeatureFlag} entries.
 */
@JsonTypeInfo(include = JsonTypeInfo.As.WRAPPER_OBJECT, use = JsonTypeInfo.Id.NAME)
@JsonTypeName("featureFlags")
@JsonIgnoreProperties(ignoreUnknown = true)
public class FeatureFlags implements Iterable<FeatureFlag> {
    public static final String AGGREGATED_FEATURE_FLAGS_URI = "/gdc/internal/projects/{projectId}/featureFlags";
    public static final UriTemplate AGGREGATED_FEATURE_FLAGS_TEMPLATE = new UriTemplate(AGGREGATED_FEATURE_FLAGS_URI);
    // backing store; final and initialized inline, so it is never null
    private final List<FeatureFlag> featureFlags = new LinkedList<>();
    /* protected helper method for JSON deserialization */
    // Jackson calls this once per unknown JSON property (flag name -> enabled).
    @JsonAnySetter
    protected void addFlag(final String name, final boolean enabled) {
        notNull(name);
        featureFlags.add(new FeatureFlag(name, enabled));
    }
    /**
     * @deprecated use {@link #isEnabled(String)} method or {@link Iterable} feature of this class
     * @return list of feature flags
     */
    // NOTE(review): returns the internal mutable list directly, so callers can
    // modify this object's state; one more reason to prefer the replacements
    // named in @deprecated.
    @Deprecated
    public List<FeatureFlag> getFeatureFlags() {
        return featureFlags;
    }
    @Override
    public Iterator<FeatureFlag> iterator() {
        return featureFlags.iterator();
    }
    /**
     * Returns true if the feature flag with given name exists and is enabled, false otherwise.
     *
     * @param flagName the name of feature flag
     * @return true if the feature flag with given name exists and is enabled, false otherwise
     */
    public boolean isEnabled(final String flagName) {
        notEmpty(flagName, "flagName");
        // flag names are matched case-insensitively
        for (final FeatureFlag flag : featureFlags) {
            if (flagName.equalsIgnoreCase(flag.getName())) {
                return flag.isEnabled();
            }
        }
        return false;
    }
    @Override
    public boolean equals(final Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        final FeatureFlags that = (FeatureFlags) o;
        // equality is defined by the (ordered) list of flags
        return !(featureFlags != null ? !featureFlags.equals(that.featureFlags) : that.featureFlags != null);
    }
    @Override
    public int hashCode() {
        return featureFlags != null ? featureFlags.hashCode() : 0;
    }
    @Override
    public String toString() {
        return "FeatureFlags{" + featureFlags + "}";
    }
}
remove deprecated FeatureFlags.getFeatureFlags
use isEnabled(String) method or Iterable feature of FeatureFlags class
|
src/main/java/com/gooddata/featureflag/FeatureFlags.java
|
remove deprecated FeatureFlags.getFeatureFlags
|
|
Java
|
bsd-3-clause
|
f7b7ed2731354ec53e101798e5f5df22fed5891e
| 0
|
NCIP/cab2b,NCIP/cab2b,NCIP/cab2b
|
package edu.wustl.cab2b.client.ui.experiment;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Font;
import java.awt.Insets;
import java.rmi.RemoteException;
import javax.swing.JFrame;
import javax.swing.JSplitPane;
import edu.wustl.cab2b.common.domain.Experiment;
import edu.wustl.cab2b.common.ejb.EjbNamesConstants;
import edu.wustl.cab2b.common.exception.CheckedException;
import edu.wustl.cab2b.common.experiment.ExperimentBusinessInterface;
import edu.wustl.cab2b.common.experiment.ExperimentHome;
import edu.wustl.cab2b.client.ui.controls.Cab2bButton;
import edu.wustl.cab2b.client.ui.controls.Cab2bLabel;
import edu.wustl.cab2b.client.ui.controls.Cab2bPanel;
import edu.wustl.cab2b.client.ui.controls.Cab2bTitledPanel;
import edu.wustl.cab2b.client.ui.controls.CustomizableBorder;
import edu.wustl.cab2b.client.ui.util.CommonUtils;
import edu.wustl.common.tree.ExperimentTreeNode;
import edu.wustl.common.util.dbManager.DAOException;
/**
* A panel to display details of the selected experiment
* @author Deepak_Shingan
*/
/**
 * A panel to display details of the selected experiment: a title strip
 * (name, creation and last-update dates, "Add Data" button) above a split
 * pane with the experiment stack box on the left and the data-category grid
 * on the right.
 */
public class ExperimentOpenPanel extends Cab2bTitledPanel {
    /*Left hand side stack box used to display data category, Filter data, Analysed data*/
    ExperimentStackBox experimentStackBox;
    /*Panel to display experiment details for selected data category node*/
    ExperimentDataCategoryGridPanel experimentDataCategoryGridPanel = null;
    /*Panel to display experiment details like Experiment Name, Created Date, Last Modified Date */
    Cab2bPanel experimentTitlePanel;
    Cab2bButton addDataButton;
    /*user selected experiment object*/
    Experiment selectedExperiment = null;
    // tree node whose identifier selects which experiment to load in initGUI()
    ExperimentTreeNode m_ExperimentTreeNodeObj;
    // parent panel used as the owner for error dialogs
    ExperimentDetailsPanel m_parentPanel;
    /*experiment BusinessInterface*/
    ExperimentBusinessInterface expBus;
    JSplitPane splitPane;
    // NOTE(review): this constructor leaves m_ExperimentTreeNodeObj null, yet
    // initGUI() dereferences it (m_ExperimentTreeNodeObj.getIdentifier()), so
    // this path throws a NullPointerException; only main(..) uses it.
    public ExperimentOpenPanel()
    {
        initGUI();
    }
    /**
     * Builds the panel for the experiment referenced by the given tree node.
     */
    public ExperimentOpenPanel(ExperimentTreeNode Obj, ExperimentDetailsPanel parentPanel)
    {
        m_ExperimentTreeNodeObj = Obj;
        m_parentPanel = parentPanel;
        initGUI();
    }
    /**
     * Returns the grid panel created in initGUI().
     */
    public ExperimentDataCategoryGridPanel getExperimentDataCategoryGridPanel()
    {
        return this.experimentDataCategoryGridPanel;
    }
    /**
     * Loads the selected experiment via the EJB business interface and builds
     * the whole component tree. Called from both constructors.
     */
    public void initGUI()
    {
        this.setLayout(new BorderLayout());
        /* ejb code : Getting experiment BusinessInterface*/
        expBus = (ExperimentBusinessInterface) CommonUtils
                .getBusinessInterface(EjbNamesConstants.EXPERIMENT, ExperimentHome.class);
        try {
            selectedExperiment = expBus.getExperiment(m_ExperimentTreeNodeObj.getIdentifier());
        } catch (RemoteException e) {
            // wrap and show the error to the user, but keep building the UI
            CheckedException checkedException = new CheckedException(e.getMessage());
            CommonUtils.handleException(checkedException, m_parentPanel, true, true,
                    true, false);
            e.printStackTrace();
        } catch (DAOException e) {
            CheckedException checkedException = new CheckedException(e.getMessage());
            CommonUtils.handleException(checkedException,m_parentPanel, true, true,
                    true, false);
            e.printStackTrace();
        }
        //ejb code end
        // NOTE(review): if getExperiment(..) failed above, selectedExperiment
        // is still null and the calls below will throw NullPointerException.
        experimentTitlePanel = new Cab2bPanel();
        /*Adding Experiment name*/
        Cab2bLabel experimentLabel = new Cab2bLabel(selectedExperiment.getName());
        experimentLabel.setForeground(Color.blue);
        // title rendered 4pt larger and bold
        Font textFont = new Font (experimentLabel.getFont().getName(),Font.BOLD,experimentLabel.getFont().getSize()+4);
        experimentLabel.setFont(textFont);
        experimentTitlePanel.add("br br hfill",experimentLabel);
        /*Adding Experiment Creation Date*/
        Cab2bLabel experimentCreatedOn = new Cab2bLabel("Created On :" + selectedExperiment.getCreatedOn().toString());
        experimentTitlePanel.add("tab tab tab tab hfill ",experimentCreatedOn);
        /*Adding Experiment Last modification Date*/
        Cab2bLabel experimentModifiedOn = new Cab2bLabel("Last Updated :"+ selectedExperiment.getLastUpdatedOn().toString());
        experimentTitlePanel.add("tab tab tab tab hfill",experimentModifiedOn);
        // button starts disabled; nothing in this class enables it
        addDataButton = new Cab2bButton("Add Data");
        addDataButton.setEnabled(false);
        experimentTitlePanel.add(addDataButton);
        /*Adding Experiment grid panel*/
        experimentDataCategoryGridPanel = new ExperimentDataCategoryGridPanel();
        experimentDataCategoryGridPanel.setBorder(new CustomizableBorder(new Insets(1,1,1,1), true, true));
        /*Adding Experiment Stack box panel*/
        this.add(experimentTitlePanel, BorderLayout.NORTH );
        experimentStackBox = new ExperimentStackBox(expBus,selectedExperiment,experimentDataCategoryGridPanel);
        experimentStackBox.setBorder(new CustomizableBorder(new Insets(1,1,1,1), true, true));
        // stack box left, grid right; divider at 20% of the width
        splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, experimentStackBox, experimentDataCategoryGridPanel);
        splitPane.setOneTouchExpandable(true);
        splitPane.setDividerLocation(0.2D);
        this.add(splitPane);
    }
    /**
     * Ad-hoc manual test entry point: shows the panel in a frame.
     * NOTE(review): uses the no-arg constructor, so this currently fails with
     * a NullPointerException in initGUI() (see the no-arg constructor note).
     */
    public static void main(String str[])
    {
        ExperimentOpenPanel expDetPanel = null;
        expDetPanel = new ExperimentOpenPanel();
        JFrame frame = new JFrame("Experiment");
        frame.setSize(600,600);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.getContentPane().add(expDetPanel);
        frame.setVisible(true);
    }
}
|
source/client/main/edu/wustl/cab2b/client/ui/experiment/ExperimentOpenPanel.java
|
package edu.wustl.cab2b.client.ui.experiment;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Font;
import java.awt.Insets;
import java.rmi.RemoteException;
import javax.swing.JFrame;
import javax.swing.JSplitPane;
import edu.wustl.cab2b.common.domain.Experiment;
import edu.wustl.cab2b.common.ejb.EjbNamesConstants;
import edu.wustl.cab2b.common.exception.CheckedException;
import edu.wustl.cab2b.common.experiment.ExperimentBusinessInterface;
import edu.wustl.cab2b.common.experiment.ExperimentHome;
import edu.wustl.cab2b.client.ui.controls.Cab2bButton;
import edu.wustl.cab2b.client.ui.controls.Cab2bLabel;
import edu.wustl.cab2b.client.ui.controls.Cab2bPanel;
import edu.wustl.cab2b.client.ui.controls.Cab2bTitledPanel;
import edu.wustl.cab2b.client.ui.controls.CustomizableBorder;
import edu.wustl.cab2b.client.ui.util.CommonUtils;
import edu.wustl.common.tree.ExperimentTreeNode;
import edu.wustl.common.util.dbManager.DAOException;
/**
* A panel to display details of the selected experiment
* @author Deepak_Shingan
*/
/**
 * Panel that shows the details of the experiment selected in the experiment
 * tree: a title strip (experiment name, created date, last-updated date and an
 * "Add Data" button) above a split pane with the category stack box on the
 * left and the data-category grid on the right.
 *
 * @author Deepak_Shingan
 */
public class ExperimentOpenPanel extends Cab2bTitledPanel {
    /* Left hand side stack box used to display data category, filter data, analysed data. */
    ExperimentStackBox experimentStackBox;
    /* Panel to display experiment details for the selected data category node. */
    ExperimentDataCategoryGridPanel experimentDataCategoryGridPanel = null;
    /* Panel to display experiment details like experiment name, created date, last modified date. */
    Cab2bPanel experimentTitlePanel;
    Cab2bButton addDataButton;
    /* User selected experiment object. */
    Experiment selectedExperiment = null;
    // Tree node whose identifier is used to load the experiment from the EJB tier.
    ExperimentTreeNode m_ExperimentTreeNodeObj;
    // Parent panel used as the anchor for error dialogs.
    ExperimentDetailsPanel m_parentPanel;
    /* Experiment business interface (remote EJB facade). */
    ExperimentBusinessInterface expBus;
    JSplitPane splitPane;

    // NOTE(review): initGUI() dereferences m_ExperimentTreeNodeObj, which is
    // still null here, so this no-arg constructor (used by main below) throws
    // a NullPointerException -- confirm whether it is still needed.
    public ExperimentOpenPanel()
    {
        initGUI();
    }

    public ExperimentOpenPanel(ExperimentTreeNode Obj, ExperimentDetailsPanel parentPanel)
    {
        m_ExperimentTreeNodeObj = Obj;
        m_parentPanel = parentPanel;
        initGUI();
    }

    /**
     * Builds the whole panel: fetches the selected experiment through the
     * experiment EJB, then lays out the title strip (NORTH) and the
     * stack-box/grid split pane (CENTER).
     */
    public void initGUI()
    {
        this.setLayout(new BorderLayout());
        /* EJB code: getting the experiment business interface. */
        expBus = (ExperimentBusinessInterface) CommonUtils
                .getBusinessInterface(EjbNamesConstants.EXPERIMENT, ExperimentHome.class);
        try {
            selectedExperiment = expBus.getExperiment(m_ExperimentTreeNodeObj.getIdentifier());
        } catch (RemoteException e) {
            // Remote failure: report through the common error dialog.
            CheckedException checkedException = new CheckedException(e.getMessage());
            CommonUtils.handleException(checkedException, m_parentPanel, true, true,
                    true, false);
            e.printStackTrace();
        } catch (DAOException e) {
            // Persistence failure: same reporting path as above.
            CheckedException checkedException = new CheckedException(e.getMessage());
            CommonUtils.handleException(checkedException, m_parentPanel, true, true,
                    true, false);
            e.printStackTrace();
        }
        // EJB code end.
        experimentTitlePanel = new Cab2bPanel();
        /* Adding experiment name. */
        Cab2bLabel experimentLabel = new Cab2bLabel(selectedExperiment.getName());
        experimentLabel.setForeground(Color.blue);
        // Bump the font four points and make it bold so the name reads as a title.
        Font textFont = new Font(experimentLabel.getFont().getName(), Font.BOLD, experimentLabel.getFont().getSize() + 4);
        experimentLabel.setFont(textFont);
        experimentTitlePanel.add("br br hfill", experimentLabel);
        /* Adding experiment creation date. */
        Cab2bLabel experimentCreatedOn = new Cab2bLabel("Created On :" + selectedExperiment.getCreatedOn().toString());
        experimentTitlePanel.add("tab tab tab tab hfill ", experimentCreatedOn);
        /* Adding experiment last modification date. */
        Cab2bLabel experimentModifiedOn = new Cab2bLabel("Last Updated :" + selectedExperiment.getLastUpdatedOn().toString());
        experimentTitlePanel.add("tab tab tab tab hfill", experimentModifiedOn);
        addDataButton = new Cab2bButton("Add Data");
        experimentTitlePanel.add(addDataButton);
        /* Adding experiment stack box panel. */
        this.add(experimentTitlePanel, BorderLayout.NORTH);
        experimentStackBox = new ExperimentStackBox(expBus, selectedExperiment);
        experimentStackBox.setBorder(new CustomizableBorder(new Insets(1, 1, 1, 1), true, true));
        /* Adding experiment grid panel. */
        experimentDataCategoryGridPanel = new ExperimentDataCategoryGridPanel();
        experimentDataCategoryGridPanel.setBorder(new CustomizableBorder(new Insets(1, 1, 1, 1), true, true));
        splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, experimentStackBox, experimentDataCategoryGridPanel);
        splitPane.setOneTouchExpandable(true);
        splitPane.setDividerLocation(0.2D);
        this.add(splitPane);
    }

    /** Stand-alone smoke test: shows the panel in a simple frame. */
    public static void main(String str[])
    {
        ExperimentOpenPanel expDetPanel = null;
        expDetPanel = new ExperimentOpenPanel();
        JFrame frame = new JFrame("Experiment");
        frame.setSize(600, 600);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.getContentPane().add(expDetPanel);
        frame.setVisible(true);
    }
}
|
added getExperimentDataCategoryGridPanel() method
|
source/client/main/edu/wustl/cab2b/client/ui/experiment/ExperimentOpenPanel.java
|
added getExperimentDataCategoryGridPanel() method
|
|
Java
|
mit
|
1285319fc80b4a99aceb883c014ca1675fb242ac
| 0
|
ctlab/sgmwcs-solver,ctlab/gmwcs-solver,ctlab/sgmwcs-solver
|
package ru.ifmo.ctddev.gmwcs;
import ilog.concert.IloException;
import org.jgrapht.UndirectedGraph;
import org.jgrapht.alg.BiconnectivityInspector;
import org.jgrapht.alg.ConnectivityInspector;
import org.jgrapht.graph.SimpleGraph;
import org.jgrapht.graph.UndirectedSubgraph;
import java.util.*;
/**
 * Base solver for the maximum-weight connected subgraph problem. The input
 * graph is decomposed into connected components, each of which is further
 * decomposed along its block-cut tree; concrete subclasses only have to solve
 * individual biconnected components.
 */
public abstract class Solver {
    protected int threads;

    /**
     * Solves a single biconnected component.
     *
     * @param graph the biconnected component to solve
     * @param root  node forced into the solution, or {@code null} for the unrooted problem
     * @param tl    time limit in seconds, negative meaning "no limit"
     * @return units (nodes and edges) of the best solution found
     * @throws IloException on solver failure
     */
    protected abstract List<Unit> solveBiComponent(UndirectedGraph<Node, Edge> graph, Node root, double tl) throws IloException;

    /**
     * Solves the problem on the whole graph and returns the best solution over
     * all connected components.
     *
     * @param graph   the input graph
     * @param threads number of solver threads to use
     * @param tl      overall time limit in seconds, negative meaning "no limit"
     * @return best solution found, or {@code null} if no solution has positive weight
     * @throws IloException on solver failure
     */
    public List<Unit> solve(UndirectedGraph<Node, Edge> graph, int threads, double tl) throws IloException {
        List<Unit> best = null;
        this.threads = threads;
        double maxWeight = 0.0;
        ConnectivityInspector<Node, Edge> inspector = new ConnectivityInspector<>(graph);
        TimeLimiter limiter = new TimeLimiter(tl);
        int nodeRemains = graph.vertexSet().size();
        List<Set<Node>> connectedSets = new ArrayList<>();
        connectedSets.addAll(inspector.connectedSets());
        // Process smaller components first so the proportional time budget stays sane.
        Collections.sort(connectedSets, new SetComparator<Node>());
        for (Set<Node> component : connectedSets) {
            // Share the remaining time proportionally to the component size.
            double fraction = (limiter.getRemainingTime() / nodeRemains) * component.size();
            nodeRemains -= component.size();
            Set<Edge> edges = new LinkedHashSet<>();
            for (Edge edge : graph.edgeSet()) {
                // Both endpoints lie in the same connected component,
                // so checking the source alone is sufficient.
                if (component.contains(graph.getEdgeSource(edge))) {
                    edges.add(edge);
                }
            }
            UndirectedGraph<Node, Edge> subgraph = new UndirectedSubgraph<>(graph, component, edges);
            List<Unit> solution = solveComponent(clone(subgraph), tl < 0 ? -1 : fraction, limiter);
            if (sum(solution) > maxWeight) {
                maxWeight = sum(solution);
                best = solution;
            }
        }
        checkConnectivity(graph, best);
        return best;
    }

    /**
     * Solves one connected component by repeatedly peeling off a leaf of its
     * block-cut tree (a biconnected component containing exactly one
     * cutpoint), solving it, and absorbing the rooted solution into the
     * cutpoint so later components can still profit from it.
     */
    private List<Unit> solveComponent(UndirectedGraph<Node, Edge> graph, double tl,
                                      TimeLimiter limiter) throws IloException {
        BiconnectivityInspector<Node, Edge> inspector = new BiconnectivityInspector<>(graph);
        Set<Node> cutpoints = inspector.getCutpoints();
        List<Set<Node>> components = new ArrayList<>();
        components.addAll(inspector.getBiconnectedVertexComponents());
        Collections.sort(components, new SetComparator<Node>());
        List<Unit> best = null;
        int nodeRemains = graph.vertexSet().size() + components.size() - 1;
        while (components.size() > 1) {
            // Find a leaf of the block-cut tree: a biconnected component with
            // exactly one cutpoint. Such a component always exists while more
            // than one component remains.
            Set<Node> component = null;
            Node cutpoint = null;
            for (Set<Node> comp : components) {
                int cutNodes = 0;
                for (Node point : cutpoints) {
                    if (comp.contains(point)) {
                        cutNodes++;
                        cutpoint = point;
                        if (cutNodes == 2) {
                            break;
                        }
                    }
                }
                if (cutNodes == 1) {
                    component = new LinkedHashSet<>();
                    component.addAll(comp);
                    break;
                }
            }
            components.remove(component);
            // The cutpoint stops being a cutpoint once fewer than two of the
            // remaining components still share it.
            int cnt = 0;
            for (Set<Node> comp : components) {
                if (comp.contains(cutpoint)) {
                    cnt++;
                    if (cnt > 1) {
                        break;
                    }
                }
            }
            if (cnt < 2) {
                cutpoints.remove(cutpoint);
            }
            Set<Edge> edgeSet = new LinkedHashSet<>();
            for (Edge edge : graph.edgeSet()) {
                Node from = graph.getEdgeSource(edge);
                Node to = graph.getEdgeTarget(edge);
                if (component.contains(from) && component.contains(to)) {
                    edgeSet.add(edge);
                }
            }
            // Half of the proportional budget goes to the unrooted run,
            // the other half to the rooted run below.
            double fraction = ((tl / nodeRemains) * component.size()) / 2;
            UndirectedGraph<Node, Edge> subgraph = new UndirectedSubgraph<>(graph, component, edgeSet);
            long timeBefore = System.currentTimeMillis();
            List<Unit> unrooted = solveBiComponent(subgraph, null, tl < 0 ? -1 : fraction);
            if (sum(unrooted) > sum(best)) {
                best = getResult(unrooted);
            }
            List<Unit> rooted;
            if (!unrooted.contains(cutpoint)) {
                rooted = solveBiComponent(subgraph, cutpoint, tl < 0 ? -1 : fraction);
            } else {
                rooted = unrooted;
            }
            long timeAfter = System.currentTimeMillis();
            if (tl > 0) {
                nodeRemains -= component.size();
                double spent = Math.min((timeAfter - timeBefore) / 1000.0, fraction * 2);
                limiter.spend(spent);
                tl -= spent;
            }
            // Contract the processed component into its cutpoint.
            for (Node node : component) {
                if (node != cutpoint) {
                    graph.removeVertex(node);
                }
            }
            if (rooted != null) {
                for (Unit unit : rooted) {
                    if (unit != cutpoint) {
                        // Absorb the unit itself as well as everything it has
                        // already absorbed; dropping the unit itself loses part
                        // of the solution when it is expanded in getResult().
                        cutpoint.addAbsorbedUnit(unit);
                        cutpoint.addAllAbsorbedUnits(unit.getAbsorbedUnits());
                        cutpoint.setWeight(cutpoint.getWeight() + unit.getWeight());
                    }
                }
            }
        }
        // Solve the last remaining (contracted) component with whatever time is left.
        long timeBefore = System.currentTimeMillis();
        List<Unit> unrooted = solveBiComponent(graph, null, tl < 0 ? -1 : tl);
        long timeAfter = System.currentTimeMillis();
        if (tl > 0) {
            limiter.spend(Math.min((timeAfter - timeBefore) / 1000.0, tl));
        }
        if (sum(unrooted) > sum(best)) {
            best = getResult(unrooted);
        }
        return best;
    }

    /** Sanity check: a reported solution must induce a connected subgraph. */
    private void checkConnectivity(UndirectedGraph<Node, Edge> graph, List<Unit> result) {
        Set<Node> nodes = new LinkedHashSet<>();
        Set<Edge> edges = new LinkedHashSet<>();
        if (result == null) {
            return;
        }
        for (Unit unit : result) {
            if (unit instanceof Node) {
                nodes.add((Node) unit);
            } else {
                edges.add((Edge) unit);
            }
        }
        UndirectedGraph<Node, Edge> subgraph = new UndirectedSubgraph<>(graph, nodes, edges);
        ConnectivityInspector<Node, Edge> inspector = new ConnectivityInspector<>(subgraph);
        if (!inspector.isGraphConnected()) {
            throw new IllegalStateException();
        }
    }

    /** Expands each unit of a working solution into the original units it absorbed. */
    private List<Unit> getResult(List<Unit> units) {
        if (units == null) {
            return null;
        }
        List<Unit> result = new ArrayList<>();
        for (Unit unit : units) {
            result.addAll(unit.getAbsorbedUnits());
        }
        return result;
    }

    /**
     * Deep-copies a graph; each copy remembers its original via the absorbed-unit
     * list so solutions can be mapped back with {@link #getResult(List)}.
     */
    private UndirectedGraph<Node, Edge> clone(UndirectedGraph<Node, Edge> source) {
        UndirectedGraph<Node, Edge> graph = new SimpleGraph<>(Edge.class);
        Map<Node, Node> old2new = new LinkedHashMap<>();
        for (Node node : source.vertexSet()) {
            Node newNode = new Node(node.getNum(), node.getWeight());
            newNode.addAbsorbedUnit(node);
            old2new.put(node, newNode);
            graph.addVertex(newNode);
        }
        for (Edge edge : source.edgeSet()) {
            Node from = old2new.get(source.getEdgeSource(edge));
            Node to = old2new.get(source.getEdgeTarget(edge));
            Edge newEdge = new Edge(edge.getNum(), edge.getWeight());
            newEdge.addAbsorbedUnit(edge);
            graph.addEdge(from, to, newEdge);
        }
        return graph;
    }

    /** Total weight of a solution; {@code null} counts as weight 0. */
    private double sum(List<Unit> units) {
        if (units == null) {
            return 0;
        }
        double res = 0;
        for (Unit unit : units) {
            res += unit.getWeight();
        }
        return res;
    }

    /** Orders sets by ascending size. */
    private class SetComparator<E> implements Comparator<Set<E>> {
        @Override
        public int compare(Set<E> o1, Set<E> o2) {
            if (o1.size() < o2.size()) {
                return -1;
            }
            if (o1.size() > o2.size()) {
                return 1;
            }
            return 0;
        }
    }

    /** Simple countdown bookkeeping for the shared time budget (in seconds). */
    private class TimeLimiter {
        private double tl;

        public TimeLimiter(double tl) {
            this.tl = tl;
        }

        public void spend(double time) {
            tl -= time;
        }

        public double getRemainingTime() {
            return tl;
        }
    }
}
|
src/main/java/ru/ifmo/ctddev/gmwcs/Solver.java
|
package ru.ifmo.ctddev.gmwcs;
import ilog.concert.IloException;
import org.jgrapht.UndirectedGraph;
import org.jgrapht.alg.BiconnectivityInspector;
import org.jgrapht.alg.ConnectivityInspector;
import org.jgrapht.graph.SimpleGraph;
import org.jgrapht.graph.UndirectedSubgraph;
import java.util.*;
/**
 * Base solver for the maximum-weight connected subgraph problem. The input
 * graph is decomposed into connected components, each of which is further
 * decomposed along its block-cut tree; concrete subclasses only have to solve
 * individual biconnected components.
 */
public abstract class Solver {
    protected int threads;

    /**
     * Solves a single biconnected component.
     *
     * @param graph the biconnected component to solve
     * @param root  node forced into the solution, or {@code null} for the unrooted problem
     * @param tl    time limit in seconds, negative meaning "no limit"
     * @return units (nodes and edges) of the best solution found
     * @throws IloException on solver failure
     */
    protected abstract List<Unit> solveBiComponent(UndirectedGraph<Node, Edge> graph, Node root, double tl) throws IloException;

    /**
     * Solves the problem on the whole graph and returns the best solution over
     * all connected components.
     *
     * @param graph   the input graph
     * @param threads number of solver threads to use
     * @param tl      overall time limit in seconds, negative meaning "no limit"
     * @return best solution found, or {@code null} if no solution has positive weight
     * @throws IloException on solver failure
     */
    public List<Unit> solve(UndirectedGraph<Node, Edge> graph, int threads, double tl) throws IloException {
        List<Unit> best = null;
        this.threads = threads;
        double maxWeight = 0.0;
        ConnectivityInspector<Node, Edge> inspector = new ConnectivityInspector<>(graph);
        TimeLimiter limiter = new TimeLimiter(tl);
        int nodeRemains = graph.vertexSet().size();
        List<Set<Node>> connectedSets = new ArrayList<>();
        connectedSets.addAll(inspector.connectedSets());
        // Process smaller components first so the proportional time budget stays sane.
        Collections.sort(connectedSets, new SetComparator<Node>());
        for (Set<Node> component : connectedSets) {
            // Share the remaining time proportionally to the component size.
            double fraction = (limiter.getRemainingTime() / nodeRemains) * component.size();
            nodeRemains -= component.size();
            Set<Edge> edges = new LinkedHashSet<>();
            for (Edge edge : graph.edgeSet()) {
                // Both endpoints lie in the same connected component,
                // so checking the source alone is sufficient.
                if (component.contains(graph.getEdgeSource(edge))) {
                    edges.add(edge);
                }
            }
            UndirectedGraph<Node, Edge> subgraph = new UndirectedSubgraph<>(graph, component, edges);
            List<Unit> solution = solveComponent(clone(subgraph), tl < 0 ? -1 : fraction, limiter);
            if (sum(solution) > maxWeight) {
                maxWeight = sum(solution);
                best = solution;
            }
        }
        checkConnectivity(graph, best);
        return best;
    }

    /**
     * Solves one connected component by repeatedly peeling off a leaf of its
     * block-cut tree (a biconnected component containing exactly one
     * cutpoint), solving it, and absorbing the rooted solution into the
     * cutpoint so later components can still profit from it.
     */
    private List<Unit> solveComponent(UndirectedGraph<Node, Edge> graph, double tl,
                                      TimeLimiter limiter) throws IloException {
        BiconnectivityInspector<Node, Edge> inspector = new BiconnectivityInspector<>(graph);
        Set<Node> cutpoints = inspector.getCutpoints();
        List<Set<Node>> components = new ArrayList<>();
        components.addAll(inspector.getBiconnectedVertexComponents());
        Collections.sort(components, new SetComparator<Node>());
        List<Unit> best = null;
        int nodeRemains = graph.vertexSet().size() + components.size() - 1;
        while (components.size() > 1) {
            // Find a leaf of the block-cut tree: a biconnected component with
            // exactly one cutpoint. Such a component always exists while more
            // than one component remains.
            Set<Node> component = null;
            Node cutpoint = null;
            for (Set<Node> comp : components) {
                int cutNodes = 0;
                for (Node point : cutpoints) {
                    if (comp.contains(point)) {
                        cutNodes++;
                        cutpoint = point;
                        if (cutNodes == 2) {
                            break;
                        }
                    }
                }
                if (cutNodes == 1) {
                    component = new LinkedHashSet<>();
                    component.addAll(comp);
                    break;
                }
            }
            components.remove(component);
            // The cutpoint stops being a cutpoint once fewer than two of the
            // remaining components still share it.
            int cnt = 0;
            for (Set<Node> comp : components) {
                if (comp.contains(cutpoint)) {
                    cnt++;
                    if (cnt > 1) {
                        break;
                    }
                }
            }
            if (cnt < 2) {
                cutpoints.remove(cutpoint);
            }
            Set<Edge> edgeSet = new LinkedHashSet<>();
            for (Edge edge : graph.edgeSet()) {
                Node from = graph.getEdgeSource(edge);
                Node to = graph.getEdgeTarget(edge);
                if (component.contains(from) && component.contains(to)) {
                    edgeSet.add(edge);
                }
            }
            // Half of the proportional budget goes to the unrooted run,
            // the other half to the rooted run below.
            double fraction = ((tl / nodeRemains) * component.size()) / 2;
            UndirectedGraph<Node, Edge> subgraph = new UndirectedSubgraph<>(graph, component, edgeSet);
            long timeBefore = System.currentTimeMillis();
            List<Unit> unrooted = solveBiComponent(subgraph, null, tl < 0 ? -1 : fraction);
            if (sum(unrooted) > sum(best)) {
                best = getResult(unrooted);
            }
            List<Unit> rooted;
            if (!unrooted.contains(cutpoint)) {
                rooted = solveBiComponent(subgraph, cutpoint, tl < 0 ? -1 : fraction);
            } else {
                rooted = unrooted;
            }
            long timeAfter = System.currentTimeMillis();
            if (tl > 0) {
                nodeRemains -= component.size();
                double spent = Math.min((timeAfter - timeBefore) / 1000.0, fraction * 2);
                limiter.spend(spent);
                tl -= spent;
            }
            // Contract the processed component into its cutpoint.
            for (Node node : component) {
                if (node != cutpoint) {
                    graph.removeVertex(node);
                }
            }
            if (rooted != null) {
                for (Unit unit : rooted) {
                    if (unit != cutpoint) {
                        // Absorb the unit itself as well as everything it has
                        // already absorbed, so getResult() can expand it later.
                        cutpoint.addAbsorbedUnit(unit);
                        cutpoint.addAllAbsorbedUnits(unit.getAbsorbedUnits());
                        cutpoint.setWeight(cutpoint.getWeight() + unit.getWeight());
                    }
                }
            }
        }
        // Solve the last remaining (contracted) component with whatever time is left.
        long timeBefore = System.currentTimeMillis();
        List<Unit> unrooted = solveBiComponent(graph, null, tl < 0 ? -1 : tl);
        long timeAfter = System.currentTimeMillis();
        if (tl > 0) {
            limiter.spend(Math.min((timeAfter - timeBefore) / 1000.0, tl));
        }
        if (sum(unrooted) > sum(best)) {
            best = getResult(unrooted);
        }
        return best;
    }

    /** Sanity check: a reported solution must induce a connected subgraph. */
    private void checkConnectivity(UndirectedGraph<Node, Edge> graph, List<Unit> result) {
        Set<Node> nodes = new LinkedHashSet<>();
        Set<Edge> edges = new LinkedHashSet<>();
        if (result == null) {
            return;
        }
        for (Unit unit : result) {
            if (unit instanceof Node) {
                nodes.add((Node) unit);
            } else {
                edges.add((Edge) unit);
            }
        }
        UndirectedGraph<Node, Edge> subgraph = new UndirectedSubgraph<>(graph, nodes, edges);
        ConnectivityInspector<Node, Edge> inspector = new ConnectivityInspector<>(subgraph);
        if (!inspector.isGraphConnected()) {
            throw new IllegalStateException();
        }
    }

    /** Expands each unit of a working solution into the original units it absorbed. */
    private List<Unit> getResult(List<Unit> units) {
        if (units == null) {
            return null;
        }
        List<Unit> result = new ArrayList<>();
        for (Unit unit : units) {
            result.addAll(unit.getAbsorbedUnits());
        }
        return result;
    }

    /**
     * Deep-copies a graph; each copy remembers its original via the absorbed-unit
     * list so solutions can be mapped back with {@link #getResult(List)}.
     */
    private UndirectedGraph<Node, Edge> clone(UndirectedGraph<Node, Edge> source) {
        UndirectedGraph<Node, Edge> graph = new SimpleGraph<>(Edge.class);
        Map<Node, Node> old2new = new LinkedHashMap<>();
        for (Node node : source.vertexSet()) {
            Node newNode = new Node(node.getNum(), node.getWeight());
            newNode.addAbsorbedUnit(node);
            old2new.put(node, newNode);
            graph.addVertex(newNode);
        }
        for (Edge edge : source.edgeSet()) {
            Node from = old2new.get(source.getEdgeSource(edge));
            Node to = old2new.get(source.getEdgeTarget(edge));
            Edge newEdge = new Edge(edge.getNum(), edge.getWeight());
            newEdge.addAbsorbedUnit(edge);
            graph.addEdge(from, to, newEdge);
        }
        return graph;
    }

    /** Total weight of a solution; {@code null} counts as weight 0. */
    private double sum(List<Unit> units) {
        if (units == null) {
            return 0;
        }
        double res = 0;
        for (Unit unit : units) {
            res += unit.getWeight();
        }
        return res;
    }

    /** Orders sets by ascending size. */
    private class SetComparator<E> implements Comparator<Set<E>> {
        @Override
        public int compare(Set<E> o1, Set<E> o2) {
            if (o1.size() < o2.size()) {
                return -1;
            }
            if (o1.size() > o2.size()) {
                return 1;
            }
            return 0;
        }
    }

    /** Simple countdown bookkeeping for the shared time budget (in seconds). */
    private class TimeLimiter {
        private double tl;

        public TimeLimiter(double tl) {
            this.tl = tl;
        }

        public void spend(double time) {
            tl -= time;
        }

        public double getRemainingTime() {
            return tl;
        }
    }
}
|
Bug with absorbance fixed
|
src/main/java/ru/ifmo/ctddev/gmwcs/Solver.java
|
Bug with absorbance fixed
|
|
Java
|
mit
|
51a6950bad1d6684a3dac0a11841cc1b017d91ea
| 0
|
amuniz/dry-plugin,jenkinsci/dry-plugin,recena/dry-plugin,recena/dry-plugin,amuniz/dry-plugin,jenkinsci/dry-plugin,recena/dry-plugin,jenkinsci/dry-plugin,amuniz/dry-plugin
|
package hudson.plugins.dry.parser;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import com.google.common.collect.Sets;
import edu.umd.cs.findbugs.annotations.SuppressWarnings;
import hudson.FilePath;
import hudson.plugins.analysis.core.AnnotationParser;
import hudson.plugins.analysis.util.ContextHashCode;
import hudson.plugins.analysis.util.SaxSetup;
import hudson.plugins.analysis.util.model.FileAnnotation;
import hudson.plugins.dry.parser.cpd.CpdParser;
import hudson.plugins.dry.parser.dupfinder.DupFinderParser;
import hudson.plugins.dry.parser.simian.SimianParser;
/**
* Registry for duplication parsers.
*
* @author Ulli Hafner
*/
// CHECKSTYLE:COUPLING-OFF
public class DuplicationParserRegistry implements AnnotationParser {
    // CHECKSTYLE:COUPLING-ON
    private static final long serialVersionUID = -8114361417348412242L;

    /** Registered duplication parsers; the first one that accepts a file wins. */
    @SuppressWarnings("SE")
    private final List<AbstractDryParser> parsers = new ArrayList<AbstractDryParser>();
    /** Workspace root used to resolve relative file names; may be {@code null}. */
    private String workspacePath;
    /** Default encoding used when hashing the source context. */
    private final String defaultEncoding;

    /**
     * Creates a new instance of {@link DuplicationParserRegistry}.
     *
     * @param normalThreshold
     *            minimum number of duplicate lines for normal priority warnings
     * @param highThreshold
     *            minimum number of duplicate lines for high priority warnings
     * @param defaultEncoding
     *            default encoding of the files
     */
    public DuplicationParserRegistry(final int normalThreshold, final int highThreshold, final String defaultEncoding) {
        this.defaultEncoding = defaultEncoding;
        parsers.add(new CpdParser(highThreshold, normalThreshold));
        parsers.add(new CpdParser(highThreshold, normalThreshold, false));
        parsers.add(new SimianParser(highThreshold, normalThreshold));
        parsers.add(new DupFinderParser(highThreshold, normalThreshold));
    }

    /**
     * Creates a new instance of {@link DuplicationParserRegistry}.
     *
     * @param normalThreshold
     *            minimum number of duplicate lines for normal priority warnings
     * @param highThreshold
     *            minimum number of duplicate lines for high priority warnings
     * @param workspacePath
     *            path to the workspace files
     * @param defaultEncoding
     *            default encoding of the files
     */
    public DuplicationParserRegistry(final int normalThreshold, final int highThreshold,
            final String workspacePath, final String defaultEncoding) {
        this(normalThreshold, highThreshold, defaultEncoding);
        this.workspacePath = workspacePath;
    }

    /**
     * Parses the specified results file with the first registered parser that
     * accepts its format and computes a context hash code for each duplication.
     *
     * @throws InvocationTargetException wrapping any I/O failure or the absence
     *         of a matching parser
     */
    @Override
    public Collection<FileAnnotation> parse(final File file, final String moduleName) throws InvocationTargetException {
        SaxSetup sax = new SaxSetup();
        try {
            for (AbstractDryParser parser : parsers) {
                // Probe with a dedicated stream that is always closed; the
                // previous code leaked one open stream per rejected parser.
                boolean accepted;
                FileInputStream probe = new FileInputStream(file);
                try {
                    accepted = parser.accepts(probe);
                }
                finally {
                    IOUtils.closeQuietly(probe);
                }
                if (accepted) {
                    FileInputStream inputStream = new FileInputStream(file);
                    try {
                        Collection<DuplicateCode> result = parser.parse(inputStream, moduleName);
                        createLinkNames(result);
                        Set<FileAnnotation> warnings = Sets.newHashSet();
                        warnings.addAll(result);

                        ContextHashCode hashCode = new ContextHashCode();
                        for (FileAnnotation duplication : warnings) {
                            String fullPath = getFullPath(duplication);
                            long value = hashCode.create(fullPath,
                                    duplication.getPrimaryLineNumber(), defaultEncoding);
                            // Mix in the duplication size so overlapping duplications
                            // of different lengths get distinct hashes (JENKINS-24873).
                            duplication.setContextHashCode(value * 31 + (((DuplicateCode)duplication).getNumberOfLines()));
                        }
                        return warnings;
                    }
                    finally {
                        IOUtils.closeQuietly(inputStream);
                    }
                }
            }
            throw new IOException("No parser found for duplicated code results file " + file.getAbsolutePath());
        }
        catch (IOException exception) {
            throw new InvocationTargetException(exception);
        }
        finally {
            sax.cleanup();
        }
    }

    /**
     * Gets full file path.
     *
     * @param annotation
     *            the annotation
     * @return results full file path
     */
    private String getFullPath(final FileAnnotation annotation) {
        String fileName = annotation.getFileName();
        File file = new File(fileName);
        if (file.isAbsolute()) {
            return fileName;
        }
        else {
            // Relative names are resolved against the workspace root.
            FilePath filePath = new FilePath(new File(workspacePath));
            FilePath fullPath = filePath.child(fileName);
            return fullPath.getRemote();
        }
    }

    /**
     * For each duplicate code annotation that does not have a package name
     * (i.e., for non Java sources), a link name is generated.
     *
     * @param result the annotations
     */
    private void createLinkNames(final Collection<DuplicateCode> result) {
        if (workspacePath != null) {
            for (FileAnnotation duplication : result) {
                duplication.setPathName(workspacePath);
            }
        }
    }
}
|
src/main/java/hudson/plugins/dry/parser/DuplicationParserRegistry.java
|
package hudson.plugins.dry.parser;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import com.google.common.collect.Sets;
import edu.umd.cs.findbugs.annotations.SuppressWarnings;
import hudson.FilePath;
import hudson.plugins.analysis.core.AnnotationParser;
import hudson.plugins.analysis.util.ContextHashCode;
import hudson.plugins.analysis.util.SaxSetup;
import hudson.plugins.analysis.util.model.FileAnnotation;
import hudson.plugins.dry.parser.cpd.CpdParser;
import hudson.plugins.dry.parser.dupfinder.DupFinderParser;
import hudson.plugins.dry.parser.simian.SimianParser;
/**
* Registry for duplication parsers.
*
* @author Ulli Hafner
*/
// CHECKSTYLE:COUPLING-OFF
public class DuplicationParserRegistry implements AnnotationParser {
    // CHECKSTYLE:COUPLING-ON
    private static final long serialVersionUID = -8114361417348412242L;

    /** Registered duplication parsers; the first one that accepts a file wins. */
    @SuppressWarnings("SE")
    private final List<AbstractDryParser> parsers = new ArrayList<AbstractDryParser>();
    /** Workspace root used to resolve relative file names; may be {@code null}. */
    private String workspacePath;
    /** Default encoding used when hashing the source context. */
    private final String defaultEncoding;

    /**
     * Creates a new instance of {@link DuplicationParserRegistry}.
     *
     * @param normalThreshold
     *            minimum number of duplicate lines for normal priority warnings
     * @param highThreshold
     *            minimum number of duplicate lines for high priority warnings
     * @param defaultEncoding
     *            default encoding of the files
     */
    public DuplicationParserRegistry(final int normalThreshold, final int highThreshold, final String defaultEncoding) {
        this.defaultEncoding = defaultEncoding;
        parsers.add(new CpdParser(highThreshold, normalThreshold));
        parsers.add(new CpdParser(highThreshold, normalThreshold, false));
        parsers.add(new SimianParser(highThreshold, normalThreshold));
        parsers.add(new DupFinderParser(highThreshold, normalThreshold));
    }

    /**
     * Creates a new instance of {@link DuplicationParserRegistry}.
     *
     * @param normalThreshold
     *            minimum number of duplicate lines for normal priority warnings
     * @param highThreshold
     *            minimum number of duplicate lines for high priority warnings
     * @param workspacePath
     *            path to the workspace files
     * @param defaultEncoding
     *            default encoding of the files
     */
    public DuplicationParserRegistry(final int normalThreshold, final int highThreshold,
            final String workspacePath, final String defaultEncoding) {
        this(normalThreshold, highThreshold, defaultEncoding);
        this.workspacePath = workspacePath;
    }

    /**
     * Parses the specified results file with the first registered parser that
     * accepts its format and computes a context hash code for each duplication.
     *
     * @throws InvocationTargetException wrapping any I/O failure or the absence
     *         of a matching parser
     */
    @Override
    public Collection<FileAnnotation> parse(final File file, final String moduleName) throws InvocationTargetException {
        SaxSetup sax = new SaxSetup();
        try {
            for (AbstractDryParser parser : parsers) {
                // Probe with a dedicated stream that is always closed; the
                // previous code leaked one open stream per rejected parser.
                boolean accepted;
                FileInputStream probe = new FileInputStream(file);
                try {
                    accepted = parser.accepts(probe);
                }
                finally {
                    IOUtils.closeQuietly(probe);
                }
                if (accepted) {
                    FileInputStream inputStream = new FileInputStream(file);
                    try {
                        Collection<DuplicateCode> result = parser.parse(inputStream, moduleName);
                        createLinkNames(result);
                        Set<FileAnnotation> warnings = Sets.newHashSet();
                        warnings.addAll(result);

                        ContextHashCode hashCode = new ContextHashCode();
                        for (FileAnnotation duplication : warnings) {
                            String fullPath = getFullPath(duplication);
                            long value = hashCode.create(fullPath,
                                    duplication.getPrimaryLineNumber(), defaultEncoding);
                            // Mix in the duplication size so overlapping duplications
                            // of different lengths get distinct hashes (JENKINS-24873);
                            // hashing only the context conflated them.
                            duplication.setContextHashCode(value * 31 + (((DuplicateCode)duplication).getNumberOfLines()));
                        }
                        return warnings;
                    }
                    finally {
                        IOUtils.closeQuietly(inputStream);
                    }
                }
            }
            throw new IOException("No parser found for duplicated code results file " + file.getAbsolutePath());
        }
        catch (IOException exception) {
            throw new InvocationTargetException(exception);
        }
        finally {
            sax.cleanup();
        }
    }

    /**
     * Gets full file path.
     *
     * @param annotation
     *            the annotation
     * @return results full file path
     */
    private String getFullPath(final FileAnnotation annotation) {
        String fileName = annotation.getFileName();
        File file = new File(fileName);
        if (file.isAbsolute()) {
            return fileName;
        }
        else {
            // Relative names are resolved against the workspace root.
            FilePath filePath = new FilePath(new File(workspacePath));
            FilePath fullPath = filePath.child(fileName);
            return fullPath.getRemote();
        }
    }

    /**
     * For each duplicate code annotation that does not have a package name
     * (i.e., for non Java sources), a link name is generated.
     *
     * @param result the annotations
     */
    private void createLinkNames(final Collection<DuplicateCode> result) {
        if (workspacePath != null) {
            for (FileAnnotation duplication : result) {
                duplication.setPathName(workspacePath);
            }
        }
    }
}
|
[FIXED JENKINS-24873] Use warning type in hash code computation.
|
src/main/java/hudson/plugins/dry/parser/DuplicationParserRegistry.java
|
[FIXED JENKINS-24873] Use warning type in hash code computation.
|
|
Java
|
mit
|
30bb5950f5a8e73ff5a75c5189e4c91584906c1d
| 0
|
jvoegele/pulse,jvoegele/pulse,fluxroot/pulse,jvoegele/pulse,fluxroot/pulse
|
/*
* Copyright 2013-2014 the original author or authors.
*
* This file is part of Pulse Chess.
*
* Pulse Chess is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Pulse Chess is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Pulse Chess. If not, see <http://www.gnu.org/licenses/>.
*/
package com.fluxchess.pulse;
import com.fluxchess.jcpi.models.GenericBoard;
import com.fluxchess.jcpi.models.GenericColor;
import com.fluxchess.jcpi.models.GenericFile;
import com.fluxchess.jcpi.models.GenericPiece;
import java.security.SecureRandom;
import static com.fluxchess.pulse.Castling.KINGSIDE;
import static com.fluxchess.pulse.Castling.QUEENSIDE;
import static com.fluxchess.pulse.Color.BLACK;
import static com.fluxchess.pulse.Color.WHITE;
/**
* This is our internal board.
*/
public final class Board {
private static final int MAX_GAMEMOVES = Search.MAX_PLY + 1024;
private static final int BOARDSIZE = 128;
final int[] board = new int[BOARDSIZE];
final Bitboard[] pawns = new Bitboard[Color.values.length];
final Bitboard[] knights = new Bitboard[Color.values.length];
final Bitboard[] bishops = new Bitboard[Color.values.length];
final Bitboard[] rooks = new Bitboard[Color.values.length];
final Bitboard[] queens = new Bitboard[Color.values.length];
final Bitboard[] kings = new Bitboard[Color.values.length];
final int[] material = new int[Color.values.length];
final int[][] castlingRights = new int[Color.values.length][Castling.values.length];
int enPassant = Square.NOSQUARE;
int activeColor = WHITE;
int halfMoveClock = 0;
private int halfMoveNumber;
long zobristKey = 0;
private static final long[][] zobristPiece = new long[Piece.values.length][BOARDSIZE];
private static final long[][] zobristCastling = new long[Color.values.length][Castling.values.length];
private static final long[] zobristEnPassant = new long[BOARDSIZE];
private static final long zobristActiveColor;
// We will save some board parameters in a State before making a move.
// Later we will restore them before undoing a move.
private final State[] stack = new State[MAX_GAMEMOVES];
private int stackSize = 0;
    /**
     * Snapshot of the irreversible board parameters (zobrist key, castling
     * rights, en passant square, half-move clock) saved before a move is made
     * and restored when it is undone.
     */
    private static final class State {
        private long zobristKey = 0;
        private final int[][] castlingRights = new int[Color.values.length][Castling.values.length];
        private int enPassant = Square.NOSQUARE;
        private int halfMoveClock = 0;

        private State() {
            // No castling rights by default.
            for (int color : Color.values) {
                for (int castling : Castling.values) {
                    castlingRights[color][castling] = File.NOFILE;
                }
            }
        }
    }
    /**
     * Generator for the 64-bit pseudo-random keys used in zobrist hashing.
     */
    private static final class Zobrist {
        private final SecureRandom random = new SecureRandom();

        private byte[] result() {
            // Generate some random bytes for our keys
            byte[] bytes = new byte[16];
            random.nextBytes(bytes);
            return bytes;
        }

        private long next() {
            byte[] result = result();

            // Fold the random bytes into a single 64-bit value; each byte is
            // XOR-ed into a rotating 8-bit window of the hash.
            long hash = 0L;
            for (int i = 0; i < result.length; ++i) {
                hash ^= ((long) (result[i] & 0xFF)) << ((i * 8) % 64);
            }
            return hash;
        }
    }
    // Initialize the zobrist keys once for all Board instances: one key per
    // (piece, square) pair, per castling right, per en passant square, and one
    // for the side to move.
    static {
        Zobrist zobrist = new Zobrist();
        for (int piece : Piece.values) {
            for (int i = 0; i < BOARDSIZE; ++i) {
                zobristPiece[piece][i] = zobrist.next();
            }
        }
        zobristCastling[WHITE][KINGSIDE] = zobrist.next();
        zobristCastling[WHITE][QUEENSIDE] = zobrist.next();
        zobristCastling[BLACK][KINGSIDE] = zobrist.next();
        zobristCastling[BLACK][QUEENSIDE] = zobrist.next();
        for (int i = 0; i < BOARDSIZE; ++i) {
            zobristEnPassant[i] = zobrist.next();
        }
        zobristActiveColor = zobrist.next();
    }
    /**
     * Creates an internal board from the given {@link GenericBoard}: pieces,
     * castling rights, en passant square, active color, half-move clock and
     * full move number are copied, and the zobrist key is built incrementally
     * along the way.
     *
     * @param genericBoard the external board representation; must not be null
     */
    public Board(GenericBoard genericBoard) {
        assert genericBoard != null;

        // Initialize stack
        for (int i = 0; i < stack.length; ++i) {
            stack[i] = new State();
        }

        // Initialize piece type lists
        for (int color : Color.values) {
            pawns[color] = new Bitboard();
            knights[color] = new Bitboard();
            bishops[color] = new Bitboard();
            rooks[color] = new Bitboard();
            queens[color] = new Bitboard();
            kings[color] = new Bitboard();
        }

        // Initialize material
        for (int color : Color.values) {
            material[color] = 0;
        }

        // Initialize board; put() also updates the piece lists, the material
        // count and the zobrist key.
        for (int square : Square.values) {
            board[square] = Piece.NOPIECE;
            GenericPiece genericPiece = genericBoard.getPiece(Square.toGenericPosition(square));
            if (genericPiece != null) {
                int piece = Piece.valueOf(genericPiece);
                put(piece, square);
            }
        }

        // Initialize castling
        for (int color : Color.values) {
            for (int castling : Castling.values) {
                GenericFile genericFile = genericBoard.getCastling(
                    Color.toGenericColor(color), Castling.toGenericCastling(castling)
                );
                if (genericFile != null) {
                    castlingRights[color][castling] = File.valueOf(genericFile);
                    zobristKey ^= zobristCastling[color][castling];
                } else {
                    castlingRights[color][castling] = File.NOFILE;
                }
            }
        }

        // Initialize en passant
        if (genericBoard.getEnPassant() != null) {
            enPassant = Square.valueOf(genericBoard.getEnPassant());
            zobristKey ^= zobristEnPassant[enPassant];
        }

        // Initialize active color; the field defaults to WHITE, so the zobrist
        // key only needs to change when BLACK is to move.
        if (activeColor != Color.valueOf(genericBoard.getActiveColor())) {
            activeColor = Color.valueOf(genericBoard.getActiveColor());
            zobristKey ^= zobristActiveColor;
        }

        // Initialize half move clock
        halfMoveClock = genericBoard.getHalfMoveClock();

        // Initialize the full move number
        setFullMoveNumber(genericBoard.getFullMoveNumber());
    }
    /**
     * Converts this internal board back into the external {@link GenericBoard}
     * representation (pieces, castling rights, en passant square, active color,
     * half-move clock and full move number).
     *
     * @return the equivalent {@link GenericBoard}
     */
    GenericBoard toGenericBoard() {
        GenericBoard genericBoard = new GenericBoard();

        // Set board
        for (int square : Square.values) {
            if (board[square] != Piece.NOPIECE) {
                genericBoard.setPiece(Piece.toGenericPiece(board[square]), Square.toGenericPosition(square));
            }
        }

        // Set castling
        for (int color : Color.values) {
            for (int castling : Castling.values) {
                if (castlingRights[color][castling] != File.NOFILE) {
                    genericBoard.setCastling(
                        Color.toGenericColor(color),
                        Castling.toGenericCastling(castling),
                        File.toGenericFile(castlingRights[color][castling])
                    );
                }
            }
        }

        // Set en passant
        if (enPassant != Square.NOSQUARE) {
            genericBoard.setEnPassant(Square.toGenericPosition(enPassant));
        }

        // Set active color
        genericBoard.setActiveColor(Color.toGenericColor(activeColor));

        // Set half move clock
        genericBoard.setHalfMoveClock(halfMoveClock);

        // Set full move number
        genericBoard.setFullMoveNumber(getFullMoveNumber());

        return genericBoard;
    }
public String toString() {
return toGenericBoard().toString();
}
    /** Returns the full move number derived from the internal half-move counter. */
    int getFullMoveNumber() {
        return halfMoveNumber / 2;
    }
  /**
   * Sets the full move number by encoding it into halfMoveNumber.
   *
   * @param fullMoveNumber the full move number; must be positive.
   */
  private void setFullMoveNumber(int fullMoveNumber) {
    assert fullMoveNumber > 0;
    halfMoveNumber = fullMoveNumber * 2;
    // Black to move means white's half of this full move is already played.
    if (activeColor == Color.valueOf(GenericColor.BLACK)) {
      ++halfMoveNumber;
    }
  }
  /**
   * Returns whether the current position already occurred earlier in the game.
   */
  boolean isRepetition() {
    // Only positions since the last irreversible move (pawn move or capture,
    // i.e. within the halfMoveClock window) can possibly repeat.
    int j = Math.max(0, stackSize - halfMoveClock);
    // The zobrist key encodes the side to move (zobristActiveColor is XORed
    // on every makeMove), so only positions an even number of plies back can
    // match; hence the step of 2.
    for (int i = stackSize - 2; i >= j; i -= 2) {
      if (zobristKey == stack[i].zobristKey) {
        return true;
      }
    }
    return false;
  }
boolean hasInsufficientMaterial() {
return pawns[Color.WHITE].size() == 0 && pawns[Color.BLACK].size() == 0
&& rooks[Color.WHITE].size() == 0 && rooks[Color.BLACK].size() == 0
&& queens[Color.WHITE].size() == 0 && queens[Color.BLACK].size() == 0
&& (knights[Color.WHITE].size() + knights[Color.BLACK].size()
+ bishops[Color.WHITE].size() + bishops[Color.BLACK].size() <= 1);
}
/**
* Puts a piece at the square. We need to update our board and the appropriate
* piece type list.
*
* @param piece the Piece.
* @param square the Square.
*/
private void put(int piece, int square) {
assert Piece.isValid(piece);
assert Square.isValid(square);
assert board[square] == Piece.NOPIECE;
int pieceType = Piece.getType(piece);
int color = Piece.getColor(piece);
switch (pieceType) {
case Piece.Type.PAWN:
pawns[color].add(square);
material[color] += Evaluation.PAWN_VALUE;
break;
case Piece.Type.KNIGHT:
knights[color].add(square);
material[color] += Evaluation.KNIGHT_VALUE;
break;
case Piece.Type.BISHOP:
bishops[color].add(square);
material[color] += Evaluation.BISHOP_VALUE;
break;
case Piece.Type.ROOK:
rooks[color].add(square);
material[color] += Evaluation.ROOK_VALUE;
break;
case Piece.Type.QUEEN:
queens[color].add(square);
material[color] += Evaluation.QUEEN_VALUE;
break;
case Piece.Type.KING:
kings[color].add(square);
material[color] += Evaluation.KING_VALUE;
break;
default:
assert false : pieceType;
break;
}
board[square] = piece;
zobristKey ^= zobristPiece[piece][square];
}
  /**
   * Removes a piece from the square. We need to update our board and the
   * appropriate piece type list, the material counter and the zobrist key.
   * Inverse of put().
   *
   * @param square the Square; must hold a valid piece.
   * @return the Piece which was removed.
   */
  private int remove(int square) {
    assert Square.isValid(square);
    assert Piece.isValid(board[square]);
    int piece = board[square];
    int pieceType = Piece.getType(piece);
    int color = Piece.getColor(piece);
    // Unregister the piece from its type list and subtract its material value.
    switch (pieceType) {
      case Piece.Type.PAWN:
        pawns[color].remove(square);
        material[color] -= Evaluation.PAWN_VALUE;
        break;
      case Piece.Type.KNIGHT:
        knights[color].remove(square);
        material[color] -= Evaluation.KNIGHT_VALUE;
        break;
      case Piece.Type.BISHOP:
        bishops[color].remove(square);
        material[color] -= Evaluation.BISHOP_VALUE;
        break;
      case Piece.Type.ROOK:
        rooks[color].remove(square);
        material[color] -= Evaluation.ROOK_VALUE;
        break;
      case Piece.Type.QUEEN:
        queens[color].remove(square);
        material[color] -= Evaluation.QUEEN_VALUE;
        break;
      case Piece.Type.KING:
        kings[color].remove(square);
        material[color] -= Evaluation.KING_VALUE;
        break;
      default:
        assert false : pieceType;
        break;
    }
    board[square] = Piece.NOPIECE;
    // XORing the same key used by put() cancels the piece out of the hash.
    zobristKey ^= zobristPiece[piece][square];
    return piece;
  }
  /**
   * Makes the move on the board. The irreversible state (zobrist key,
   * castling rights, en passant square, half move clock) is saved on the
   * stack first so that undoMove() can restore it afterwards.
   *
   * @param move the move, encoded as an int (decoded via the Move accessors).
   */
  public void makeMove(int move) {
    State entry = stack[stackSize];
    // Get variables
    int type = Move.getType(move);
    int originSquare = Move.getOriginSquare(move);
    int targetSquare = Move.getTargetSquare(move);
    int originPiece = Move.getOriginPiece(move);
    int originColor = Piece.getColor(originPiece);
    int targetPiece = Move.getTargetPiece(move);
    // Save zobristKey
    entry.zobristKey = zobristKey;
    // Save castling rights
    for (int color : Color.values) {
      for (int castling : Castling.values) {
        entry.castlingRights[color][castling] = castlingRights[color][castling];
      }
    }
    // Save enPassant
    entry.enPassant = enPassant;
    // Save halfMoveClock
    entry.halfMoveClock = halfMoveClock;
    // Remove target piece and update castling rights
    if (targetPiece != Piece.NOPIECE) {
      int captureSquare = targetSquare;
      if (type == Move.Type.ENPASSANT) {
        // The captured pawn sits behind the en passant target square
        // (Square.S/Square.N are presumably one-rank offsets — confirm).
        captureSquare += (originColor == WHITE ? Square.S : Square.N);
      }
      assert targetPiece == board[captureSquare];
      assert Piece.getType(targetPiece) != Piece.Type.KING;
      remove(captureSquare);
      // Capturing a rook on its home square revokes that castling right.
      clearCastling(captureSquare);
    }
    // Move piece
    assert originPiece == board[originSquare];
    remove(originSquare);
    if (type == Move.Type.PAWNPROMOTION) {
      put(Piece.valueOf(Move.getPromotion(move), originColor), targetSquare);
    } else {
      put(originPiece, targetSquare);
    }
    // Move rook and update castling rights
    if (type == Move.Type.CASTLING) {
      int rookOriginSquare = Square.NOSQUARE;
      int rookTargetSquare = Square.NOSQUARE;
      // Map the king's target square to the rook's origin/target squares.
      switch (targetSquare) {
        case Square.g1:
          rookOriginSquare = Square.h1;
          rookTargetSquare = Square.f1;
          break;
        case Square.c1:
          rookOriginSquare = Square.a1;
          rookTargetSquare = Square.d1;
          break;
        case Square.g8:
          rookOriginSquare = Square.h8;
          rookTargetSquare = Square.f8;
          break;
        case Square.c8:
          rookOriginSquare = Square.a8;
          rookTargetSquare = Square.d8;
          break;
        default:
          assert false : targetSquare;
          break;
      }
      assert Piece.getType(board[rookOriginSquare]) == Piece.Type.ROOK;
      int rookPiece = remove(rookOriginSquare);
      put(rookPiece, rookTargetSquare);
    }
    // Update castling (moving from a king/rook home square clears rights)
    clearCastling(originSquare);
    // Update enPassant: first XOR out the old target, if any
    if (enPassant != Square.NOSQUARE) {
      zobristKey ^= zobristEnPassant[enPassant];
    }
    if (type == Move.Type.PAWNDOUBLE) {
      // A double pawn push creates a new en passant target behind the pawn.
      enPassant = targetSquare + (originColor == WHITE ? Square.S : Square.N);
      assert Square.isValid(enPassant);
      zobristKey ^= zobristEnPassant[enPassant];
    } else {
      enPassant = Square.NOSQUARE;
    }
    // Update activeColor
    activeColor = Color.opposite(activeColor);
    zobristKey ^= zobristActiveColor;
    // Update halfMoveClock: pawn moves and captures reset the 50-move counter
    if (Piece.getType(originPiece) == Piece.Type.PAWN || targetPiece != Piece.NOPIECE) {
      halfMoveClock = 0;
    } else {
      ++halfMoveClock;
    }
    // Update fullMoveNumber
    ++halfMoveNumber;
    ++stackSize;
    assert stackSize < MAX_GAMEMOVES;
  }
  /**
   * Undoes the move on the board, restoring the state saved by makeMove().
   * Must be called with exactly the move that was made last.
   *
   * @param move the move, encoded as an int (decoded via the Move accessors).
   */
  public void undoMove(int move) {
    --stackSize;
    assert stackSize >= 0;
    State entry = stack[stackSize];
    // Get variables
    int type = Move.getType(move);
    int originSquare = Move.getOriginSquare(move);
    int targetSquare = Move.getTargetSquare(move);
    int originPiece = Move.getOriginPiece(move);
    int originColor = Piece.getColor(originPiece);
    int targetPiece = Move.getTargetPiece(move);
    // Update fullMoveNumber
    --halfMoveNumber;
    // Update activeColor
    activeColor = Color.opposite(activeColor);
    // Undo move rook
    if (type == Move.Type.CASTLING) {
      int rookOriginSquare = Square.NOSQUARE;
      int rookTargetSquare = Square.NOSQUARE;
      // Same king-target -> rook-squares mapping as in makeMove().
      switch (targetSquare) {
        case Square.g1:
          rookOriginSquare = Square.h1;
          rookTargetSquare = Square.f1;
          break;
        case Square.c1:
          rookOriginSquare = Square.a1;
          rookTargetSquare = Square.d1;
          break;
        case Square.g8:
          rookOriginSquare = Square.h8;
          rookTargetSquare = Square.f8;
          break;
        case Square.c8:
          rookOriginSquare = Square.a8;
          rookTargetSquare = Square.d8;
          break;
        default:
          assert false : targetSquare;
          break;
      }
      assert Piece.getType(board[rookTargetSquare]) == Piece.Type.ROOK;
      int rookPiece = remove(rookTargetSquare);
      put(rookPiece, rookOriginSquare);
    }
    // Undo move piece. Note: for a promotion this removes the promoted piece
    // and puts the original pawn back.
    remove(targetSquare);
    put(originPiece, originSquare);
    // Restore target piece
    if (targetPiece != Piece.NOPIECE) {
      int captureSquare = targetSquare;
      if (type == Move.Type.ENPASSANT) {
        // The captured pawn was behind the en passant target square.
        captureSquare += (originColor == WHITE ? Square.S : Square.N);
        assert Square.isValid(captureSquare);
      }
      put(targetPiece, captureSquare);
    }
    // Restore halfMoveClock
    halfMoveClock = entry.halfMoveClock;
    // Restore enPassant
    enPassant = entry.enPassant;
    // Restore castling rights. No zobrist bookkeeping is needed here because
    // the full zobrist key is restored wholesale just below.
    for (int color : Color.values) {
      for (int castling : Castling.values) {
        if (entry.castlingRights[color][castling] != castlingRights[color][castling]) {
          castlingRights[color][castling] = entry.castlingRights[color][castling];
        }
      }
    }
    // Restore zobristKey
    zobristKey = entry.zobristKey;
  }
  /**
   * Clears a single castling right and updates the zobrist key — but only if
   * the right was still present, so the key is XORed at most once per right.
   */
  private void clearCastling(int color, int castling) {
    assert Color.isValid(color);
    assert Castling.isValid(castling);
    if (castlingRights[color][castling] != File.NOFILE) {
      castlingRights[color][castling] = File.NOFILE;
      zobristKey ^= zobristCastling[color][castling];
    }
  }
private void clearCastling(int square) {
assert Square.isLegal(square);
switch (square) {
case Square.a1:
clearCastling(WHITE, QUEENSIDE);
break;
case Square.h1:
clearCastling(WHITE, KINGSIDE);
break;
case Square.a8:
clearCastling(BLACK, QUEENSIDE);
break;
case Square.h8:
clearCastling(BLACK, KINGSIDE);
break;
case Square.e1:
clearCastling(WHITE, QUEENSIDE);
clearCastling(WHITE, KINGSIDE);
break;
case Square.e8:
clearCastling(BLACK, QUEENSIDE);
clearCastling(BLACK, KINGSIDE);
break;
default:
break;
}
}
  /**
   * Returns whether the king of the side to move is attacked.
   * Bitboard.next(kings[...].squares) presumably extracts the single king
   * square from the king bitboard — confirm against Bitboard.
   */
  public boolean isCheck() {
    // Check whether our king is attacked by any opponent piece
    return isAttacked(Bitboard.next(kings[activeColor].squares), Color.opposite(activeColor));
  }
  /**
   * Returns whether the targetSquare is attacked by any piece from the
   * attackerColor. We will backtrack from the targetSquare to find the piece.
   *
   * @param targetSquare the target Square.
   * @param attackerColor the attacker Color.
   * @return whether the targetSquare is attacked.
   */
  boolean isAttacked(int targetSquare, int attackerColor) {
    assert Square.isValid(targetSquare);
    assert Color.isValid(attackerColor);
    // Pawn attacks. The loop starts at index 1 — index 0 is presumably the
    // non-capturing forward push in moveDeltaPawn; confirm in MoveGenerator.
    int pawnPiece = Piece.valueOf(Piece.Type.PAWN, attackerColor);
    for (int i = 1; i < MoveGenerator.moveDeltaPawn[attackerColor].length; ++i) {
      // Subtracting the delta backtracks from the target to a possible pawn.
      int attackerSquare = targetSquare - MoveGenerator.moveDeltaPawn[attackerColor][i];
      if (Square.isLegal(attackerSquare)) {
        int attackerPawn = board[attackerSquare];
        if (attackerPawn == pawnPiece) {
          return true;
        }
      }
    }
    return isAttacked(targetSquare,
        Piece.valueOf(Piece.Type.KNIGHT, attackerColor),
        MoveGenerator.moveDeltaKnight)
        // The queen moves like a bishop, so check both piece types
        || isAttacked(targetSquare,
        Piece.valueOf(Piece.Type.BISHOP, attackerColor),
        Piece.valueOf(Piece.Type.QUEEN, attackerColor),
        MoveGenerator.moveDeltaBishop)
        // The queen moves like a rook, so check both piece types
        || isAttacked(targetSquare,
        Piece.valueOf(Piece.Type.ROOK, attackerColor),
        Piece.valueOf(Piece.Type.QUEEN, attackerColor),
        MoveGenerator.moveDeltaRook)
        || isAttacked(targetSquare,
        Piece.valueOf(Piece.Type.KING, attackerColor),
        MoveGenerator.moveDeltaKing);
  }
/**
* Returns whether the targetSquare is attacked by a non-sliding piece.
*/
private boolean isAttacked(int targetSquare, int attackerPiece, int[] moveDelta) {
assert Square.isValid(targetSquare);
assert Piece.isValid(attackerPiece);
assert moveDelta != null;
for (int delta : moveDelta) {
int attackerSquare = targetSquare + delta;
if (Square.isLegal(attackerSquare) && board[attackerSquare] == attackerPiece) {
return true;
}
}
return false;
}
  /**
   * Returns whether the targetSquare is attacked by a sliding piece.
   * Walks outward along each ray delta until the board edge or the first
   * occupied square; the queen piece is accepted on both bishop and rook rays.
   */
  private boolean isAttacked(int targetSquare, int attackerPiece, int queenPiece, int[] moveDelta) {
    assert Square.isValid(targetSquare);
    assert Piece.isValid(attackerPiece);
    assert Piece.isValid(queenPiece);
    assert moveDelta != null;
    for (int delta : moveDelta) {
      int attackerSquare = targetSquare + delta;
      while (Square.isLegal(attackerSquare)) {
        int piece = board[attackerSquare];
        if (Piece.isValid(piece)) {
          // First piece on the ray: either it attacks us, or it blocks the ray.
          if (piece == attackerPiece || piece == queenPiece) {
            return true;
          }
          break;
        } else {
          // Empty square: keep sliding along the ray.
          attackerSquare += delta;
        }
      }
    }
    return false;
  }
}
|
src/main/java/com/fluxchess/pulse/Board.java
|
/*
* Copyright 2013-2014 the original author or authors.
*
* This file is part of Pulse Chess.
*
* Pulse Chess is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Pulse Chess is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Pulse Chess. If not, see <http://www.gnu.org/licenses/>.
*/
package com.fluxchess.pulse;
import com.fluxchess.jcpi.models.GenericBoard;
import com.fluxchess.jcpi.models.GenericColor;
import com.fluxchess.jcpi.models.GenericFile;
import com.fluxchess.jcpi.models.GenericPiece;
import java.security.SecureRandom;
import static com.fluxchess.pulse.Castling.KINGSIDE;
import static com.fluxchess.pulse.Castling.QUEENSIDE;
import static com.fluxchess.pulse.Color.BLACK;
import static com.fluxchess.pulse.Color.WHITE;
/**
* This is our internal board.
*/
public final class Board {
private static final int MAX_GAMEMOVES = Search.MAX_PLY + 1024;
private static final int BOARDSIZE = 128;
final int[] board = new int[BOARDSIZE];
final Bitboard[] pawns = new Bitboard[Color.values.length];
final Bitboard[] knights = new Bitboard[Color.values.length];
final Bitboard[] bishops = new Bitboard[Color.values.length];
final Bitboard[] rooks = new Bitboard[Color.values.length];
final Bitboard[] queens = new Bitboard[Color.values.length];
final Bitboard[] kings = new Bitboard[Color.values.length];
final int[] material = new int[Color.values.length];
final int[][] castlingRights = new int[Color.values.length][Castling.values.length];
int enPassant = Square.NOSQUARE;
int activeColor = WHITE;
int halfMoveClock = 0;
private int halfMoveNumber;
long zobristKey = 0;
private static final long[][] zobristPiece = new long[Piece.values.length][BOARDSIZE];
private static final long[][] zobristCastling = new long[Color.values.length][Castling.values.length];
private static final long[] zobristEnPassant = new long[BOARDSIZE];
private static final long zobristActiveColor;
// We will save some board parameters in a State before making a move.
// Later we will restore them before undoing a move.
private final State[] stack = new State[MAX_GAMEMOVES];
private int stackSize = 0;
  // Snapshot of the irreversible part of the board state; makeMove() saves
  // one entry on the stack and undoMove() restores it.
  private static final class State {
    private long zobristKey = 0;
    private final int[][] castlingRights = new int[Color.values.length][Castling.values.length];
    private int enPassant = Square.NOSQUARE;
    private int halfMoveClock = 0;
    private State() {
      // Start with every castling right cleared.
      for (int color : Color.values) {
        for (int castling : Castling.values) {
          castlingRights[color][castling] = File.NOFILE;
        }
      }
    }
  }
  // Generator for the pseudo-random 64-bit zobrist keys, backed by
  // SecureRandom so keys are well distributed.
  private static final class Zobrist {
    private final SecureRandom random = new SecureRandom();
    private byte[] result() {
      // Generate some random bytes for our keys
      byte[] bytes = new byte[16];
      random.nextBytes(bytes);
      return bytes;
    }
    private long next() {
      byte[] result = result();
      long hash = 0L;
      // Fold the 16 random bytes into one 64-bit value. The shift amount
      // wraps at 64, so bytes 8-15 are XORed onto the same bit positions
      // as bytes 0-7.
      for (int i = 0; i < result.length; ++i) {
        hash ^= ((long) (result[i] & 0xFF)) << ((i * 8) % 64);
      }
      return hash;
    }
  }
  // Initialize the zobrist keys once per process: one key per piece/square
  // pair, per castling right, per en passant square, plus one for the side
  // to move. XORing these keys in and out incrementally maintains the hash.
  static {
    Zobrist zobrist = new Zobrist();
    for (int piece : Piece.values) {
      for (int i = 0; i < BOARDSIZE; ++i) {
        zobristPiece[piece][i] = zobrist.next();
      }
    }
    zobristCastling[WHITE][KINGSIDE] = zobrist.next();
    zobristCastling[WHITE][QUEENSIDE] = zobrist.next();
    zobristCastling[BLACK][KINGSIDE] = zobrist.next();
    zobristCastling[BLACK][QUEENSIDE] = zobrist.next();
    for (int i = 0; i < BOARDSIZE; ++i) {
      zobristEnPassant[i] = zobrist.next();
    }
    zobristActiveColor = zobrist.next();
  }
  /**
   * Creates a new board from the given external GenericBoard, rebuilding the
   * mailbox array, piece type lists, material counters, castling rights,
   * en passant square, active color, clocks and the zobrist key from scratch.
   *
   * @param genericBoard the external board representation; must not be null.
   */
  public Board(GenericBoard genericBoard) {
    assert genericBoard != null;
    // Initialize stack
    for (int i = 0; i < stack.length; ++i) {
      stack[i] = new State();
    }
    // Initialize piece type lists
    for (int color : Color.values) {
      pawns[color] = new Bitboard();
      knights[color] = new Bitboard();
      bishops[color] = new Bitboard();
      rooks[color] = new Bitboard();
      queens[color] = new Bitboard();
      kings[color] = new Bitboard();
    }
    // Initialize material
    for (int color : Color.values) {
      material[color] = 0;
    }
    // Initialize board; put() also folds each piece into the zobrist key.
    for (int square : Square.values) {
      board[square] = Piece.NOPIECE;
      GenericPiece genericPiece = genericBoard.getPiece(Square.toGenericPosition(square));
      if (genericPiece != null) {
        int piece = Piece.valueOf(genericPiece);
        put(piece, square);
      }
    }
    // Initialize castling
    for (int color : Color.values) {
      for (int castling : Castling.values) {
        GenericFile genericFile = genericBoard.getCastling(
            Color.toGenericColor(color), Castling.toGenericCastling(castling)
        );
        if (genericFile != null) {
          castlingRights[color][castling] = File.valueOf(genericFile);
          zobristKey ^= zobristCastling[color][castling];
        } else {
          castlingRights[color][castling] = File.NOFILE;
        }
      }
    }
    // Initialize en passant
    if (genericBoard.getEnPassant() != null) {
      enPassant = Square.valueOf(genericBoard.getEnPassant());
      zobristKey ^= zobristEnPassant[enPassant];
    }
    // Initialize active color. White to move is the zobrist baseline (the
    // field is initialized to WHITE), so XOR only when black starts.
    if (activeColor != Color.valueOf(genericBoard.getActiveColor())) {
      activeColor = Color.valueOf(genericBoard.getActiveColor());
      zobristKey ^= zobristActiveColor;
    }
    // Initialize half move clock
    halfMoveClock = genericBoard.getHalfMoveClock();
    // Initialize the full move number
    setFullMoveNumber(genericBoard.getFullMoveNumber());
  }
GenericBoard toGenericBoard() {
GenericBoard genericBoard = new GenericBoard();
// Set board
for (int square : Square.values) {
if (board[square] != Piece.NOPIECE) {
genericBoard.setPiece(Piece.toGenericPiece(board[square]), Square.toGenericPosition(square));
}
}
// Set castling
for (int color : Color.values) {
for (int castling : Castling.values) {
if (castlingRights[color][castling] != File.NOFILE) {
genericBoard.setCastling(
Color.toGenericColor(color),
Castling.toGenericCastling(castling),
File.toGenericFile(castlingRights[color][castling])
);
}
}
}
// Set en passant
if (enPassant != Square.NOSQUARE) {
genericBoard.setEnPassant(Square.toGenericPosition(enPassant));
}
// Set active color
genericBoard.setActiveColor(Color.toGenericColor(activeColor));
// Set half move clock
genericBoard.setHalfMoveClock(halfMoveClock);
// Set full move number
genericBoard.setFullMoveNumber(getFullMoveNumber());
return genericBoard;
}
public String toString() {
return toGenericBoard().toString();
}
int getFullMoveNumber() {
return halfMoveNumber / 2;
}
private void setFullMoveNumber(int fullMoveNumber) {
assert fullMoveNumber > 0;
halfMoveNumber = fullMoveNumber * 2;
if (activeColor == Color.valueOf(GenericColor.BLACK)) {
++halfMoveNumber;
}
}
boolean isRepetition() {
int j = Math.max(0, stackSize - halfMoveClock);
for (int i = stackSize - 2; i >= j; i -= 2) {
if (zobristKey == stack[i].zobristKey) {
return true;
}
}
return false;
}
boolean hasInsufficientMaterial() {
return pawns[Color.WHITE].size() == 0 && pawns[Color.BLACK].size() == 0
&& rooks[Color.WHITE].size() == 0 && rooks[Color.BLACK].size() == 0
&& queens[Color.WHITE].size() == 0 && queens[Color.BLACK].size() == 0
&& (knights[Color.WHITE].size() + knights[Color.BLACK].size()
+ bishops[Color.WHITE].size() + bishops[Color.BLACK].size() <= 1);
}
/**
* Puts a piece at the square. We need to update our board and the appropriate
* piece type list.
*
* @param piece the Piece.
* @param square the Square.
*/
private void put(int piece, int square) {
assert Piece.isValid(piece);
assert Square.isValid(square);
assert board[square] == Piece.NOPIECE;
int pieceType = Piece.getType(piece);
int color = Piece.getColor(piece);
switch (pieceType) {
case Piece.Type.PAWN:
pawns[color].add(square);
material[color] += Evaluation.PAWN_VALUE;
break;
case Piece.Type.KNIGHT:
knights[color].add(square);
material[color] += Evaluation.KNIGHT_VALUE;
break;
case Piece.Type.BISHOP:
bishops[color].add(square);
material[color] += Evaluation.BISHOP_VALUE;
break;
case Piece.Type.ROOK:
rooks[color].add(square);
material[color] += Evaluation.ROOK_VALUE;
break;
case Piece.Type.QUEEN:
queens[color].add(square);
material[color] += Evaluation.QUEEN_VALUE;
break;
case Piece.Type.KING:
kings[color].add(square);
material[color] += Evaluation.KING_VALUE;
break;
default:
assert false : pieceType;
break;
}
board[square] = piece;
zobristKey ^= zobristPiece[piece][square];
}
/**
* Removes a piece from the square. We need to update our board and the
* appropriate piece type list.
*
* @param square the Square.
* @return the Piece which was removed.
*/
private int remove(int square) {
assert Square.isValid(square);
assert Piece.isValid(board[square]);
int piece = board[square];
int pieceType = Piece.getType(piece);
int color = Piece.getColor(piece);
switch (pieceType) {
case Piece.Type.PAWN:
pawns[color].remove(square);
material[color] -= Evaluation.PAWN_VALUE;
break;
case Piece.Type.KNIGHT:
knights[color].remove(square);
material[color] -= Evaluation.KNIGHT_VALUE;
break;
case Piece.Type.BISHOP:
bishops[color].remove(square);
material[color] -= Evaluation.BISHOP_VALUE;
break;
case Piece.Type.ROOK:
rooks[color].remove(square);
material[color] -= Evaluation.ROOK_VALUE;
break;
case Piece.Type.QUEEN:
queens[color].remove(square);
material[color] -= Evaluation.QUEEN_VALUE;
break;
case Piece.Type.KING:
kings[color].remove(square);
material[color] -= Evaluation.KING_VALUE;
break;
default:
assert false : pieceType;
break;
}
board[square] = Piece.NOPIECE;
zobristKey ^= zobristPiece[piece][square];
return piece;
}
public void makeMove(int move) {
State entry = stack[stackSize];
// Get variables
int type = Move.getType(move);
int originSquare = Move.getOriginSquare(move);
int targetSquare = Move.getTargetSquare(move);
int originPiece = Move.getOriginPiece(move);
int originColor = Piece.getColor(originPiece);
int targetPiece = Move.getTargetPiece(move);
// Save zobristKey
entry.zobristKey = zobristKey;
// Save castling rights
for (int color : Color.values) {
for (int castling : Castling.values) {
entry.castlingRights[color][castling] = castlingRights[color][castling];
}
}
// Save enPassant
entry.enPassant = enPassant;
// Save halfMoveClock
entry.halfMoveClock = halfMoveClock;
// Remove target piece and update castling rights
if (targetPiece != Piece.NOPIECE) {
int captureSquare = targetSquare;
if (type == Move.Type.ENPASSANT) {
captureSquare += (originColor == WHITE ? Square.S : Square.N);
}
assert targetPiece == board[captureSquare];
assert Piece.getType(targetPiece) != Piece.Type.KING;
remove(captureSquare);
clearCastling(captureSquare);
}
// Move piece
assert originPiece == board[originSquare];
remove(originSquare);
if (type == Move.Type.PAWNPROMOTION) {
put(Piece.valueOf(Move.getPromotion(move), originColor), targetSquare);
} else {
put(originPiece, targetSquare);
}
// Move rook and update castling rights
if (type == Move.Type.CASTLING) {
int rookOriginSquare = Square.NOSQUARE;
int rookTargetSquare = Square.NOSQUARE;
switch (targetSquare) {
case Square.g1:
rookOriginSquare = Square.h1;
rookTargetSquare = Square.f1;
break;
case Square.c1:
rookOriginSquare = Square.a1;
rookTargetSquare = Square.d1;
break;
case Square.g8:
rookOriginSquare = Square.h8;
rookTargetSquare = Square.f8;
break;
case Square.c8:
rookOriginSquare = Square.a8;
rookTargetSquare = Square.d8;
break;
default:
assert false : targetSquare;
break;
}
assert Piece.getType(board[rookOriginSquare]) == Piece.Type.ROOK;
int rookPiece = remove(rookOriginSquare);
put(rookPiece, rookTargetSquare);
}
// Update castling
clearCastling(originSquare);
// Update enPassant
if (enPassant != Square.NOSQUARE) {
zobristKey ^= zobristEnPassant[enPassant];
}
if (type == Move.Type.PAWNDOUBLE) {
enPassant = targetSquare + (originColor == WHITE ? Square.S : Square.N);
assert Square.isValid(enPassant);
zobristKey ^= zobristEnPassant[enPassant];
} else {
enPassant = Square.NOSQUARE;
}
// Update activeColor
activeColor = Color.opposite(activeColor);
zobristKey ^= zobristActiveColor;
// Update halfMoveClock
if (Piece.getType(originPiece) == Piece.Type.PAWN || targetPiece != Piece.NOPIECE) {
halfMoveClock = 0;
} else {
++halfMoveClock;
}
// Update fullMoveNumber
++halfMoveNumber;
++stackSize;
assert stackSize < MAX_GAMEMOVES;
}
public void undoMove(int move) {
--stackSize;
assert stackSize >= 0;
State entry = stack[stackSize];
// Get variables
int type = Move.getType(move);
int originSquare = Move.getOriginSquare(move);
int targetSquare = Move.getTargetSquare(move);
int originPiece = Move.getOriginPiece(move);
int originColor = Piece.getColor(originPiece);
int targetPiece = Move.getTargetPiece(move);
// Update fullMoveNumber
--halfMoveNumber;
// Update activeColor
activeColor = Color.opposite(activeColor);
// Undo move rook
if (type == Move.Type.CASTLING) {
int rookOriginSquare = Square.NOSQUARE;
int rookTargetSquare = Square.NOSQUARE;
switch (targetSquare) {
case Square.g1:
rookOriginSquare = Square.h1;
rookTargetSquare = Square.f1;
break;
case Square.c1:
rookOriginSquare = Square.a1;
rookTargetSquare = Square.d1;
break;
case Square.g8:
rookOriginSquare = Square.h8;
rookTargetSquare = Square.f8;
break;
case Square.c8:
rookOriginSquare = Square.a8;
rookTargetSquare = Square.d8;
break;
default:
assert false : targetSquare;
break;
}
assert Piece.getType(board[rookTargetSquare]) == Piece.Type.ROOK;
int rookPiece = remove(rookTargetSquare);
put(rookPiece, rookOriginSquare);
}
// Undo move piece
remove(targetSquare);
put(originPiece, originSquare);
// Restore target piece
if (targetPiece != Piece.NOPIECE) {
int captureSquare = targetSquare;
if (type == Move.Type.ENPASSANT) {
captureSquare += (originColor == WHITE ? Square.S : Square.N);
assert Square.isValid(captureSquare);
}
put(targetPiece, captureSquare);
}
// Restore halfMoveClock
halfMoveClock = entry.halfMoveClock;
// Restore enPassant
enPassant = entry.enPassant;
// Restore castling rights
for (int color : Color.values) {
for (int castling : Castling.values) {
if (entry.castlingRights[color][castling] != castlingRights[color][castling]) {
castlingRights[color][castling] = entry.castlingRights[color][castling];
}
}
}
// Restore zobristKey
zobristKey = entry.zobristKey;
}
private void clearCastling(int color, int castling) {
assert Color.isValid(color);
assert Castling.isValid(castling);
if (castlingRights[color][castling] != File.NOFILE) {
castlingRights[color][castling] = File.NOFILE;
zobristKey ^= zobristCastling[color][castling];
}
}
private void clearCastling(int square) {
assert Square.isLegal(square);
switch (square) {
case Square.a1:
clearCastling(WHITE, QUEENSIDE);
break;
case Square.h1:
clearCastling(WHITE, KINGSIDE);
break;
case Square.a8:
clearCastling(BLACK, QUEENSIDE);
break;
case Square.h8:
clearCastling(BLACK, KINGSIDE);
break;
case Square.e1:
clearCastling(WHITE, QUEENSIDE);
clearCastling(WHITE, KINGSIDE);
break;
case Square.e8:
clearCastling(BLACK, QUEENSIDE);
clearCastling(BLACK, KINGSIDE);
break;
default:
break;
}
}
public boolean isCheck() {
// Check whether our king is attacked by any opponent piece
return isAttacked(Bitboard.next(kings[activeColor].squares), Color.opposite(activeColor));
}
  /**
   * Returns whether the targetSquare is attacked by any piece from the
   * attackerColor. We will backtrack from the targetSquare to find the piece.
   *
   * @param targetSquare the target Square.
   * @param attackerColor the attacker Color.
   * @return whether the targetSquare is attacked.
   */
  boolean isAttacked(int targetSquare, int attackerColor) {
    assert Square.isValid(targetSquare);
    assert Color.isValid(attackerColor);
    // Pawn attacks. The loop starts at index 1 — index 0 is presumably the
    // non-capturing forward push in moveDeltaPawn; confirm in MoveGenerator.
    int pawnPiece = Piece.valueOf(Piece.Type.PAWN, attackerColor);
    for (int i = 1; i < MoveGenerator.moveDeltaPawn[attackerColor].length; ++i) {
      // Subtracting the delta backtracks from the target to a possible pawn.
      int attackerSquare = targetSquare - MoveGenerator.moveDeltaPawn[attackerColor][i];
      if (Square.isLegal(attackerSquare)) {
        int attackerPawn = board[attackerSquare];
        if (attackerPawn == pawnPiece) {
          return true;
        }
      }
    }
    // Each remaining piece type is checked separately against its own delta
    // table; the queen now has its own moveDeltaQueen table.
    return isAttacked(targetSquare, attackerColor, Piece.Type.KNIGHT, MoveGenerator.moveDeltaKnight)
        || isAttacked(targetSquare, attackerColor, Piece.Type.BISHOP, MoveGenerator.moveDeltaBishop)
        || isAttacked(targetSquare, attackerColor, Piece.Type.ROOK, MoveGenerator.moveDeltaRook)
        || isAttacked(targetSquare, attackerColor, Piece.Type.QUEEN, MoveGenerator.moveDeltaQueen)
        || isAttacked(targetSquare, attackerColor, Piece.Type.KING, MoveGenerator.moveDeltaKing);
  }
private boolean isAttacked(int targetSquare, int attackerColor, int attackerPieceType, int[] moveDelta) {
assert Square.isValid(targetSquare);
assert Color.isValid(attackerColor);
assert Piece.Type.isValid(attackerPieceType);
assert moveDelta != null;
boolean sliding = Piece.Type.isSliding(attackerPieceType);
for (int delta : moveDelta) {
int attackerSquare = targetSquare + delta;
while (Square.isLegal(attackerSquare)) {
int attackerPiece = board[attackerSquare];
if (Piece.isValid(attackerPiece)) {
if (Piece.getType(attackerPiece) == attackerPieceType
&& Piece.getColor(attackerPiece) == attackerColor) {
return true;
}
break;
} else {
if (!sliding) {
break;
}
attackerSquare += delta;
}
}
}
return false;
}
}
|
Improve performance in isAttacked()
|
src/main/java/com/fluxchess/pulse/Board.java
|
Improve performance in isAttacked()
|
|
Java
|
epl-1.0
|
16e527c3b82c24e646355f55910179f9c5138b08
| 0
|
violinlakshmi/opendaylight,mandeepdhami/controller,violinlakshmi/opendaylight,aryantaheri/monitoring-controller,aryantaheri/controller,tx1103mark/controller,my76128/controller,opendaylight/controller,tx1103mark/controller,aryantaheri/controller,aryantaheri/monitoring-controller,aryantaheri/controller,inocybe/odl-controller,my76128/controller,my76128/controller,aryantaheri/monitoring-controller,mandeepdhami/controller,Johnson-Chou/test,violinlakshmi/opendaylight,tx1103mark/controller,inocybe/odl-controller,522986491/controller,Sushma7785/OpenDayLight-Load-Balancer,mandeepdhami/controller,Johnson-Chou/test,aryantaheri/monitoring-controller,Sushma7785/OpenDayLight-Load-Balancer,tx1103mark/controller,my76128/controller,522986491/controller,mandeepdhami/controller
|
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
/**
* Class which will monitor the completion of a FlowEntryDistributionOrder it
* implements a Future interface so it can be inspected by who is waiting for
* it.
*/
package org.opendaylight.controller.forwardingrulesmanager.internal;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.opendaylight.controller.forwardingrulesmanager.implementation.data.FlowEntryDistributionOrder;
import org.opendaylight.controller.sal.utils.Status;
import org.opendaylight.controller.sal.utils.StatusCode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Monitors the completion of a {@link FlowEntryDistributionOrder}. It
 * implements the {@link Future} interface so the caller that issued the
 * order can wait for and inspect the result.
 *
 * Thread-safety: retStatus is published through the CountDownLatch —
 * gotStatus() writes it before counting down, and get() reads it only after
 * the latch has opened (or its count is already observed as zero).
 */
final class FlowEntryDistributionOrderFutureTask implements Future<Status> {
    // The order whose execution this future is monitoring.
    private final FlowEntryDistributionOrder order;
    // NOTE(review): never set to true anywhere; cancel() always returns
    // false, so isCancelled() can never report a cancellation.
    private boolean amICancelled;
    // Counts down to zero once the status for the order has arrived.
    private CountDownLatch waitingLatch;
    // Result of the order; stays UNDEFINED until gotStatus() delivers it.
    private Status retStatus;
    private static final Logger logger = LoggerFactory.getLogger(FlowEntryDistributionOrderFutureTask.class);
    /**
     * @param order
     *            for which we are monitoring the execution
     */
    FlowEntryDistributionOrderFutureTask(FlowEntryDistributionOrder order) {
        // Order being monitored
        this.order = order;
        this.amICancelled = false;
        // We need to wait for one completion to happen
        this.waitingLatch = new CountDownLatch(1);
        // No return status yet!
        this.retStatus = new Status(StatusCode.UNDEFINED);
    }
    /** Cancellation is not supported; this is a no-op returning false. */
    @Override
    public boolean cancel(boolean mayInterruptIfRunning) {
        return false;
    }
    /** Blocks until the status for the order arrives, then returns it. */
    @Override
    public Status get() throws InterruptedException, ExecutionException {
        logger.trace("Getting status for order {}", this.order);
        // If i'm done lets return the status as many times as caller wants
        if (this.waitingLatch.getCount() == 0L) {
            logger.trace("get returns the status without waiting");
            return retStatus;
        }
        logger.trace("Start waiting for status to come back");
        // Wait till someone signal that we are done
        this.waitingLatch.await();
        logger.trace("Waiting for the status is over, returning it");
        // Return the known status
        return retStatus;
    }
    /**
     * Bounded-wait variant of get().
     * NOTE(review): the boolean result of await(timeout, unit) is ignored, so
     * on timeout this returns the current (possibly still UNDEFINED) status
     * instead of throwing TimeoutException — confirm callers expect this.
     */
    @Override
    public Status get(long timeout, TimeUnit unit) throws InterruptedException,
            ExecutionException, TimeoutException {
        logger.trace("Getting status for order {}", this.order);
        // If i'm done lets return the status as many times as caller wants
        if (this.waitingLatch.getCount() == 0L) {
            logger.trace("get returns the status without waiting");
            return retStatus;
        }
        logger.trace("Start waiting for status to come back");
        // Wait till someone signal that we are done
        this.waitingLatch.await(timeout, unit);
        logger.trace("Waiting for the status is over, returning it")
        // Return the known status; if the wait timed out this may still be
        // the initial UNDEFINED status (it is never null).
        return retStatus;
    }
    @Override
    public boolean isCancelled() {
        return this.amICancelled;
    }
    /** Done exactly when the latch has been counted down by gotStatus(). */
    @Override
    public boolean isDone() {
        return (this.waitingLatch.getCount() == 0L);
    }
    /**
     * Used by the thread that gets back the status for the order so can unblock
     * an eventual caller waiting on the result to comes back.
     *
     * @param order the order the status belongs to; ignored if it is not ours.
     * @param retStatus the result to publish to waiters.
     */
    void gotStatus(FlowEntryDistributionOrder order, Status retStatus) {
        logger.trace("Got status for order:{} \n Status:{}", order, retStatus);
        if (!order.equals(this.order)) {
            logger.error("Didn't get a result for an order we did issue order expected:{}, order received:{}",
                    this.order, order);
            // Weird we got a call for an order we didn't make
            return;
        }
        this.retStatus = retStatus;
        // Now we are not waiting any longer
        this.waitingLatch.countDown();
        logger.trace("Unlocked the Future");
    }
}
|
opendaylight/forwardingrulesmanager/implementation/src/main/java/org/opendaylight/controller/forwardingrulesmanager/internal/FlowEntryDistributionOrderFutureTask.java
|
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
/**
* Class which will monitor the completion of a FlowEntryDistributionOrder it
* implements a Future interface so it can be inspected by who is waiting for
* it.
*/
package org.opendaylight.controller.forwardingrulesmanager.internal;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.opendaylight.controller.forwardingrulesmanager.implementation.data.FlowEntryDistributionOrder;
import org.opendaylight.controller.sal.utils.Status;
import org.opendaylight.controller.sal.utils.StatusCode;
/**
* Class which will monitor the completion of a FlowEntryDistributionOrder it
* implements a Future interface so it can be inspected by who is waiting for
* it.
*/
/**
 * Monitors the completion of a {@link FlowEntryDistributionOrder}. Implements
 * {@link Future} so the party that issued the order can block on, poll for, or
 * time-limit its wait for the resulting {@link Status}.
 */
final class FlowEntryDistributionOrderFutureTask implements Future<Status> {
    private final FlowEntryDistributionOrder order;
    // Never set anywhere in this class; cancellation is not supported
    // (cancel() always returns false), so this stays false.
    private boolean amICancelled;
    // Counted down exactly once by gotStatus(); getCount() == 0 means "done".
    private final CountDownLatch waitingLatch;
    // Result of the order; stays UNDEFINED until gotStatus() delivers it.
    private Status retStatus;

    /**
     * @param order
     *            for which we are monitoring the execution
     */
    FlowEntryDistributionOrderFutureTask(FlowEntryDistributionOrder order) {
        // Order being monitored
        this.order = order;
        this.amICancelled = false;
        // We need to wait for one completion to happen
        this.waitingLatch = new CountDownLatch(1);
        // No return status yet!
        this.retStatus = new Status(StatusCode.UNDEFINED);
    }

    /**
     * Cancellation is not supported.
     *
     * @return always false
     */
    @Override
    public boolean cancel(boolean mayInterruptIfRunning) {
        return false;
    }

    /**
     * Blocks until {@link #gotStatus} has delivered the result, then returns
     * it. Safe to call repeatedly once complete.
     */
    @Override
    public Status get() throws InterruptedException, ExecutionException {
        // If i'm done lets return the status as many times as caller wants
        if (this.waitingLatch.getCount() == 0L) {
            return retStatus;
        }
        // Wait till someone signal that we are done
        this.waitingLatch.await();
        // Return the known status
        return retStatus;
    }

    /**
     * Time-limited variant of {@link #get()}.
     *
     * @throws TimeoutException
     *             if the status does not arrive within the given timeout,
     *             honoring the {@link Future} contract
     */
    @Override
    public Status get(long timeout, TimeUnit unit) throws InterruptedException,
            ExecutionException, TimeoutException {
        // If i'm done lets return the status as many times as caller wants
        if (this.waitingLatch.getCount() == 0L) {
            return retStatus;
        }
        // FIX: the original ignored await()'s boolean result and silently
        // returned the UNDEFINED placeholder on timeout; the Future contract
        // requires a TimeoutException instead, and the exception is already
        // declared in the signature so callers must handle it.
        if (!this.waitingLatch.await(timeout, unit)) {
            throw new TimeoutException("No status received for order " + this.order);
        }
        // Return the known status
        return retStatus;
    }

    @Override
    public boolean isCancelled() {
        return this.amICancelled;
    }

    @Override
    public boolean isDone() {
        return (this.waitingLatch.getCount() == 0L);
    }

    /**
     * Used by the thread that gets back the status for the order so can unblock
     * an eventual caller waiting on the result to comes back
     *
     * @param order
     * @param retStatus
     */
    void gotStatus(FlowEntryDistributionOrder order, Status retStatus) {
        // FIX: compare with equals(); the original used reference equality
        // (order != this.order), which wrongly rejected logically-equal
        // orders delivered back by another thread or cluster node.
        if (!order.equals(this.order)) {
            // Weird we got a call for an order we didn't make
            return;
        }
        this.retStatus = retStatus;
        // Now we are not waiting any longer
        this.waitingLatch.countDown();
    }
}
|
FRMsync gets stuck, miscellaneous fix
- The future FlowEntryDistributionOrderFutureTask was erroneously
concluding an entry was not expected simply because the equality check
was failing.
- Added more extensive logging to catch further bugs in the area.
Change-Id: I8c2cb08ecf7bd9ea3623d79eae9a3ec16f023724
Signed-off-by: Giovanni Meo <8f30079aa2f0f06e12398c14a38a6013c42d387e@cisco.com>
|
opendaylight/forwardingrulesmanager/implementation/src/main/java/org/opendaylight/controller/forwardingrulesmanager/internal/FlowEntryDistributionOrderFutureTask.java
|
FRMsync gets stuck, miscellaneous fix
|
|
Java
|
epl-1.0
|
c1607004bfbbea69f718790bb16952b8c9f5b4a3
| 0
|
floralvikings/jenjin
|
package com.jenjinstudios.world.client.message;
import com.jenjinstudios.core.io.Message;
import static org.testng.Assert.assertFalse;
/**
* @author Caleb Brinkman
*/
/**
 * Verifies that a successful WorldLogoutResponse message causes the world
 * client to report itself as logged out.
 *
 * @author Caleb Brinkman
 */
public class ExecutableWorldLogoutResponseTest extends WorldClientExecutableMessageTest
{
@Override
public void testMessageExecution() throws Exception {
// Queue a fake successful logout response from the "server", followed by
// blank filler messages so the client's read loop keeps consuming input.
Message worldLogoutResponse = messageRegistry.createMessage("WorldLogoutResponse");
worldLogoutResponse.setArgument("success", true);
inStreamReadMessage.thenReturn(worldLogoutResponse, blankMessageSpam);
// Order matters: start the client, then run the blocking handshake
// steps before the logout request is issued.
worldClient.blockingStart();
worldClient.sendBlockingWorldFileRequest();
worldClient.sendBlockingLoginRequest();
worldClient.sendBlockingLogoutRequest();
// NOTE(review): fixed sleep makes this timing-dependent and potentially
// flaky on slow machines — consider polling with a timeout instead.
Thread.sleep(500); // Sleep to allow client to "catch up"
// After the logout response has been processed, the client must no
// longer consider itself logged in.
assertFalse(worldClient.isLoggedIn());
}
}
|
jenjin-world-client/src/test/java/com/jenjinstudios/world/client/message/ExecutableWorldLogoutResponseTest.java
|
package com.jenjinstudios.world.client.message;
import com.jenjinstudios.core.io.Message;
import static org.testng.Assert.assertFalse;
/**
* @author Caleb Brinkman
*/
/**
 * Verifies that a successful WorldLogoutResponse message causes the world
 * client to report itself as logged out.
 *
 * @author Caleb Brinkman
 */
public class ExecutableWorldLogoutResponseTest extends WorldClientExecutableMessageTest
{
    @Override
    public void testMessageExecution() throws Exception {
        // Queue a fake successful logout response from the "server", followed
        // by blank filler messages so the client's read loop keeps consuming.
        Message worldLogoutResponse = messageRegistry.createMessage("WorldLogoutResponse");
        worldLogoutResponse.setArgument("success", true);
        inStreamReadMessage.thenReturn(worldLogoutResponse, blankMessageSpam);
        // Order matters: start the client, then run the blocking handshake
        // steps before the logout request is issued.
        worldClient.blockingStart();
        worldClient.sendBlockingWorldFileRequest();
        worldClient.sendBlockingLoginRequest();
        // FIX: the test asserted the client was logged out without ever
        // issuing the logout request, so the queued WorldLogoutResponse was
        // never matched to an outgoing request. Issue the request explicitly.
        worldClient.sendBlockingLogoutRequest();
        Thread.sleep(500); // Sleep to allow client to "catch up"
        // After the logout response has been processed, the client must no
        // longer consider itself logged in.
        assertFalse(worldClient.isLoggedIn());
    }
}
|
Added call to sendBlockingLogoutRequest
|
jenjin-world-client/src/test/java/com/jenjinstudios/world/client/message/ExecutableWorldLogoutResponseTest.java
|
Added call to sendBlockingLogoutRequest
|
|
Java
|
agpl-3.0
|
e4c529fa1f26e066b930add300f46bd34af3c14e
| 0
|
MilosKozak/AndroidAPS,RoumenGeorgiev/AndroidAPS,winni67/AndroidAPS,AdrianLxM/AndroidAPS,jotomo/AndroidAPS,LadyViktoria/AndroidAPS,LadyViktoria/AndroidAPS,samihusseingit/AndroidAPS,RoumenGeorgiev/AndroidAPS,Heiner1/AndroidAPS,AdrianLxM/AndroidAPS,winni67/AndroidAPS,Heiner1/AndroidAPS,Heiner1/AndroidAPS,jotomo/AndroidAPS,PoweRGbg/AndroidAPS,PoweRGbg/AndroidAPS,PoweRGbg/AndroidAPS,MilosKozak/AndroidAPS,samihusseingit/AndroidAPS,Heiner1/AndroidAPS,MilosKozak/AndroidAPS,jotomo/AndroidAPS
|
package info.nightscout.androidaps.plugins.CircadianPercentageProfile;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import info.nightscout.androidaps.Config;
import info.nightscout.androidaps.Constants;
import info.nightscout.androidaps.MainApp;
import info.nightscout.androidaps.R;
import info.nightscout.androidaps.interfaces.PluginBase;
import info.nightscout.androidaps.interfaces.ProfileInterface;
import info.nightscout.client.data.NSProfile;
import info.nightscout.utils.DecimalFormatter;
import info.nightscout.utils.SafeParse;
import info.nightscout.utils.ToastUtils;
/**
* Created by Adrian on 12.11.2016.
* Based on SimpleProfile created by mike on 05.08.2016.
*/
// Profile plugin that derives the working treatment profile from 24-hour base
// arrays (basal / ISF / IC) by applying a percentage scaling and a circadian
// time shift, persisting everything in SharedPreferences.
public class CircadianPercentageProfilePlugin implements PluginBase, ProfileInterface {
// Prefix for every SharedPreferences key owned by this plugin.
public static final String SETTINGS_PREFIX = "CircadianPercentageProfile";
private static Logger log = LoggerFactory.getLogger(CircadianPercentageProfilePlugin.class);
// Plugin enable/visibility flags are static: shared across all instances.
private static boolean fragmentEnabled = true;
private static boolean fragmentVisible = true;
// Cached converted profile; rebuilt by createConvertedProfile() whenever
// settings are loaded or stored.
private static NSProfile convertedProfile = null;
// Unit flags; presumably exactly one of these is true — TODO confirm.
boolean mgdl;
boolean mmol;
// Duration of insulin action in hours — presumably; verify against NSProfile.
Double dia;
// Target range bounds, in the configured glucose units.
Double targetLow;
Double targetHigh;
// Scaling applied to the base values, in percent (100 = unchanged).
public int percentage;
// Circadian shift in hours applied to the 24-hour base arrays.
public int timeshift;
// 24 hourly base values; basal presumably in U/h, ISF/IC in profile units —
// TODO confirm units against NSProfile consumers.
double[] basebasal = new double[]{1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d};
double[] baseisf = new double[]{35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d};
double[] baseic = new double[]{4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d};
// Loading settings also builds the initial converted profile.
public CircadianPercentageProfilePlugin() {
loadSettings();
}
@Override
public String getFragmentClass() {
return CircadianPercentageProfileFragment.class.getName();
}
@Override
public int getType() {
return PluginBase.PROFILE;
}
@Override
public String getName() {
return MainApp.instance().getString(R.string.circadian_percentage_profile);
}
// Returns the translated short name, falling back to the long name when the
// short-name resource is empty for the current locale.
@Override
public String getNameShort() {
String name = MainApp.sResources.getString(R.string.circadian_percentage_profile_shortname);
if (!name.trim().isEmpty()){
//only if translation exists
return name;
}
// use long name as fallback
return getName();
}
@Override
public boolean isEnabled(int type) {
return type == PROFILE && fragmentEnabled;
}
@Override
public boolean isVisibleInTabs(int type) {
return type == PROFILE && fragmentVisible;
}
@Override
public boolean canBeHidden(int type) {
return true;
}
// NOTE(review): assigns a *static* field through `this.` — works, but the
// flag is shared by all instances; consider a qualified static assignment.
@Override
public void setFragmentEnabled(int type, boolean fragmentEnabled) {
if (type == PROFILE) this.fragmentEnabled = fragmentEnabled;
}
@Override
public void setFragmentVisible(int type, boolean fragmentVisible) {
if (type == PROFILE) this.fragmentVisible = fragmentVisible;
}
// Persists every field to SharedPreferences under SETTINGS_PREFIX keys and
// rebuilds the cached converted profile. Numeric arrays are stored as
// 2-decimal strings, so precision beyond that is lost on round-trip.
void storeSettings() {
if (Config.logPrefsChange)
log.debug("Storing settings");
SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(MainApp.instance().getApplicationContext());
SharedPreferences.Editor editor = settings.edit();
editor.putBoolean(SETTINGS_PREFIX + "mmol", mmol);
editor.putBoolean(SETTINGS_PREFIX + "mgdl", mgdl);
editor.putString(SETTINGS_PREFIX + "dia", dia.toString());
editor.putString(SETTINGS_PREFIX + "targetlow", targetLow.toString());
editor.putString(SETTINGS_PREFIX + "targethigh", targetHigh.toString());
editor.putString(SETTINGS_PREFIX + "timeshift", timeshift + "");
editor.putString(SETTINGS_PREFIX + "percentage", percentage + "");
for (int i = 0; i < 24; i++) {
editor.putString(SETTINGS_PREFIX + "basebasal" + i, DecimalFormatter.to2Decimal(basebasal[i]));
editor.putString(SETTINGS_PREFIX + "baseisf" + i, DecimalFormatter.to2Decimal(baseisf[i]));
editor.putString(SETTINGS_PREFIX + "baseic" + i, DecimalFormatter.to2Decimal(baseic[i]));
}
// Synchronous write; apply() would be async but commit() is kept here.
editor.commit();
createConvertedProfile();
}
// Loads every field from SharedPreferences; each key falls back to its
// hard-coded default when absent, and parse failures are logged and leave
// the previous/default value in place. Rebuilds the converted profile.
void loadSettings() {
if (Config.logPrefsChange)
log.debug("Loading stored settings");
SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(MainApp.instance().getApplicationContext());
if (settings.contains(SETTINGS_PREFIX + "mgdl"))
try {
mgdl = settings.getBoolean(SETTINGS_PREFIX + "mgdl", true);
} catch (Exception e) {
log.debug(e.getMessage());
}
else mgdl = true;
if (settings.contains(SETTINGS_PREFIX + "mmol"))
try {
mmol = settings.getBoolean(SETTINGS_PREFIX + "mmol", false);
} catch (Exception e) {
log.debug(e.getMessage());
}
else mmol = false;
if (settings.contains(SETTINGS_PREFIX + "dia"))
try {
dia = SafeParse.stringToDouble(settings.getString(SETTINGS_PREFIX + "dia", "3"));
} catch (Exception e) {
log.debug(e.getMessage());
}
else dia = 3d;
if (settings.contains(SETTINGS_PREFIX + "targetlow"))
try {
targetLow = SafeParse.stringToDouble(settings.getString(SETTINGS_PREFIX + "targetlow", "80"));
} catch (Exception e) {
log.debug(e.getMessage());
}
else targetLow = 80d;
if (settings.contains(SETTINGS_PREFIX + "targethigh"))
try {
targetHigh = SafeParse.stringToDouble(settings.getString(SETTINGS_PREFIX + "targethigh", "120"));
} catch (Exception e) {
log.debug(e.getMessage());
}
else targetHigh = 120d;
if (settings.contains(SETTINGS_PREFIX + "percentage"))
try {
percentage = SafeParse.stringToInt(settings.getString(SETTINGS_PREFIX + "percentage", "100"));
} catch (Exception e) {
log.debug(e.getMessage());
}
else percentage = 100;
if (settings.contains(SETTINGS_PREFIX + "timeshift"))
try {
timeshift = SafeParse.stringToInt(settings.getString(SETTINGS_PREFIX + "timeshift", "0"));
} catch (Exception e) {
log.debug(e.getMessage());
}
else timeshift = 0;
// Per-hour values: on parse failure the array keeps its default entry.
for (int i = 0; i < 24; i++) {
try {
basebasal[i] = SafeParse.stringToDouble(settings.getString(SETTINGS_PREFIX + "basebasal" + i, DecimalFormatter.to2Decimal(basebasal[i])));
} catch (Exception e) {
log.debug(e.getMessage());
}
try {
baseic[i] = SafeParse.stringToDouble(settings.getString(SETTINGS_PREFIX + "baseic" + i, DecimalFormatter.to2Decimal(baseic[i])));
} catch (Exception e) {
log.debug(e.getMessage());
}
try {
baseisf[i] = SafeParse.stringToDouble(settings.getString(SETTINGS_PREFIX + "baseisf" + i, DecimalFormatter.to2Decimal(baseisf[i])));
} catch (Exception e) {
log.debug(e.getMessage());
}
}
createConvertedProfile();
}
// Builds the Nightscout-style profile JSON from the base arrays. Basal is
// multiplied by percentage/100, while IC and ISF are divided by it (a higher
// percentage means more insulin, hence stronger basal and lower ratios).
// The profile name encodes total basal, percentage and timeshift.
private void createConvertedProfile() {
JSONObject json = new JSONObject();
JSONObject store = new JSONObject();
JSONObject profile = new JSONObject();
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append(DecimalFormatter.to2Decimal(sum(basebasal)));
stringBuilder.append("U@");
stringBuilder.append(percentage);
stringBuilder.append("%>");
stringBuilder.append(timeshift);
stringBuilder.append("h");
String profileName = stringBuilder.toString();
try {
json.put("defaultProfile", profileName);
json.put("store", store);
profile.put("dia", dia);
// Normalize the shift to [0, 24) so (offset + i) % 24 rotates the
// base arrays by -timeshift hours without going negative.
int offset = -(timeshift % 24) + 24;
JSONArray icArray = new JSONArray();
for (int i = 0; i < 24; i++) {
icArray.put(new JSONObject().put("timeAsSeconds", i * 60 * 60).put("value", baseic[(offset + i) % 24] * 100d / percentage));
}
profile.put("carbratio", icArray);
JSONArray isfArray = new JSONArray();
for (int i = 0; i < 24; i++) {
isfArray.put(new JSONObject().put("timeAsSeconds", i * 60 * 60).put("value", baseisf[(offset + i) % 24] * 100d / percentage));
}
profile.put("sens", isfArray);
JSONArray basalArray = new JSONArray();
for (int i = 0; i < 24; i++) {
basalArray.put(new JSONObject().put("timeAsSeconds", i * 60 * 60).put("value", basebasal[(offset + i) % 24] * percentage / 100d));
}
profile.put("basal", basalArray);
profile.put("target_low", new JSONArray().put(new JSONObject().put("timeAsSeconds", 0).put("value", targetLow)));
profile.put("target_high", new JSONArray().put(new JSONObject().put("timeAsSeconds", 0).put("value", targetHigh)));
profile.put("units", mgdl ? Constants.MGDL : Constants.MMOL);
store.put(profileName, profile);
} catch (JSONException e) {
e.printStackTrace();
}
convertedProfile = new NSProfile(json, profileName);
}
// Returns the cached converted profile after clamping the percentage to its
// allowed safety range.
@Override
public NSProfile getProfile() {
performLimitCheck();
return convertedProfile;
}
// Safety clamp: if percentage is outside [CPP_MIN_PERCENTAGE,
// CPP_MAX_PERCENTAGE] it is forced into range and the violation is logged,
// uploaded and shown to the user. NOTE(review): the clamped value is not
// persisted and the converted profile is not rebuilt here — confirm that is
// intended.
private void performLimitCheck() {
if (percentage < Constants.CPP_MIN_PERCENTAGE || percentage > Constants.CPP_MAX_PERCENTAGE){
String msg = String.format(MainApp.sResources.getString(R.string.openapsma_valueoutofrange), "Profile-Percentage");
log.error(msg);
MainApp.getConfigBuilder().uploadError(msg);
ToastUtils.showToastInUiThread(MainApp.instance().getApplicationContext(), msg, R.raw.error);
percentage = Math.max(percentage, Constants.CPP_MIN_PERCENTAGE);
percentage = Math.min(percentage, Constants.CPP_MAX_PERCENTAGE);
}
}
// HTML display strings for the scaled (and shifted) profiles.
String basalString() {
return profileString(basebasal, timeshift, percentage, true);
}
String icString() {
return profileString(baseic, timeshift, percentage, false);
}
String isfString() {
return profileString(baseisf, timeshift, percentage, false);
}
// HTML display strings for the unscaled, unshifted base profiles.
String baseIcString() {
return profileString(baseic, 0, 100, false);
}
String baseIsfString() {
return profileString(baseisf, 0, 100, false);
}
String baseBasalString() {return profileString(basebasal, 0, 100, true);}
// Total daily base basal (before percentage scaling).
public double baseBasalSum(){
return sum(basebasal);
}
// Total daily basal after scaling; each hourly value is rounded to two
// decimals first, mirroring what is actually delivered/displayed.
public double percentageBasalSum(){
double result = 0;
for (int i = 0; i < basebasal.length; i++) {
result += SafeParse.stringToDouble(DecimalFormatter.to2Decimal(basebasal[i] * percentage / 100d));
}
return result;
}
// Plain sum of an array.
public static double sum(double values[]){
double result = 0;
for (int i = 0; i < values.length; i++) {
result += values[i];
}
return result;
}
// Renders a 24-hour array as a compact HTML string, emitting an entry only
// when the value changes from the previous hour. `inc` selects whether the
// percentage scales the value up (basal) or down (IC/ISF). Note that the
// timeshift parameter is reused locally as the normalized rotation offset.
private static String profileString(double[] values, int timeshift, int percentage, boolean inc) {
timeshift = -(timeshift % 24) + 24;
StringBuilder sb = new StringBuilder();
sb.append("<b>");
sb.append(0);
sb.append("h: ");
sb.append("</b>");
sb.append(DecimalFormatter.to2Decimal(values[(timeshift + 0) % 24] * (inc ? percentage / 100d : 100d / percentage)));
double prevVal = values[(timeshift + 0) % 24];
for (int i = 1; i < 24; i++) {
if (prevVal != values[(timeshift + i) % 24]) {
sb.append(", ");
sb.append("<b>");
sb.append(i);
sb.append("h: ");
sb.append("</b>");
sb.append(DecimalFormatter.to2Decimal(values[(timeshift + i) % 24] * (inc ? percentage / 100d : 100d / percentage)));
prevVal = values[(timeshift + i) % 24];
}
}
return sb.toString();
}
}
|
app/src/main/java/info/nightscout/androidaps/plugins/CircadianPercentageProfile/CircadianPercentageProfilePlugin.java
|
package info.nightscout.androidaps.plugins.CircadianPercentageProfile;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import info.nightscout.androidaps.Config;
import info.nightscout.androidaps.Constants;
import info.nightscout.androidaps.MainApp;
import info.nightscout.androidaps.R;
import info.nightscout.androidaps.interfaces.PluginBase;
import info.nightscout.androidaps.interfaces.ProfileInterface;
import info.nightscout.client.data.NSProfile;
import info.nightscout.utils.DecimalFormatter;
import info.nightscout.utils.SafeParse;
import info.nightscout.utils.ToastUtils;
/**
* Created by Adrian on 12.11.2016.
* Based on SimpleProfile created by mike on 05.08.2016.
*/
// Profile plugin that derives the working treatment profile from 24-hour base
// arrays (basal / ISF / IC) by applying a percentage scaling and a circadian
// time shift, persisting everything in SharedPreferences.
public class CircadianPercentageProfilePlugin implements PluginBase, ProfileInterface {
    // Prefix for every SharedPreferences key owned by this plugin.
    public static final String SETTINGS_PREFIX = "CircadianPercentageProfile";
    private static Logger log = LoggerFactory.getLogger(CircadianPercentageProfilePlugin.class);
    // Plugin enable/visibility flags are static: shared across all instances.
    private static boolean fragmentEnabled = true;
    private static boolean fragmentVisible = true;
    // Cached converted profile; rebuilt by createConvertedProfile() whenever
    // settings are loaded or stored.
    private static NSProfile convertedProfile = null;
    // Unit flags; presumably exactly one of these is true — TODO confirm.
    boolean mgdl;
    boolean mmol;
    // Duration of insulin action in hours — presumably; verify against NSProfile.
    Double dia;
    // Target range bounds, in the configured glucose units.
    Double targetLow;
    Double targetHigh;
    // Scaling applied to the base values, in percent (100 = unchanged).
    public int percentage;
    // Circadian shift in hours applied to the 24-hour base arrays.
    public int timeshift;
    // 24 hourly base values; basal presumably in U/h, ISF/IC in profile units —
    // TODO confirm units against NSProfile consumers.
    double[] basebasal = new double[]{1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 1d};
    double[] baseisf = new double[]{35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d, 35d};
    double[] baseic = new double[]{4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d, 4d};

    // Loading settings also builds the initial converted profile.
    public CircadianPercentageProfilePlugin() {
        loadSettings();
    }

    @Override
    public String getFragmentClass() {
        return CircadianPercentageProfileFragment.class.getName();
    }

    @Override
    public int getType() {
        return PluginBase.PROFILE;
    }

    @Override
    public String getName() {
        return MainApp.instance().getString(R.string.circadian_percentage_profile);
    }

    // Returns the translated short name, falling back to the long name when
    // the short-name resource is empty for the current locale.
    @Override
    public String getNameShort() {
        String name = MainApp.sResources.getString(R.string.circadian_percentage_profile_shortname);
        if (!name.trim().isEmpty()) {
            //only if translation exists
            return name;
        }
        // use long name as fallback
        return getName();
    }

    @Override
    public boolean isEnabled(int type) {
        return type == PROFILE && fragmentEnabled;
    }

    @Override
    public boolean isVisibleInTabs(int type) {
        return type == PROFILE && fragmentVisible;
    }

    @Override
    public boolean canBeHidden(int type) {
        return true;
    }

    @Override
    public void setFragmentEnabled(int type, boolean fragmentEnabled) {
        // FIX: assign the static field explicitly instead of misleadingly
        // writing it through `this.` (the flag is shared by all instances).
        if (type == PROFILE) CircadianPercentageProfilePlugin.fragmentEnabled = fragmentEnabled;
    }

    @Override
    public void setFragmentVisible(int type, boolean fragmentVisible) {
        // FIX: same static-through-`this` cleanup as setFragmentEnabled.
        if (type == PROFILE) CircadianPercentageProfilePlugin.fragmentVisible = fragmentVisible;
    }

    // Persists every field to SharedPreferences under SETTINGS_PREFIX keys and
    // rebuilds the cached converted profile. Numeric arrays are stored as
    // 2-decimal strings, so precision beyond that is lost on round-trip.
    void storeSettings() {
        if (Config.logPrefsChange)
            log.debug("Storing settings");
        SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(MainApp.instance().getApplicationContext());
        SharedPreferences.Editor editor = settings.edit();
        editor.putBoolean(SETTINGS_PREFIX + "mmol", mmol);
        editor.putBoolean(SETTINGS_PREFIX + "mgdl", mgdl);
        editor.putString(SETTINGS_PREFIX + "dia", dia.toString());
        editor.putString(SETTINGS_PREFIX + "targetlow", targetLow.toString());
        editor.putString(SETTINGS_PREFIX + "targethigh", targetHigh.toString());
        editor.putString(SETTINGS_PREFIX + "timeshift", timeshift + "");
        editor.putString(SETTINGS_PREFIX + "percentage", percentage + "");
        for (int i = 0; i < 24; i++) {
            editor.putString(SETTINGS_PREFIX + "basebasal" + i, DecimalFormatter.to2Decimal(basebasal[i]));
            editor.putString(SETTINGS_PREFIX + "baseisf" + i, DecimalFormatter.to2Decimal(baseisf[i]));
            editor.putString(SETTINGS_PREFIX + "baseic" + i, DecimalFormatter.to2Decimal(baseic[i]));
        }
        // Synchronous write kept intentionally (apply() would be async).
        editor.commit();
        createConvertedProfile();
    }

    // Loads every field from SharedPreferences; each key falls back to its
    // hard-coded default when absent, and parse failures are logged and leave
    // the previous/default value in place. Rebuilds the converted profile.
    void loadSettings() {
        if (Config.logPrefsChange)
            log.debug("Loading stored settings");
        SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(MainApp.instance().getApplicationContext());
        if (settings.contains(SETTINGS_PREFIX + "mgdl"))
            try {
                mgdl = settings.getBoolean(SETTINGS_PREFIX + "mgdl", true);
            } catch (Exception e) {
                log.debug(e.getMessage());
            }
        else mgdl = true;
        if (settings.contains(SETTINGS_PREFIX + "mmol"))
            try {
                mmol = settings.getBoolean(SETTINGS_PREFIX + "mmol", false);
            } catch (Exception e) {
                log.debug(e.getMessage());
            }
        else mmol = false;
        if (settings.contains(SETTINGS_PREFIX + "dia"))
            try {
                dia = SafeParse.stringToDouble(settings.getString(SETTINGS_PREFIX + "dia", "3"));
            } catch (Exception e) {
                log.debug(e.getMessage());
            }
        else dia = 3d;
        if (settings.contains(SETTINGS_PREFIX + "targetlow"))
            try {
                targetLow = SafeParse.stringToDouble(settings.getString(SETTINGS_PREFIX + "targetlow", "80"));
            } catch (Exception e) {
                log.debug(e.getMessage());
            }
        else targetLow = 80d;
        if (settings.contains(SETTINGS_PREFIX + "targethigh"))
            try {
                targetHigh = SafeParse.stringToDouble(settings.getString(SETTINGS_PREFIX + "targethigh", "120"));
            } catch (Exception e) {
                log.debug(e.getMessage());
            }
        else targetHigh = 120d;
        if (settings.contains(SETTINGS_PREFIX + "percentage"))
            try {
                percentage = SafeParse.stringToInt(settings.getString(SETTINGS_PREFIX + "percentage", "100"));
            } catch (Exception e) {
                log.debug(e.getMessage());
            }
        else percentage = 100;
        if (settings.contains(SETTINGS_PREFIX + "timeshift"))
            try {
                timeshift = SafeParse.stringToInt(settings.getString(SETTINGS_PREFIX + "timeshift", "0"));
            } catch (Exception e) {
                log.debug(e.getMessage());
            }
        else timeshift = 0;
        // Per-hour values: on parse failure the array keeps its default entry.
        for (int i = 0; i < 24; i++) {
            try {
                basebasal[i] = SafeParse.stringToDouble(settings.getString(SETTINGS_PREFIX + "basebasal" + i, DecimalFormatter.to2Decimal(basebasal[i])));
            } catch (Exception e) {
                log.debug(e.getMessage());
            }
            try {
                baseic[i] = SafeParse.stringToDouble(settings.getString(SETTINGS_PREFIX + "baseic" + i, DecimalFormatter.to2Decimal(baseic[i])));
            } catch (Exception e) {
                log.debug(e.getMessage());
            }
            try {
                baseisf[i] = SafeParse.stringToDouble(settings.getString(SETTINGS_PREFIX + "baseisf" + i, DecimalFormatter.to2Decimal(baseisf[i])));
            } catch (Exception e) {
                log.debug(e.getMessage());
            }
        }
        createConvertedProfile();
    }

    // Builds the Nightscout-style profile JSON from the base arrays. Basal is
    // multiplied by percentage/100, while IC and ISF are divided by it (a
    // higher percentage means more insulin, hence stronger basal and lower
    // ratios). The profile name encodes total basal, percentage and timeshift.
    private void createConvertedProfile() {
        JSONObject json = new JSONObject();
        JSONObject store = new JSONObject();
        JSONObject profile = new JSONObject();
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append(DecimalFormatter.to2Decimal(sum(basebasal)));
        stringBuilder.append("U@");
        stringBuilder.append(percentage);
        stringBuilder.append("%>");
        stringBuilder.append(timeshift);
        stringBuilder.append("h");
        String profileName = stringBuilder.toString();
        try {
            json.put("defaultProfile", profileName);
            json.put("store", store);
            profile.put("dia", dia);
            // Normalize the shift to [0, 24) so (offset + i) % 24 rotates the
            // base arrays by -timeshift hours without going negative.
            int offset = -(timeshift % 24) + 24;
            JSONArray icArray = new JSONArray();
            for (int i = 0; i < 24; i++) {
                icArray.put(new JSONObject().put("timeAsSeconds", i * 60 * 60).put("value", baseic[(offset + i) % 24] * 100d / percentage));
            }
            profile.put("carbratio", icArray);
            JSONArray isfArray = new JSONArray();
            for (int i = 0; i < 24; i++) {
                isfArray.put(new JSONObject().put("timeAsSeconds", i * 60 * 60).put("value", baseisf[(offset + i) % 24] * 100d / percentage));
            }
            profile.put("sens", isfArray);
            JSONArray basalArray = new JSONArray();
            for (int i = 0; i < 24; i++) {
                basalArray.put(new JSONObject().put("timeAsSeconds", i * 60 * 60).put("value", basebasal[(offset + i) % 24] * percentage / 100d));
            }
            profile.put("basal", basalArray);
            profile.put("target_low", new JSONArray().put(new JSONObject().put("timeAsSeconds", 0).put("value", targetLow)));
            profile.put("target_high", new JSONArray().put(new JSONObject().put("timeAsSeconds", 0).put("value", targetHigh)));
            profile.put("units", mgdl ? Constants.MGDL : Constants.MMOL);
            store.put(profileName, profile);
        } catch (JSONException e) {
            e.printStackTrace();
        }
        convertedProfile = new NSProfile(json, profileName);
    }

    // Returns the cached converted profile after clamping the percentage to
    // its allowed safety range.
    @Override
    public NSProfile getProfile() {
        // FIX: re-enable the safety limit check that was commented out; it
        // clamps percentage into [CPP_MIN_PERCENTAGE, CPP_MAX_PERCENTAGE] and
        // reports the violation before the profile is handed out.
        performLimitCheck();
        return convertedProfile;
    }

    // Safety clamp: if percentage is outside [CPP_MIN_PERCENTAGE,
    // CPP_MAX_PERCENTAGE] it is forced into range and the violation is
    // logged, uploaded and shown to the user.
    private void performLimitCheck() {
        if (percentage < Constants.CPP_MIN_PERCENTAGE || percentage > Constants.CPP_MAX_PERCENTAGE) {
            String msg = String.format(MainApp.sResources.getString(R.string.openapsma_valueoutofrange), "Profile-Percentage");
            log.error(msg);
            MainApp.getConfigBuilder().uploadError(msg);
            ToastUtils.showToastInUiThread(MainApp.instance().getApplicationContext(), msg, R.raw.error);
            percentage = Math.max(percentage, Constants.CPP_MIN_PERCENTAGE);
            percentage = Math.min(percentage, Constants.CPP_MAX_PERCENTAGE);
        }
    }

    // HTML display strings for the scaled (and shifted) profiles.
    String basalString() {
        return profileString(basebasal, timeshift, percentage, true);
    }

    String icString() {
        return profileString(baseic, timeshift, percentage, false);
    }

    String isfString() {
        return profileString(baseisf, timeshift, percentage, false);
    }

    // HTML display strings for the unscaled, unshifted base profiles.
    String baseIcString() {
        return profileString(baseic, 0, 100, false);
    }

    String baseIsfString() {
        return profileString(baseisf, 0, 100, false);
    }

    String baseBasalString() {return profileString(basebasal, 0, 100, true);}

    // Total daily base basal (before percentage scaling).
    public double baseBasalSum(){
        return sum(basebasal);
    }

    // Total daily basal after scaling; each hourly value is rounded to two
    // decimals first, mirroring what is actually delivered/displayed.
    public double percentageBasalSum(){
        double result = 0;
        for (int i = 0; i < basebasal.length; i++) {
            result += SafeParse.stringToDouble(DecimalFormatter.to2Decimal(basebasal[i] * percentage / 100d));
        }
        return result;
    }

    // Plain sum of an array.
    public static double sum(double values[]){
        double result = 0;
        for (int i = 0; i < values.length; i++) {
            result += values[i];
        }
        return result;
    }

    // Renders a 24-hour array as a compact HTML string, emitting an entry only
    // when the value changes from the previous hour. `inc` selects whether the
    // percentage scales the value up (basal) or down (IC/ISF). Note that the
    // timeshift parameter is reused locally as the normalized rotation offset.
    private static String profileString(double[] values, int timeshift, int percentage, boolean inc) {
        timeshift = -(timeshift % 24) + 24;
        StringBuilder sb = new StringBuilder();
        sb.append("<b>");
        sb.append(0);
        sb.append("h: ");
        sb.append("</b>");
        sb.append(DecimalFormatter.to2Decimal(values[(timeshift + 0) % 24] * (inc ? percentage / 100d : 100d / percentage)));
        double prevVal = values[(timeshift + 0) % 24];
        for (int i = 1; i < 24; i++) {
            if (prevVal != values[(timeshift + i) % 24]) {
                sb.append(", ");
                sb.append("<b>");
                sb.append(i);
                sb.append("h: ");
                sb.append("</b>");
                sb.append(DecimalFormatter.to2Decimal(values[(timeshift + i) % 24] * (inc ? percentage / 100d : 100d / percentage)));
                prevVal = values[(timeshift + i) % 24];
            }
        }
        return sb.toString();
    }
}
|
reenable safety function
|
app/src/main/java/info/nightscout/androidaps/plugins/CircadianPercentageProfile/CircadianPercentageProfilePlugin.java
|
reenable safety function
|
|
Java
|
lgpl-2.1
|
dfba932d46848dd24da41755ca7482132bb174b3
| 0
|
mediaworx/opencms-core,victos/opencms-core,alkacon/opencms-core,MenZil/opencms-core,mediaworx/opencms-core,ggiudetti/opencms-core,MenZil/opencms-core,alkacon/opencms-core,gallardo/opencms-core,alkacon/opencms-core,MenZil/opencms-core,alkacon/opencms-core,MenZil/opencms-core,victos/opencms-core,gallardo/opencms-core,gallardo/opencms-core,ggiudetti/opencms-core,victos/opencms-core,mediaworx/opencms-core,gallardo/opencms-core,mediaworx/opencms-core,ggiudetti/opencms-core,ggiudetti/opencms-core,victos/opencms-core
|
/*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software GmbH, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.db;
import org.opencms.configuration.CmsParameterConfiguration;
import org.opencms.main.CmsLog;
import org.opencms.main.OpenCms;
import org.opencms.security.I_CmsCredentialsResolver;
import org.opencms.util.CmsStringUtil;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.commons.dbcp.ConnectionFactory;
import org.apache.commons.dbcp.DriverManagerConnectionFactory;
import org.apache.commons.dbcp.PoolableConnectionFactory;
import org.apache.commons.dbcp.PoolingDriver;
import org.apache.commons.pool.impl.GenericKeyedObjectPool;
import org.apache.commons.pool.impl.GenericKeyedObjectPoolFactory;
import org.apache.commons.pool.impl.GenericObjectPool;
/**
* Various methods to create DBCP pools.<p>
*
* Only JDBC Driver based pools are supported currently. JNDI DataSource
* based pools might be added probably later.<p>
*
* <b>Please note:</b> This class is subject to change in later versions.
* To obtain information about the connections, please use the
* {@link org.opencms.db.CmsSqlManager}.<p>
*
* @since 6.0.0
*/
public final class CmsDbPool {

    /** This prefix is required to make the JDBC DriverManager return pooled DBCP connections. */
    public static final String DBCP_JDBC_URL_PREFIX = "jdbc:apache:commons:dbcp:";

    /** Key for number of connection attempts. (The "ATTEMTS" misspelling is part of the public API and must be kept.) */
    public static final String KEY_CONNECT_ATTEMTS = "connects";

    /** Key for connection waiting. */
    public static final String KEY_CONNECT_WAITS = "wait";

    /** Prefix for database keys. */
    public static final String KEY_DATABASE = "db.";

    /** Key for the database name. */
    public static final String KEY_DATABASE_NAME = KEY_DATABASE + "name";

    /** Key for the pool id. */
    public static final String KEY_DATABASE_POOL = KEY_DATABASE + "pool";

    /** Key for statement pooling. */
    public static final String KEY_DATABASE_STATEMENTS = KEY_DATABASE + "statements";

    /** Key for the entity manager pool size. (The "Manger" misspelling in the value is kept for compatibility.) */
    public static final String KEY_ENTITY_MANAGER_POOL_SIZE = "entityMangerPoolSize";

    /** Key for additional driver specific JDBC connection properties. */
    public static final String KEY_CONNECTION_PROPERTIES = "connectionProperties";

    /** Key for jdbc driver. */
    public static final String KEY_JDBC_DRIVER = "jdbcDriver";

    /** Key for jdbc url. */
    public static final String KEY_JDBC_URL = "jdbcUrl";

    /** Key for jdbc url params. */
    public static final String KEY_JDBC_URL_PARAMS = KEY_JDBC_URL + ".params";

    /** Key for maximum active connections. */
    public static final String KEY_MAX_ACTIVE = "maxActive";

    /** Key for maximum idle connections. */
    public static final String KEY_MAX_IDLE = "maxIdle";

    /** Key for maximum wait time. */
    public static final String KEY_MAX_WAIT = "maxWait";

    /** Key for minimum idle time before a connection is subject to an eviction test. */
    public static final String KEY_MIN_EVICTABLE_IDLE_TIME = "minEvictableIdleTime";

    /** Key for minimum number of connections kept open. */
    public static final String KEY_MIN_IDLE = "minIdle";

    /** Key for number of tested connections per run. */
    public static final String KEY_NUM_TESTS_PER_EVICTION_RUN = "numTestsPerEvictionRun";

    /** Key for database password. */
    public static final String KEY_PASSWORD = "password";

    /** Key for default. */
    public static final String KEY_POOL_DEFAULT = "default";

    /** Key for pool url. */
    public static final String KEY_POOL_URL = "poolUrl";

    /** Key for pool user. */
    public static final String KEY_POOL_USER = "user";

    /** Key for vfs pool. */
    public static final String KEY_POOL_VFS = "vfs";

    /** Key for pooling flag. */
    public static final String KEY_POOLING = "pooling";

    /** Key for test on borrow flag. */
    public static final String KEY_TEST_ON_BORROW = "testOnBorrow";

    /** Key for test query. */
    public static final String KEY_TEST_QUERY = "testQuery";

    /** Key for test while idle flag. */
    public static final String KEY_TEST_WHILE_IDLE = "testWhileIdle";

    /** Key for time between two eviction runs. */
    public static final String KEY_TIME_BETWEEN_EVICTION_RUNS = "timeBetweenEvictionRuns";

    /** Key for user name. (Note: same property name "user" as {@link #KEY_POOL_USER}.) */
    public static final String KEY_USERNAME = "user";

    /** Key for "when pool exhausted" action. */
    public static final String KEY_WHEN_EXHAUSTED_ACTION = "whenExhaustedAction";

    /** The name of the opencms default pool. */
    public static final String OPENCMS_DEFAULT_POOL_NAME = "default";

    /** The default OpenCms JDBC pool URL. */
    public static final String OPENCMS_DEFAULT_POOL_URL = "opencms:default";

    /** The prefix used for opencms JDBC pools. */
    public static final String OPENCMS_URL_PREFIX = "opencms:";

    /**
     * Default constructor.<p>
     *
     * Nobody is allowed to create an instance of this class!
     */
    private CmsDbPool() {

        super();
    }

    /**
     * Creates a JDBC DriverManager based DBCP connection pool.<p>
     *
     * Reads all pool settings for the given key from the configuration,
     * configures a commons-pool {@code GenericObjectPool}, registers it with a
     * {@code PoolingDriver} under the configured pool URL, and verifies
     * database connectivity (with retries) before returning.<p>
     *
     * @param config the configuration (opencms.properties)
     * @param key the key of the database pool in the configuration
     * @return String the URL to access the created DBCP pool
     * @throws Exception if the pool could not be initialized
     */
    public static PoolingDriver createDriverManagerConnectionPool(CmsParameterConfiguration config, String key)
    throws Exception {

        // read the values of the pool configuration specified by the given key
        String jdbcDriver = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_JDBC_DRIVER);
        String jdbcUrl = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_JDBC_URL);
        String jdbcUrlParams = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_JDBC_URL_PARAMS);
        int maxActive = config.getInteger(KEY_DATABASE_POOL + '.' + key + '.' + KEY_MAX_ACTIVE, 10);
        int maxWait = config.getInteger(KEY_DATABASE_POOL + '.' + key + '.' + KEY_MAX_WAIT, 2000);
        int maxIdle = config.getInteger(KEY_DATABASE_POOL + '.' + key + '.' + KEY_MAX_IDLE, 5);
        int minEvictableIdleTime = config.getInteger(
            KEY_DATABASE_POOL + '.' + key + '.' + KEY_MIN_EVICTABLE_IDLE_TIME,
            1800000);
        int minIdle = config.getInteger(KEY_DATABASE_POOL + '.' + key + '.' + KEY_MIN_IDLE, 0);
        int numTestsPerEvictionRun = config.getInteger(KEY_DATABASE_POOL
            + '.'
            + key
            + '.'
            + KEY_NUM_TESTS_PER_EVICTION_RUN, 3);
        int timeBetweenEvictionRuns = config.getInteger(KEY_DATABASE_POOL
            + '.'
            + key
            + '.'
            + KEY_TIME_BETWEEN_EVICTION_RUNS, 3600000);
        String testQuery = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_TEST_QUERY);
        String username = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_USERNAME);
        // the credentials resolver may indirect user name / password (e.g. to an external store)
        username = OpenCms.getCredentialsResolver().resolveCredential(I_CmsCredentialsResolver.DB_USER, username);
        String password = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_PASSWORD);
        password = OpenCms.getCredentialsResolver().resolveCredential(I_CmsCredentialsResolver.DB_PASSWORD, password);
        String poolUrl = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_POOL_URL);
        // NOTE(review): this throws a NullPointerException if the "whenExhaustedAction" property
        // is missing entirely - presumably the property is mandatory in opencms.properties, confirm
        String whenExhaustedActionValue = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_WHEN_EXHAUSTED_ACTION).trim();
        byte whenExhaustedAction = 0;
        boolean testOnBorrow = Boolean.valueOf(
            config.getString(KEY_DATABASE_POOL + '.' + key + '.' + KEY_TEST_ON_BORROW, "false").trim()).booleanValue();
        boolean testWhileIdle = Boolean.valueOf(
            config.getString(KEY_DATABASE_POOL + '.' + key + '.' + KEY_TEST_WHILE_IDLE, "false").trim()).booleanValue();
        // map the configured action name to the commons-pool constant
        if ("block".equalsIgnoreCase(whenExhaustedActionValue)) {
            whenExhaustedAction = GenericObjectPool.WHEN_EXHAUSTED_BLOCK;
        } else if ("fail".equalsIgnoreCase(whenExhaustedActionValue)) {
            whenExhaustedAction = GenericObjectPool.WHEN_EXHAUSTED_FAIL;
        } else if ("grow".equalsIgnoreCase(whenExhaustedActionValue)) {
            whenExhaustedAction = GenericObjectPool.WHEN_EXHAUSTED_GROW;
        } else {
            whenExhaustedAction = GenericObjectPool.DEFAULT_WHEN_EXHAUSTED_ACTION;
        }
        if ("".equals(testQuery)) {
            // an empty test query disables connection testing (see the "if (testQuery != null)" block below)
            testQuery = null;
        }
        if (username == null) {
            username = "";
        }
        if (password == null) {
            password = "";
        }
        // read the values of the statement pool configuration specified by the given key
        boolean poolingStmts = Boolean.valueOf(
            config.getString(KEY_DATABASE_STATEMENTS + '.' + key + '.' + KEY_POOLING, CmsStringUtil.TRUE).trim()).booleanValue();
        int maxActiveStmts = config.getInteger(KEY_DATABASE_STATEMENTS + '.' + key + '.' + KEY_MAX_ACTIVE, 25);
        int maxWaitStmts = config.getInteger(KEY_DATABASE_STATEMENTS + '.' + key + '.' + KEY_MAX_WAIT, 250);
        int maxIdleStmts = config.getInteger(KEY_DATABASE_STATEMENTS + '.' + key + '.' + KEY_MAX_IDLE, 15);
        String whenStmtsExhaustedActionValue = config.get(KEY_DATABASE_STATEMENTS
            + '.'
            + key
            + '.'
            + KEY_WHEN_EXHAUSTED_ACTION);
        // statement pool defaults to "grow" when no action is configured
        byte whenStmtsExhaustedAction = GenericKeyedObjectPool.WHEN_EXHAUSTED_GROW;
        if (whenStmtsExhaustedActionValue != null) {
            whenStmtsExhaustedActionValue = whenStmtsExhaustedActionValue.trim();
            whenStmtsExhaustedAction = ("block".equalsIgnoreCase(whenStmtsExhaustedActionValue))
            ? GenericKeyedObjectPool.WHEN_EXHAUSTED_BLOCK
            : ("fail".equalsIgnoreCase(whenStmtsExhaustedActionValue))
            ? GenericKeyedObjectPool.WHEN_EXHAUSTED_FAIL
            : GenericKeyedObjectPool.WHEN_EXHAUSTED_GROW;
        }
        int connectionAttempts = config.getInteger(KEY_DATABASE_POOL + '.' + key + '.' + KEY_CONNECT_ATTEMTS, 10);
        int connetionsWait = config.getInteger(KEY_DATABASE_POOL + '.' + key + '.' + KEY_CONNECT_WAITS, 5000);
        // create an instance of the JDBC driver (loading the class registers it with the DriverManager)
        Class.forName(jdbcDriver).newInstance();
        // initialize a keyed object pool to store connections
        GenericObjectPool connectionPool = new GenericObjectPool(null);

        /* Abandoned pool configuration:
         *
         * In case the systems encounters "pool exhaustion" (runs out of connections),
         * comment the above line with "new GenericObjectPool(null)" and uncomment the
         * 5 lines below. This will generate an "abandoned pool" configuration that logs
         * abandoned connections to the System.out. Unfortunately this code is deprecated,
         * so to avoid code warnings it's also disabled here.
         * Tested with commons-pool v 1.2.
         */

        //        AbandonedConfig abandonedConfig = new AbandonedConfig();
        //        abandonedConfig.setLogAbandoned(true);
        //        abandonedConfig.setRemoveAbandoned(true);
        //        abandonedConfig.setRemoveAbandonedTimeout(5);
        //        GenericObjectPool connectionPool = new AbandonedObjectPool(null, abandonedConfig);

        // initialize an object pool to store connections
        connectionPool.setMaxActive(maxActive);
        connectionPool.setMaxIdle(maxIdle);
        connectionPool.setMinIdle(minIdle);
        connectionPool.setMaxWait(maxWait);
        connectionPool.setWhenExhaustedAction(whenExhaustedAction);
        if (testQuery != null) {
            // connection testing / eviction is only enabled when a test query is configured
            connectionPool.setTestOnBorrow(testOnBorrow);
            connectionPool.setTestWhileIdle(testWhileIdle);
            connectionPool.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRuns);
            connectionPool.setNumTestsPerEvictionRun(numTestsPerEvictionRun);
            connectionPool.setMinEvictableIdleTimeMillis(minEvictableIdleTime);
        }
        // initialize a connection factory to make the DriverManager taking connections from the pool
        if (jdbcUrlParams != null) {
            jdbcUrl += jdbcUrlParams;
        }
        // collect optional driver specific connection properties plus the resolved credentials
        Properties connectionProperties = config.getPrefixedProperties(KEY_DATABASE_POOL + '.' + key + '.' + KEY_CONNECTION_PROPERTIES);
        connectionProperties.put(KEY_USERNAME, username);
        connectionProperties.put(KEY_PASSWORD, password);
        ConnectionFactory connectionFactory = new DriverManagerConnectionFactory(jdbcUrl, connectionProperties);
        // Set up statement pool, if desired
        GenericKeyedObjectPoolFactory statementFactory = null;
        if (poolingStmts) {
            statementFactory = new GenericKeyedObjectPoolFactory(
                null,
                maxActiveStmts,
                whenStmtsExhaustedAction,
                maxWaitStmts,
                maxIdleStmts);
        }
        // initialize a factory to obtain pooled connections and prepared statements
        // (the factory wires itself into the connection pool, so the instance itself is unused)
        new PoolableConnectionFactory(connectionFactory, connectionPool, statementFactory, testQuery, false, true);
        // initialize a new pooling driver using the pool
        PoolingDriver driver = new PoolingDriver();
        driver.registerPool(poolUrl, connectionPool);
        Connection con = null;
        boolean connect = false;
        int connectionTests = 0;
        // try to connect once to the database to ensure it can be connected to at all
        // if the connection cannot be established, multiple attempts will be done to connect
        // just in case the database was not fast enough to start before OpenCms was started
        do {
            try {
                // try to connect
                con = connectionFactory.createConnection();
                connect = true;
            } catch (Exception e) {
                // connection failed, increase attempts, sleep for some seconds and log a message
                connectionTests++;
                if (CmsLog.INIT.isInfoEnabled()) {
                    CmsLog.INIT.info(Messages.get().getBundle().key(
                        Messages.INIT_WAIT_FOR_DB_4,
                        new Object[] {poolUrl, jdbcUrl, new Integer(connectionTests), new Integer(connetionsWait)}));
                }
                Thread.sleep(connetionsWait);
            } finally {
                // close the probe connection, it only served to verify connectivity
                if (con != null) {
                    con.close();
                }
            }
        } while (!connect && (connectionTests < connectionAttempts));
        if (CmsLog.INIT.isInfoEnabled()) {
            CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_JDBC_POOL_2, poolUrl, jdbcUrl));
        }
        return driver;
    }

    /**
     * Returns the database pool name for a given configuration key.<p>
     *
     * @param configuration the configuration
     * @param key a db pool configuration key
     * @return the database pool name
     */
    public static String getDbPoolName(CmsParameterConfiguration configuration, String key) {

        return configuration.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_POOL_URL);
    }

    /**
     * Returns a list of available database pool names.<p>
     *
     * @param configuration the configuration to read the pool names from
     *
     * @return a list of database pool names
     */
    public static List<String> getDbPoolUrls(CmsParameterConfiguration configuration) {

        List<String> dbPoolNames = new ArrayList<String>();
        // "db.pools" lists the configured pool keys; map each key to its pool URL
        List<String> driverPoolNames = configuration.getList(CmsDriverManager.CONFIGURATION_DB + ".pools");
        for (String driverPoolName : driverPoolNames) {
            dbPoolNames.add(getDbPoolName(configuration, driverPoolName));
        }
        return dbPoolNames;
    }

    /**
     * Returns the name of the default database connection pool.<p>
     *
     * @return the name of the default database connection pool
     */
    public static String getDefaultDbPoolName() {

        return OPENCMS_DEFAULT_POOL_NAME;
    }
}
|
src/org/opencms/db/CmsDbPool.java
|
/*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software GmbH, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.db;
import org.opencms.configuration.CmsParameterConfiguration;
import org.opencms.main.CmsLog;
import org.opencms.main.OpenCms;
import org.opencms.security.I_CmsCredentialsResolver;
import org.opencms.util.CmsStringUtil;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.commons.dbcp.ConnectionFactory;
import org.apache.commons.dbcp.DriverManagerConnectionFactory;
import org.apache.commons.dbcp.PoolableConnectionFactory;
import org.apache.commons.dbcp.PoolingDriver;
import org.apache.commons.pool.impl.GenericKeyedObjectPool;
import org.apache.commons.pool.impl.GenericKeyedObjectPoolFactory;
import org.apache.commons.pool.impl.GenericObjectPool;
/**
* Various methods to create DBCP pools.<p>
*
* Only JDBC Driver based pools are supported currently. JNDI DataSource
* based pools might be added probably later.<p>
*
* <b>Please note:</b> This class is subject to change in later versions.
* To obtain information about the connections, please use the
* {@link org.opencms.db.CmsSqlManager}.<p>
*
* @since 6.0.0
*/
public final class CmsDbPool {

    /** This prefix is required to make the JDBC DriverManager return pooled DBCP connections. */
    public static final String DBCP_JDBC_URL_PREFIX = "jdbc:apache:commons:dbcp:";

    /** Key for number of connection attempts. (The "ATTEMTS" misspelling is part of the public API and must be kept.) */
    public static final String KEY_CONNECT_ATTEMTS = "connects";

    /** Key for connection waiting. */
    public static final String KEY_CONNECT_WAITS = "wait";

    /** Prefix for database keys. */
    public static final String KEY_DATABASE = "db.";

    /** Key for the database name. */
    public static final String KEY_DATABASE_NAME = KEY_DATABASE + "name";

    /** Key for the pool id. */
    public static final String KEY_DATABASE_POOL = KEY_DATABASE + "pool";

    /** Key for statement pooling. */
    public static final String KEY_DATABASE_STATEMENTS = KEY_DATABASE + "statements";

    /** Key for the entity manager pool size. (The "Manger" misspelling in the value is kept for compatibility.) */
    public static final String KEY_ENTITY_MANAGER_POOL_SIZE = "entityMangerPoolSize";

    /** Key for additional driver specific JDBC connection properties.
     *  (Fix: this constant was referenced in createDriverManagerConnectionPool but never declared.) */
    public static final String KEY_CONNECTION_PROPERTIES = "connectionProperties";

    /** Key for jdbc driver. */
    public static final String KEY_JDBC_DRIVER = "jdbcDriver";

    /** Key for jdbc url. */
    public static final String KEY_JDBC_URL = "jdbcUrl";

    /** Key for jdbc url params. */
    public static final String KEY_JDBC_URL_PARAMS = KEY_JDBC_URL + ".params";

    /** Key for maximum active connections. */
    public static final String KEY_MAX_ACTIVE = "maxActive";

    /** Key for maximum idle connections. */
    public static final String KEY_MAX_IDLE = "maxIdle";

    /** Key for maximum wait time. */
    public static final String KEY_MAX_WAIT = "maxWait";

    /** Key for minimum idle time before a connection is subject to an eviction test. */
    public static final String KEY_MIN_EVICTABLE_IDLE_TIME = "minEvictableIdleTime";

    /** Key for minimum number of connections kept open. */
    public static final String KEY_MIN_IDLE = "minIdle";

    /** Key for number of tested connections per run. */
    public static final String KEY_NUM_TESTS_PER_EVICTION_RUN = "numTestsPerEvictionRun";

    /** Key for database password. */
    public static final String KEY_PASSWORD = "password";

    /** Key for default. */
    public static final String KEY_POOL_DEFAULT = "default";

    /** Key for pool url. */
    public static final String KEY_POOL_URL = "poolUrl";

    /** Key for pool user. */
    public static final String KEY_POOL_USER = "user";

    /** Key for vfs pool. */
    public static final String KEY_POOL_VFS = "vfs";

    /** Key for pooling flag. */
    public static final String KEY_POOLING = "pooling";

    /** Key for test on borrow flag. */
    public static final String KEY_TEST_ON_BORROW = "testOnBorrow";

    /** Key for test query. */
    public static final String KEY_TEST_QUERY = "testQuery";

    /** Key for test while idle flag. */
    public static final String KEY_TEST_WHILE_IDLE = "testWhileIdle";

    /** Key for time between two eviction runs. */
    public static final String KEY_TIME_BETWEEN_EVICTION_RUNS = "timeBetweenEvictionRuns";

    /** Key for user name. (Note: same property name "user" as {@link #KEY_POOL_USER}.) */
    public static final String KEY_USERNAME = "user";

    /** Key for "when pool exhausted" action. */
    public static final String KEY_WHEN_EXHAUSTED_ACTION = "whenExhaustedAction";

    /** The name of the opencms default pool. */
    public static final String OPENCMS_DEFAULT_POOL_NAME = "default";

    /** The default OpenCms JDBC pool URL. */
    public static final String OPENCMS_DEFAULT_POOL_URL = "opencms:default";

    /** The prefix used for opencms JDBC pools. */
    public static final String OPENCMS_URL_PREFIX = "opencms:";

    /**
     * Default constructor.<p>
     *
     * Nobody is allowed to create an instance of this class!
     */
    private CmsDbPool() {

        super();
    }

    /**
     * Creates a JDBC DriverManager based DBCP connection pool.<p>
     *
     * Reads all pool settings for the given key from the configuration,
     * configures a commons-pool {@code GenericObjectPool}, registers it with a
     * {@code PoolingDriver} under the configured pool URL, and verifies
     * database connectivity (with retries) before returning.<p>
     *
     * @param config the configuration (opencms.properties)
     * @param key the key of the database pool in the configuration
     * @return String the URL to access the created DBCP pool
     * @throws Exception if the pool could not be initialized
     */
    public static PoolingDriver createDriverManagerConnectionPool(CmsParameterConfiguration config, String key)
    throws Exception {

        // read the values of the pool configuration specified by the given key
        String jdbcDriver = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_JDBC_DRIVER);
        String jdbcUrl = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_JDBC_URL);
        String jdbcUrlParams = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_JDBC_URL_PARAMS);
        int maxActive = config.getInteger(KEY_DATABASE_POOL + '.' + key + '.' + KEY_MAX_ACTIVE, 10);
        int maxWait = config.getInteger(KEY_DATABASE_POOL + '.' + key + '.' + KEY_MAX_WAIT, 2000);
        int maxIdle = config.getInteger(KEY_DATABASE_POOL + '.' + key + '.' + KEY_MAX_IDLE, 5);
        int minEvictableIdleTime = config.getInteger(
            KEY_DATABASE_POOL + '.' + key + '.' + KEY_MIN_EVICTABLE_IDLE_TIME,
            1800000);
        int minIdle = config.getInteger(KEY_DATABASE_POOL + '.' + key + '.' + KEY_MIN_IDLE, 0);
        int numTestsPerEvictionRun = config.getInteger(KEY_DATABASE_POOL
            + '.'
            + key
            + '.'
            + KEY_NUM_TESTS_PER_EVICTION_RUN, 3);
        int timeBetweenEvictionRuns = config.getInteger(KEY_DATABASE_POOL
            + '.'
            + key
            + '.'
            + KEY_TIME_BETWEEN_EVICTION_RUNS, 3600000);
        String testQuery = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_TEST_QUERY);
        String username = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_USERNAME);
        // the credentials resolver may indirect user name / password (e.g. to an external store)
        username = OpenCms.getCredentialsResolver().resolveCredential(I_CmsCredentialsResolver.DB_USER, username);
        String password = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_PASSWORD);
        password = OpenCms.getCredentialsResolver().resolveCredential(I_CmsCredentialsResolver.DB_PASSWORD, password);
        String poolUrl = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_POOL_URL);
        // NOTE(review): this throws a NullPointerException if the "whenExhaustedAction" property
        // is missing entirely - presumably the property is mandatory in opencms.properties, confirm
        String whenExhaustedActionValue = config.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_WHEN_EXHAUSTED_ACTION).trim();
        byte whenExhaustedAction = 0;
        boolean testOnBorrow = Boolean.valueOf(
            config.getString(KEY_DATABASE_POOL + '.' + key + '.' + KEY_TEST_ON_BORROW, "false").trim()).booleanValue();
        boolean testWhileIdle = Boolean.valueOf(
            config.getString(KEY_DATABASE_POOL + '.' + key + '.' + KEY_TEST_WHILE_IDLE, "false").trim()).booleanValue();
        // map the configured action name to the commons-pool constant
        if ("block".equalsIgnoreCase(whenExhaustedActionValue)) {
            whenExhaustedAction = GenericObjectPool.WHEN_EXHAUSTED_BLOCK;
        } else if ("fail".equalsIgnoreCase(whenExhaustedActionValue)) {
            whenExhaustedAction = GenericObjectPool.WHEN_EXHAUSTED_FAIL;
        } else if ("grow".equalsIgnoreCase(whenExhaustedActionValue)) {
            whenExhaustedAction = GenericObjectPool.WHEN_EXHAUSTED_GROW;
        } else {
            whenExhaustedAction = GenericObjectPool.DEFAULT_WHEN_EXHAUSTED_ACTION;
        }
        if ("".equals(testQuery)) {
            // an empty test query disables connection testing (see the "if (testQuery != null)" block below)
            testQuery = null;
        }
        if (username == null) {
            username = "";
        }
        if (password == null) {
            password = "";
        }
        // read the values of the statement pool configuration specified by the given key
        boolean poolingStmts = Boolean.valueOf(
            config.getString(KEY_DATABASE_STATEMENTS + '.' + key + '.' + KEY_POOLING, CmsStringUtil.TRUE).trim()).booleanValue();
        int maxActiveStmts = config.getInteger(KEY_DATABASE_STATEMENTS + '.' + key + '.' + KEY_MAX_ACTIVE, 25);
        int maxWaitStmts = config.getInteger(KEY_DATABASE_STATEMENTS + '.' + key + '.' + KEY_MAX_WAIT, 250);
        int maxIdleStmts = config.getInteger(KEY_DATABASE_STATEMENTS + '.' + key + '.' + KEY_MAX_IDLE, 15);
        String whenStmtsExhaustedActionValue = config.get(KEY_DATABASE_STATEMENTS
            + '.'
            + key
            + '.'
            + KEY_WHEN_EXHAUSTED_ACTION);
        // statement pool defaults to "grow" when no action is configured
        byte whenStmtsExhaustedAction = GenericKeyedObjectPool.WHEN_EXHAUSTED_GROW;
        if (whenStmtsExhaustedActionValue != null) {
            whenStmtsExhaustedActionValue = whenStmtsExhaustedActionValue.trim();
            whenStmtsExhaustedAction = ("block".equalsIgnoreCase(whenStmtsExhaustedActionValue))
            ? GenericKeyedObjectPool.WHEN_EXHAUSTED_BLOCK
            : ("fail".equalsIgnoreCase(whenStmtsExhaustedActionValue))
            ? GenericKeyedObjectPool.WHEN_EXHAUSTED_FAIL
            : GenericKeyedObjectPool.WHEN_EXHAUSTED_GROW;
        }
        int connectionAttempts = config.getInteger(KEY_DATABASE_POOL + '.' + key + '.' + KEY_CONNECT_ATTEMTS, 10);
        int connetionsWait = config.getInteger(KEY_DATABASE_POOL + '.' + key + '.' + KEY_CONNECT_WAITS, 5000);
        // create an instance of the JDBC driver (loading the class registers it with the DriverManager)
        Class.forName(jdbcDriver).newInstance();
        // initialize a keyed object pool to store connections
        GenericObjectPool connectionPool = new GenericObjectPool(null);

        /* Abandoned pool configuration:
         *
         * In case the systems encounters "pool exhaustion" (runs out of connections),
         * comment the above line with "new GenericObjectPool(null)" and uncomment the
         * 5 lines below. This will generate an "abandoned pool" configuration that logs
         * abandoned connections to the System.out. Unfortunately this code is deprecated,
         * so to avoid code warnings it's also disabled here.
         * Tested with commons-pool v 1.2.
         */

        //        AbandonedConfig abandonedConfig = new AbandonedConfig();
        //        abandonedConfig.setLogAbandoned(true);
        //        abandonedConfig.setRemoveAbandoned(true);
        //        abandonedConfig.setRemoveAbandonedTimeout(5);
        //        GenericObjectPool connectionPool = new AbandonedObjectPool(null, abandonedConfig);

        // initialize an object pool to store connections
        connectionPool.setMaxActive(maxActive);
        connectionPool.setMaxIdle(maxIdle);
        connectionPool.setMinIdle(minIdle);
        connectionPool.setMaxWait(maxWait);
        connectionPool.setWhenExhaustedAction(whenExhaustedAction);
        if (testQuery != null) {
            // connection testing / eviction is only enabled when a test query is configured
            connectionPool.setTestOnBorrow(testOnBorrow);
            connectionPool.setTestWhileIdle(testWhileIdle);
            connectionPool.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRuns);
            connectionPool.setNumTestsPerEvictionRun(numTestsPerEvictionRun);
            connectionPool.setMinEvictableIdleTimeMillis(minEvictableIdleTime);
        }
        // initialize a connection factory to make the DriverManager taking connections from the pool
        if (jdbcUrlParams != null) {
            jdbcUrl += jdbcUrlParams;
        }
        // collect optional driver specific connection properties plus the resolved credentials
        Properties connectionProperties = config.getPrefixedProperties(KEY_DATABASE_POOL + '.' + key + '.' + KEY_CONNECTION_PROPERTIES);
        connectionProperties.put(KEY_USERNAME, username);
        connectionProperties.put(KEY_PASSWORD, password);
        ConnectionFactory connectionFactory = new DriverManagerConnectionFactory(jdbcUrl, connectionProperties);
        // Set up statement pool, if desired
        GenericKeyedObjectPoolFactory statementFactory = null;
        if (poolingStmts) {
            statementFactory = new GenericKeyedObjectPoolFactory(
                null,
                maxActiveStmts,
                whenStmtsExhaustedAction,
                maxWaitStmts,
                maxIdleStmts);
        }
        // initialize a factory to obtain pooled connections and prepared statements
        // (the factory wires itself into the connection pool, so the instance itself is unused)
        new PoolableConnectionFactory(connectionFactory, connectionPool, statementFactory, testQuery, false, true);
        // initialize a new pooling driver using the pool
        PoolingDriver driver = new PoolingDriver();
        driver.registerPool(poolUrl, connectionPool);
        Connection con = null;
        boolean connect = false;
        int connectionTests = 0;
        // try to connect once to the database to ensure it can be connected to at all
        // if the connection cannot be established, multiple attempts will be done to connect
        // just in case the database was not fast enough to start before OpenCms was started
        do {
            try {
                // try to connect
                con = connectionFactory.createConnection();
                connect = true;
            } catch (Exception e) {
                // connection failed, increase attempts, sleep for some seconds and log a message
                connectionTests++;
                if (CmsLog.INIT.isInfoEnabled()) {
                    CmsLog.INIT.info(Messages.get().getBundle().key(
                        Messages.INIT_WAIT_FOR_DB_4,
                        new Object[] {poolUrl, jdbcUrl, new Integer(connectionTests), new Integer(connetionsWait)}));
                }
                Thread.sleep(connetionsWait);
            } finally {
                // close the probe connection, it only served to verify connectivity
                if (con != null) {
                    con.close();
                }
            }
        } while (!connect && (connectionTests < connectionAttempts));
        if (CmsLog.INIT.isInfoEnabled()) {
            CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_JDBC_POOL_2, poolUrl, jdbcUrl));
        }
        return driver;
    }

    /**
     * Returns the database pool name for a given configuration key.<p>
     *
     * @param configuration the configuration
     * @param key a db pool configuration key
     * @return the database pool name
     */
    public static String getDbPoolName(CmsParameterConfiguration configuration, String key) {

        return configuration.get(KEY_DATABASE_POOL + '.' + key + '.' + KEY_POOL_URL);
    }

    /**
     * Returns a list of available database pool names.<p>
     *
     * @param configuration the configuration to read the pool names from
     *
     * @return a list of database pool names
     */
    public static List<String> getDbPoolUrls(CmsParameterConfiguration configuration) {

        List<String> dbPoolNames = new ArrayList<String>();
        // "db.pools" lists the configured pool keys; map each key to its pool URL
        List<String> driverPoolNames = configuration.getList(CmsDriverManager.CONFIGURATION_DB + ".pools");
        for (String driverPoolName : driverPoolNames) {
            dbPoolNames.add(getDbPoolName(configuration, driverPoolName));
        }
        return dbPoolNames;
    }

    /**
     * Returns the name of the default database connection pool.<p>
     *
     * @return the name of the default database connection pool
     */
    public static String getDefaultDbPoolName() {

        return OPENCMS_DEFAULT_POOL_NAME;
    }
}
|
Fix CmsDbPool: define missing constant
|
src/org/opencms/db/CmsDbPool.java
|
Fix CmsDbPool: define missing constant
|
|
Java
|
lgpl-2.1
|
0766bc40ee54ad0eb9955f80417929e1706cd07e
| 0
|
svn2github/beast-mcmc,svn2github/beast-mcmc,armanbilge/BEAST_sandbox,armanbilge/BEAST_sandbox,armanbilge/BEAST_sandbox,svn2github/beast-mcmc,svn2github/beast-mcmc,armanbilge/BEAST_sandbox,svn2github/beast-mcmc,armanbilge/BEAST_sandbox
|
package test.dr.evomodel.branchratemodel;
import dr.evolution.alignment.SitePatterns;
import dr.evolution.datatype.Nucleotides;
import dr.evolution.util.TaxonList;
import dr.evomodel.branchratemodel.DiscretizedBranchRates;
import dr.evomodel.coalescent.CoalescentLikelihood;
import dr.evomodel.coalescent.ConstantPopulationModel;
import dr.evomodel.operators.ExchangeOperator;
import dr.evomodel.operators.SubtreeSlideOperator;
import dr.evomodel.operators.WilsonBalding;
import dr.evomodel.sitemodel.GammaSiteModel;
import dr.evomodel.substmodel.FrequencyModel;
import dr.evomodel.substmodel.HKY;
import dr.evomodel.tree.RateCovarianceStatistic;
import dr.evomodel.tree.RateStatistic;
import dr.evomodel.treelikelihood.TreeLikelihood;
import dr.evomodelxml.coalescent.ConstantPopulationModelParser;
import dr.evomodelxml.sitemodel.GammaSiteModelParser;
import dr.evomodelxml.substmodel.HKYParser;
import dr.evomodelxml.tree.RateStatisticParser;
import dr.evomodelxml.treelikelihood.TreeLikelihoodParser;
import dr.inference.distribution.ExponentialDistributionModel;
import dr.inference.distribution.LogNormalDistributionModel;
import dr.inference.distribution.ParametricDistributionModel;
import dr.inference.loggers.ArrayLogFormatter;
import dr.inference.loggers.MCLogger;
import dr.inference.loggers.TabDelimitedFormatter;
import dr.inference.mcmc.MCMC;
import dr.inference.mcmc.MCMCOptions;
import dr.inference.model.CompoundLikelihood;
import dr.inference.model.Likelihood;
import dr.inference.model.Parameter;
import dr.inference.operators.*;
import dr.inference.trace.ArrayTraceList;
import dr.inference.trace.Trace;
import dr.inference.trace.TraceCorrelation;
import dr.inferencexml.distribution.DistributionModelParser;
import dr.inferencexml.distribution.LogNormalDistributionModelParser;
import dr.inferencexml.model.CompoundLikelihoodParser;
import dr.math.MathUtils;
import junit.framework.Test;
import junit.framework.TestSuite;
import test.dr.inference.trace.TraceCorrelationAssert;
import java.util.ArrayList;
import java.util.List;
/**
* @author Walter Xie
* convert testUncorrelatedRelaxedClock.xml in the folder /example
*/
public class UncorrelatedRelaxedClockTest extends TraceCorrelationAssert {
private Parameter meanParam;
private Parameter stdevParam;
    /**
     * Creates the test case for the given test name.
     *
     * @param name the name of the test method to run (passed through to the
     *     TraceCorrelationAssert / JUnit TestCase superclass)
     */
    public UncorrelatedRelaxedClockTest(String name) {
        super(name);
    }
    /**
     * Prepares the fixture before each test: runs the superclass setup,
     * fixes the random seed, and builds the test alignment.
     *
     * @throws Exception if the superclass setup or alignment creation fails
     */
    public void setUp() throws Exception {
        super.setUp();
        // Fixed seed so the stochastic MCMC run is reproducible across executions.
        MathUtils.setSeed(666);
        // Build the Dengue-4 nucleotide alignment used by the clock tests
        // (createAlignment is presumably inherited from TraceCorrelationAssert - confirm).
        createAlignment(DENGUE4_TAXON_SEQUENCE, Nucleotides.INSTANCE);
    }
public void testLogNormal() throws Exception {
meanParam = new Parameter.Default(LogNormalDistributionModelParser.MEAN, 2.3E-5, 0, 100.0);
stdevParam = new Parameter.Default(LogNormalDistributionModelParser.STDEV, 0.1, 0, 10.0);
ParametricDistributionModel distributionModel = new LogNormalDistributionModel(meanParam, stdevParam, 0.0, true); // meanInRealSpace="true"
ArrayTraceList traceList = UncorrelatedRelaxedClock(distributionModel);
// <expectation name="posterior" value="-3927.81"/>
// <expectation name="ucld.mean" value="8.28472E-4"/>
// <expectation name="ucld.stdev" value="0.17435"/>
// <expectation name="meanRate" value="8.09909E-4"/>
// <expectation name="coefficientOfVariation" value="0.15982"/>
// <expectation name="covariance" value="-3.81803E-2"/>
// <expectation name="constant.popSize" value="37.3524"/>
// <expectation name="hky.kappa" value="18.3053"/>
// <expectation name="treeModel.rootHeight" value="69.2953"/>
// <expectation name="treeLikelihood" value="-3855.78"/>
// <expectation name="skyline" value="-72.0313"/> ???
TraceCorrelation likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(CompoundLikelihoodParser.POSTERIOR));
assertExpectation(CompoundLikelihoodParser.POSTERIOR, likelihoodStats, -3927.81);
likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TreeLikelihoodParser.TREE_LIKELIHOOD));
assertExpectation(TreeLikelihoodParser.TREE_LIKELIHOOD, likelihoodStats, -3855.78);
TraceCorrelation treeHeightStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TREE_HEIGHT));
assertExpectation(TREE_HEIGHT, treeHeightStats, 69.2953);
TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
assertExpectation(HKYParser.KAPPA, kappaStats, 18.06518);
TraceCorrelation ucldStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(LogNormalDistributionModelParser.MEAN));
assertExpectation(LogNormalDistributionModelParser.MEAN, ucldStats, 8.0591451486E-4);
ucldStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(LogNormalDistributionModelParser.STDEV));
assertExpectation(LogNormalDistributionModelParser.STDEV, ucldStats, 0.16846023066431434);
TraceCorrelation rateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("meanRate"));
assertExpectation("meanRate", rateStats, 8.010906E-4);
TraceCorrelation coefficientOfVariationStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(RateStatisticParser.COEFFICIENT_OF_VARIATION));
assertExpectation(RateStatisticParser.COEFFICIENT_OF_VARIATION, coefficientOfVariationStats, 0.15982);
TraceCorrelation covarianceStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("covariance"));
assertExpectation("covariance", covarianceStats, -0.0260333026);
TraceCorrelation popStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(ConstantPopulationModelParser.POPULATION_SIZE));
assertExpectation(ConstantPopulationModelParser.POPULATION_SIZE, popStats, 37.3524);
TraceCorrelation coalescentStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("coalescent"));
assertExpectation("coalescent", coalescentStats, -72.0313);
}
public void testExponential() throws Exception {
meanParam = new Parameter.Default(1.0);
meanParam.setId(DistributionModelParser.MEAN);
stdevParam = null;
ParametricDistributionModel distributionModel = new ExponentialDistributionModel(meanParam); // offset = 0
ArrayTraceList traceList = UncorrelatedRelaxedClock(distributionModel);
TraceCorrelation likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(CompoundLikelihoodParser.POSTERIOR));
assertExpectation(CompoundLikelihoodParser.POSTERIOR, likelihoodStats, -3958.7409);
// System.out.println("likelihoodStats = " + likelihoodStats.getMean());
likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TreeLikelihoodParser.TREE_LIKELIHOOD));
assertExpectation(TreeLikelihoodParser.TREE_LIKELIHOOD, likelihoodStats, -3885.26939);
// System.out.println("treelikelihoodStats = " + likelihoodStats.getMean());
TraceCorrelation treeHeightStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TREE_HEIGHT));
assertExpectation(TREE_HEIGHT, treeHeightStats, 84.3529526);
TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
assertExpectation(HKYParser.KAPPA, kappaStats, 18.38065);
TraceCorrelation ucedStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(DistributionModelParser.MEAN));
assertExpectation(DistributionModelParser.MEAN, ucedStats, 0.0019344134887784579);
// System.out.println("ucedStats = " + ucedStats.getMean());
TraceCorrelation rateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("meanRate"));
assertExpectation("meanRate", rateStats, 0.0020538802366337084);
// System.out.println("rateStats = " + rateStats.getMean());
TraceCorrelation coefficientOfVariationStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(RateStatisticParser.COEFFICIENT_OF_VARIATION));
assertExpectation(RateStatisticParser.COEFFICIENT_OF_VARIATION, coefficientOfVariationStats, 0.773609960455);
// System.out.println("coefficientOfVariationStats = " + coefficientOfVariationStats.getMean());
TraceCorrelation covarianceStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("covariance"));
assertExpectation("covariance", covarianceStats, -0.07042030641301375);
// System.out.println("covarianceStats = " + covarianceStats.getMean());
TraceCorrelation popStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(ConstantPopulationModelParser.POPULATION_SIZE));
assertExpectation(ConstantPopulationModelParser.POPULATION_SIZE, popStats, 43.4478);
// System.out.println("popStats = " + popStats.getMean());
TraceCorrelation coalescentStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("coalescent"));
assertExpectation("coalescent", coalescentStats, -73.4715);
// System.out.println("coalescentStats = " + coalescentStats.getMean());
}
private ArrayTraceList UncorrelatedRelaxedClock(ParametricDistributionModel distributionModel) throws Exception {
Parameter popSize = new Parameter.Default(ConstantPopulationModelParser.POPULATION_SIZE, 380.0, 0, 38000.0);
ConstantPopulationModel constantModel = createRandomInitialTree(popSize);
CoalescentLikelihood coalescent = new CoalescentLikelihood(treeModel, null, new ArrayList<TaxonList>(), constantModel);
coalescent.setId("coalescent");
// clock model
Parameter rateCategoryParameter = new Parameter.Default(32);
rateCategoryParameter.setId(DiscretizedBranchRates.BRANCH_RATES);
DiscretizedBranchRates branchRateModel = new DiscretizedBranchRates(treeModel, rateCategoryParameter,
distributionModel, 1, false, Double.NaN);
RateStatistic meanRate = new RateStatistic("meanRate", treeModel, branchRateModel, true, true, RateStatisticParser.MEAN);
RateStatistic coefficientOfVariation = new RateStatistic(RateStatisticParser.COEFFICIENT_OF_VARIATION, treeModel, branchRateModel,
true, true, RateStatisticParser.COEFFICIENT_OF_VARIATION);
RateCovarianceStatistic covariance = new RateCovarianceStatistic("covariance", treeModel, branchRateModel);
// Sub model
Parameter freqs = new Parameter.Default(alignment.getStateFrequencies());
Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100.0);
FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
HKY hky = new HKY(kappa, f);
//siteModel
GammaSiteModel siteModel = new GammaSiteModel(hky);
Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
siteModel.setMutationRateParameter(mu);
//treeLikelihood
SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
TreeLikelihood treeLikelihood = new TreeLikelihood(patterns, treeModel, siteModel, branchRateModel, null,
false, false, true, false, false);
treeLikelihood.setId(TreeLikelihoodParser.TREE_LIKELIHOOD);
// Operators
OperatorSchedule schedule = new SimpleOperatorSchedule();
MCMCOperator operator = new ScaleOperator(kappa, 0.75);
operator.setWeight(1.0);
schedule.addOperator(operator);
operator = new ScaleOperator(meanParam, 0.75);
operator.setWeight(3.0);
schedule.addOperator(operator);
if (stdevParam != null) {
operator = new ScaleOperator(stdevParam, 0.75);
operator.setWeight(3.0);
schedule.addOperator(operator);
}
Parameter allInternalHeights = treeModel.createNodeHeightsParameter(true, true, false);
operator = new UpDownOperator(new Scalable[]{new Scalable.Default(meanParam)},
new Scalable[] {new Scalable.Default(allInternalHeights)}, 0.75, 3.0, CoercionMode.COERCION_ON);
schedule.addOperator(operator);
operator = new SwapOperator(rateCategoryParameter, 10);
operator.setWeight(1.0);
schedule.addOperator(operator);
operator = new RandomWalkIntegerOperator(rateCategoryParameter, 1, 10.0);
schedule.addOperator(operator);
operator = new UniformIntegerOperator(rateCategoryParameter, (int) (double)rateCategoryParameter.getBounds().getLowerLimit(0),
(int) (double)rateCategoryParameter.getBounds().getUpperLimit(0), 10.0);
schedule.addOperator(operator);
operator = new ScaleOperator(popSize, 0.75);
operator.setWeight(3.0);
schedule.addOperator(operator);
Parameter rootHeight = treeModel.getRootHeightParameter();
rootHeight.setId(TREE_HEIGHT);
operator = new ScaleOperator(rootHeight, 0.75);
operator.setWeight(3.0);
schedule.addOperator(operator);
Parameter internalHeights = treeModel.createNodeHeightsParameter(false, true, false);
operator = new UniformOperator(internalHeights, 30.0);
schedule.addOperator(operator);
operator = new SubtreeSlideOperator(treeModel, 15.0, 38.0, true, false, false, false, CoercionMode.COERCION_ON);
schedule.addOperator(operator);
operator = new ExchangeOperator(ExchangeOperator.NARROW, treeModel, 15.0);
// operator.doOperation();
schedule.addOperator(operator);
operator = new ExchangeOperator(ExchangeOperator.WIDE, treeModel, 3.0);
// operator.doOperation();
schedule.addOperator(operator);
operator = new WilsonBalding(treeModel, 3.0);
// operator.doOperation();
schedule.addOperator(operator);
//CompoundLikelihood
List<Likelihood> likelihoods = new ArrayList<Likelihood>();
likelihoods.add(coalescent);
Likelihood prior = new CompoundLikelihood(0, likelihoods);
prior.setId(CompoundLikelihoodParser.PRIOR);
likelihoods.clear();
likelihoods.add(treeLikelihood);
Likelihood likelihood = new CompoundLikelihood(-1, likelihoods);
likelihoods.clear();
likelihoods.add(prior);
likelihoods.add(likelihood);
Likelihood posterior = new CompoundLikelihood(0, likelihoods);
posterior.setId(CompoundLikelihoodParser.POSTERIOR);
// Log
ArrayLogFormatter formatter = new ArrayLogFormatter(false);
MCLogger[] loggers = new MCLogger[2];
loggers[0] = new MCLogger(formatter, 10000, false);
loggers[0].add(posterior);
loggers[0].add(treeLikelihood);
loggers[0].add(rootHeight);
loggers[0].add(meanParam);
if (stdevParam != null) loggers[0].add(stdevParam);
loggers[0].add(meanRate);
loggers[0].add(coefficientOfVariation);
loggers[0].add(covariance);
loggers[0].add(popSize);
loggers[0].add(kappa);
loggers[0].add(coalescent);
loggers[1] = new MCLogger(new TabDelimitedFormatter(System.out), 100000, false);
loggers[1].add(posterior);
loggers[1].add(treeLikelihood);
loggers[1].add(rootHeight);
loggers[1].add(meanRate);
loggers[1].add(coalescent);
// MCMC
MCMC mcmc = new MCMC("mcmc1");
MCMCOptions options = new MCMCOptions();
options.setChainLength(10000000);
options.setUseCoercion(true); // autoOptimize = true
options.setCoercionDelay(100);
options.setTemperature(1.0);
options.setFullEvaluationCount(2000);
mcmc.setShowOperatorAnalysis(true);
mcmc.init(options, posterior, schedule, loggers);
mcmc.run();
// time
System.out.println(mcmc.getTimer().toString());
// Tracer
List<Trace> traces = formatter.getTraces();
ArrayTraceList traceList = new ArrayTraceList("RandomLocalClockTest", traces, 0);
for (int i = 1; i < traces.size(); i++) {
traceList.analyseTrace(i);
}
return traceList;
}
public static Test suite() {
return new TestSuite(UncorrelatedRelaxedClockTest.class);
}
}
|
src/test/dr/evomodel/branchratemodel/UncorrelatedRelaxedClockTest.java
|
package test.dr.evomodel.branchratemodel;
import dr.evolution.alignment.SitePatterns;
import dr.evolution.datatype.Nucleotides;
import dr.evolution.util.TaxonList;
import dr.evomodel.branchratemodel.DiscretizedBranchRates;
import dr.evomodel.coalescent.CoalescentLikelihood;
import dr.evomodel.coalescent.ConstantPopulationModel;
import dr.evomodel.operators.ExchangeOperator;
import dr.evomodel.operators.SubtreeSlideOperator;
import dr.evomodel.operators.WilsonBalding;
import dr.evomodel.sitemodel.GammaSiteModel;
import dr.evomodel.substmodel.FrequencyModel;
import dr.evomodel.substmodel.HKY;
import dr.evomodel.tree.RateCovarianceStatistic;
import dr.evomodel.tree.RateStatistic;
import dr.evomodel.treelikelihood.TreeLikelihood;
import dr.evomodelxml.coalescent.ConstantPopulationModelParser;
import dr.evomodelxml.sitemodel.GammaSiteModelParser;
import dr.evomodelxml.substmodel.HKYParser;
import dr.evomodelxml.tree.RateStatisticParser;
import dr.evomodelxml.treelikelihood.TreeLikelihoodParser;
import dr.inference.distribution.ExponentialDistributionModel;
import dr.inference.distribution.LogNormalDistributionModel;
import dr.inference.distribution.ParametricDistributionModel;
import dr.inference.loggers.ArrayLogFormatter;
import dr.inference.loggers.MCLogger;
import dr.inference.loggers.TabDelimitedFormatter;
import dr.inference.mcmc.MCMC;
import dr.inference.mcmc.MCMCOptions;
import dr.inference.model.CompoundLikelihood;
import dr.inference.model.Likelihood;
import dr.inference.model.Parameter;
import dr.inference.operators.*;
import dr.inference.trace.ArrayTraceList;
import dr.inference.trace.Trace;
import dr.inference.trace.TraceCorrelation;
import dr.inferencexml.distribution.DistributionModelParser;
import dr.inferencexml.distribution.LogNormalDistributionModelParser;
import dr.inferencexml.model.CompoundLikelihoodParser;
import dr.math.MathUtils;
import junit.framework.Test;
import junit.framework.TestSuite;
import test.dr.inference.trace.TraceCorrelationAssert;
import java.util.ArrayList;
import java.util.List;
/**
* @author Walter Xie
 * Converted from testUncorrelatedRelaxedClock.xml in the /example folder.
*/
public class UncorrelatedRelaxedClockTest extends TraceCorrelationAssert {
// Mean parameter of the branch-rate distribution (log-normal mean or exponential mean).
private Parameter meanParam;
// Stdev parameter of the log-normal branch-rate distribution; left null for the exponential test.
private Parameter stdevParam;
public UncorrelatedRelaxedClockTest(String name) {
super(name);
}
/**
 * Fixes the random seed so the stochastic MCMC run is reproducible (the trace
 * statistics asserted below depend on it), then builds the Dengue-4 alignment.
 */
public void setUp() throws Exception {
super.setUp();
MathUtils.setSeed(666);
createAlignment(DENGUE4_TAXON_SEQUENCE, Nucleotides.INSTANCE);
}
/**
 * Runs the uncorrelated relaxed clock analysis with a log-normal branch-rate
 * distribution and asserts trace means against the reference values from the
 * original XML analysis (see the &lt;expectation&gt; comments below).
 */
public void testLogNormal() throws Exception {
meanParam = new Parameter.Default(LogNormalDistributionModelParser.MEAN, 2.3E-5, 0, 100.0);
stdevParam = new Parameter.Default(LogNormalDistributionModelParser.STDEV, 0.1, 0, 10.0);
ParametricDistributionModel distributionModel = new LogNormalDistributionModel(meanParam, stdevParam, 0.0, true); // meanInRealSpace="true"
ArrayTraceList traceList = UncorrelatedRelaxedClock(distributionModel);
// <expectation name="posterior" value="-3927.81"/>
// <expectation name="ucld.mean" value="8.28472E-4"/>
// <expectation name="ucld.stdev" value="0.17435"/>
// <expectation name="meanRate" value="8.09909E-4"/>
// <expectation name="coefficientOfVariation" value="0.15982"/>
// <expectation name="covariance" value="-3.81803E-2"/>
// <expectation name="constant.popSize" value="37.3524"/>
// <expectation name="hky.kappa" value="18.3053"/>
// <expectation name="treeModel.rootHeight" value="69.2953"/>
// <expectation name="treeLikelihood" value="-3855.78"/>
// <expectation name="skyline" value="-72.0313"/> ???
TraceCorrelation likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(CompoundLikelihoodParser.POSTERIOR));
assertExpectation(CompoundLikelihoodParser.POSTERIOR, likelihoodStats, -3927.81);
likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TreeLikelihoodParser.TREE_LIKELIHOOD));
assertExpectation(TreeLikelihoodParser.TREE_LIKELIHOOD, likelihoodStats, -3855.78);
TraceCorrelation treeHeightStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TREE_HEIGHT));
assertExpectation(TREE_HEIGHT, treeHeightStats, 69.2953);
TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
assertExpectation(HKYParser.KAPPA, kappaStats, 18.3053);
TraceCorrelation ucldStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(LogNormalDistributionModelParser.MEAN));
assertExpectation(LogNormalDistributionModelParser.MEAN, ucldStats, 8.18686E-4);
ucldStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(LogNormalDistributionModelParser.STDEV));
assertExpectation(LogNormalDistributionModelParser.STDEV, ucldStats, 0.16846023066431434);
TraceCorrelation rateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("meanRate"));
assertExpectation("meanRate", rateStats, 8.010906E-4);
TraceCorrelation coefficientOfVariationStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(RateStatisticParser.COEFFICIENT_OF_VARIATION));
assertExpectation(RateStatisticParser.COEFFICIENT_OF_VARIATION, coefficientOfVariationStats, 0.15982);
TraceCorrelation covarianceStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("covariance"));
assertExpectation("covariance", covarianceStats, -3.81803E-2);
TraceCorrelation popStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(ConstantPopulationModelParser.POPULATION_SIZE));
assertExpectation(ConstantPopulationModelParser.POPULATION_SIZE, popStats, 37.3524);
TraceCorrelation coalescentStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("coalescent"));
assertExpectation("coalescent", coalescentStats, -72.0313);
}
/**
 * Runs the same analysis with an exponential branch-rate distribution
 * (single mean parameter, no stdev) and asserts the expected trace means.
 */
public void testExponential() throws Exception {
meanParam = new Parameter.Default(1.0);
meanParam.setId(DistributionModelParser.MEAN);
// No stdev for the exponential model: disables the stdev operator and logger column.
stdevParam = null;
ParametricDistributionModel distributionModel = new ExponentialDistributionModel(meanParam); // offset = 0
ArrayTraceList traceList = UncorrelatedRelaxedClock(distributionModel);
TraceCorrelation likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(CompoundLikelihoodParser.POSTERIOR));
assertExpectation(CompoundLikelihoodParser.POSTERIOR, likelihoodStats, -3958.7409);
//        System.out.println("likelihoodStats = " + likelihoodStats.getMean());
likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TreeLikelihoodParser.TREE_LIKELIHOOD));
assertExpectation(TreeLikelihoodParser.TREE_LIKELIHOOD, likelihoodStats, -3885.26939);
//        System.out.println("treelikelihoodStats = " + likelihoodStats.getMean());
TraceCorrelation treeHeightStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TREE_HEIGHT));
assertExpectation(TREE_HEIGHT, treeHeightStats, 84.3529526);
TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
assertExpectation(HKYParser.KAPPA, kappaStats, 18.38065);
TraceCorrelation ucedStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(DistributionModelParser.MEAN));
assertExpectation(DistributionModelParser.MEAN, ucedStats, 0.0019344134887784579);
//        System.out.println("ucedStats = " + ucedStats.getMean());
TraceCorrelation rateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("meanRate"));
assertExpectation("meanRate", rateStats, 0.0020538802366337084);
//        System.out.println("rateStats = " + rateStats.getMean());
TraceCorrelation coefficientOfVariationStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(RateStatisticParser.COEFFICIENT_OF_VARIATION));
assertExpectation(RateStatisticParser.COEFFICIENT_OF_VARIATION, coefficientOfVariationStats, 0.7462766945263386);
//        System.out.println("coefficientOfVariationStats = " + coefficientOfVariationStats.getMean());
TraceCorrelation covarianceStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("covariance"));
assertExpectation("covariance", covarianceStats, -0.07042030641301375);
//        System.out.println("covarianceStats = " + covarianceStats.getMean());
TraceCorrelation popStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(ConstantPopulationModelParser.POPULATION_SIZE));
assertExpectation(ConstantPopulationModelParser.POPULATION_SIZE, popStats, 43.4478);
//        System.out.println("popStats = " + popStats.getMean());
TraceCorrelation coalescentStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("coalescent"));
assertExpectation("coalescent", coalescentStats, -73.4715);
//        System.out.println("coalescentStats = " + coalescentStats.getMean());
}
/**
 * Builds the full analysis for the given branch-rate distribution and runs it:
 * constant-size coalescent tree prior, discretized uncorrelated relaxed clock,
 * HKY substitution model, the standard operator mix, a 10M-state MCMC chain,
 * and finally returns the logged traces wrapped in an analysed ArrayTraceList.
 * NOTE: operator registration order matters — it determines how the seeded RNG
 * stream is consumed, so reordering changes every expected value above.
 */
private ArrayTraceList UncorrelatedRelaxedClock(ParametricDistributionModel distributionModel) throws Exception {
Parameter popSize = new Parameter.Default(ConstantPopulationModelParser.POPULATION_SIZE, 380.0, 0, 38000.0);
ConstantPopulationModel constantModel = createRandomInitialTree(popSize);
CoalescentLikelihood coalescent = new CoalescentLikelihood(treeModel, null, new ArrayList<TaxonList>(), constantModel);
coalescent.setId("coalescent");
// clock model
// One rate category per branch; 32 matches the branch count of this tree.
Parameter rateCategoryParameter = new Parameter.Default(32);
rateCategoryParameter.setId(DiscretizedBranchRates.BRANCH_RATES);
DiscretizedBranchRates branchRateModel = new DiscretizedBranchRates(treeModel, rateCategoryParameter,
distributionModel, 1, false, Double.NaN);
RateStatistic meanRate = new RateStatistic("meanRate", treeModel, branchRateModel, true, true, RateStatisticParser.MEAN);
RateStatistic coefficientOfVariation = new RateStatistic(RateStatisticParser.COEFFICIENT_OF_VARIATION, treeModel, branchRateModel,
true, true, RateStatisticParser.COEFFICIENT_OF_VARIATION);
RateCovarianceStatistic covariance = new RateCovarianceStatistic("covariance", treeModel, branchRateModel);
// Sub model
Parameter freqs = new Parameter.Default(alignment.getStateFrequencies());
Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100.0);
FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
HKY hky = new HKY(kappa, f);
//siteModel
GammaSiteModel siteModel = new GammaSiteModel(hky);
Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
siteModel.setMutationRateParameter(mu);
//treeLikelihood
SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
TreeLikelihood treeLikelihood = new TreeLikelihood(patterns, treeModel, siteModel, branchRateModel, null,
false, false, true, false, false);
treeLikelihood.setId(TreeLikelihoodParser.TREE_LIKELIHOOD);
// Operators
OperatorSchedule schedule = new SimpleOperatorSchedule();
MCMCOperator operator = new ScaleOperator(kappa, 0.75);
operator.setWeight(1.0);
schedule.addOperator(operator);
operator = new ScaleOperator(meanParam, 0.75);
operator.setWeight(3.0);
schedule.addOperator(operator);
// The stdev operator only exists for the log-normal test (stdevParam != null).
if (stdevParam != null) {
operator = new ScaleOperator(stdevParam, 0.75);
operator.setWeight(3.0);
schedule.addOperator(operator);
}
Parameter allInternalHeights = treeModel.createNodeHeightsParameter(true, true, false);
operator = new UpDownOperator(new Scalable[]{new Scalable.Default(meanParam)},
new Scalable[] {new Scalable.Default(allInternalHeights)}, 0.75, 3.0, CoercionMode.COERCION_ON);
schedule.addOperator(operator);
operator = new SwapOperator(rateCategoryParameter, 10);
operator.setWeight(1.0);
schedule.addOperator(operator);
operator = new RandomWalkIntegerOperator(rateCategoryParameter, 1, 10.0);
schedule.addOperator(operator);
operator = new UniformIntegerOperator(rateCategoryParameter, (int) (double)rateCategoryParameter.getBounds().getLowerLimit(0),
(int) (double)rateCategoryParameter.getBounds().getUpperLimit(0), 10.0);
schedule.addOperator(operator);
operator = new ScaleOperator(popSize, 0.75);
operator.setWeight(3.0);
schedule.addOperator(operator);
Parameter rootHeight = treeModel.getRootHeightParameter();
rootHeight.setId(TREE_HEIGHT);
operator = new ScaleOperator(rootHeight, 0.75);
operator.setWeight(3.0);
schedule.addOperator(operator);
Parameter internalHeights = treeModel.createNodeHeightsParameter(false, true, false);
operator = new UniformOperator(internalHeights, 30.0);
schedule.addOperator(operator);
operator = new SubtreeSlideOperator(treeModel, 15.0, 38.0, true, false, false, false, CoercionMode.COERCION_ON);
schedule.addOperator(operator);
operator = new ExchangeOperator(ExchangeOperator.NARROW, treeModel, 15.0);
//        operator.doOperation();
schedule.addOperator(operator);
operator = new ExchangeOperator(ExchangeOperator.WIDE, treeModel, 3.0);
//        operator.doOperation();
schedule.addOperator(operator);
operator = new WilsonBalding(treeModel, 3.0);
//        operator.doOperation();
schedule.addOperator(operator);
//CompoundLikelihood
// posterior = prior (coalescent) + likelihood (treeLikelihood).
List<Likelihood> likelihoods = new ArrayList<Likelihood>();
likelihoods.add(coalescent);
Likelihood prior = new CompoundLikelihood(0, likelihoods);
prior.setId(CompoundLikelihoodParser.PRIOR);
likelihoods.clear();
likelihoods.add(treeLikelihood);
Likelihood likelihood = new CompoundLikelihood(-1, likelihoods);
likelihoods.clear();
likelihoods.add(prior);
likelihoods.add(likelihood);
Likelihood posterior = new CompoundLikelihood(0, likelihoods);
posterior.setId(CompoundLikelihoodParser.POSTERIOR);
// Log
// loggers[0] records into an in-memory formatter for the trace assertions;
// loggers[1] echoes a sparse progress log to stdout.
ArrayLogFormatter formatter = new ArrayLogFormatter(false);
MCLogger[] loggers = new MCLogger[2];
loggers[0] = new MCLogger(formatter, 10000, false);
loggers[0].add(posterior);
loggers[0].add(treeLikelihood);
loggers[0].add(rootHeight);
loggers[0].add(meanParam);
if (stdevParam != null) loggers[0].add(stdevParam);
loggers[0].add(meanRate);
loggers[0].add(coefficientOfVariation);
loggers[0].add(covariance);
loggers[0].add(popSize);
loggers[0].add(kappa);
loggers[0].add(coalescent);
loggers[1] = new MCLogger(new TabDelimitedFormatter(System.out), 100000, false);
loggers[1].add(posterior);
loggers[1].add(treeLikelihood);
loggers[1].add(rootHeight);
loggers[1].add(meanRate);
loggers[1].add(coalescent);
// MCMC
MCMC mcmc = new MCMC("mcmc1");
MCMCOptions options = new MCMCOptions();
options.setChainLength(10000000);
options.setUseCoercion(true); // autoOptimize = true
options.setCoercionDelay(100);
options.setTemperature(1.0);
options.setFullEvaluationCount(2000);
mcmc.setShowOperatorAnalysis(true);
mcmc.init(options, posterior, schedule, loggers);
mcmc.run();
// time
System.out.println(mcmc.getTimer().toString());
// Tracer
List<Trace> traces = formatter.getTraces();
ArrayTraceList traceList = new ArrayTraceList("RandomLocalClockTest", traces, 0);
// Skip column 0 (the state number); analyse every recorded statistic.
for (int i = 1; i < traces.size(); i++) {
traceList.analyseTrace(i);
}
return traceList;
}
// JUnit 3 style suite entry point.
public static Test suite() {
return new TestSuite(UncorrelatedRelaxedClockTest.class);
}
}
|
fix JUnit test as Jessie advised
git-svn-id: 67bc77c75b8364e4e9cdff0eb6560f5818674cd8@3924 ca793f91-a31e-0410-b540-2769d408b6a1
|
src/test/dr/evomodel/branchratemodel/UncorrelatedRelaxedClockTest.java
|
fix JUnit test as Jessie advised
|
|
Java
|
unlicense
|
f3c7b9acce854eaf04d18a561abc3844d01c6bea
| 0
|
ferreusveritas/Growing-Trees
|
package com.ferreusveritas.dynamictrees.render;
import com.ferreusveritas.dynamictrees.api.TreeHelper;
import com.ferreusveritas.dynamictrees.blocks.BlockBranch;
import com.ferreusveritas.dynamictrees.entities.EntityFallingTree;
import com.ferreusveritas.dynamictrees.util.BranchDestructionData;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.World;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
/**
*
* This class hold different animation handlers for EntityFallingTree.
* The idea is that a unique animation could be used for a certain circumstance.
*
* @author ferreusveritas
*
*/
public class AnimationHandlers {
public static final AnimationHandler voidAnimationHandler = new AnimationHandler() {
@Override public boolean shouldDie(EntityFallingTree entity) { return true; }
@Override public void renderTransform(EntityFallingTree entity, float entityYaw, float partialTicks) { }
@Override public void initMotion(EntityFallingTree entity) { }
@Override public void handleMotion(EntityFallingTree entity) { }
@Override public void dropPayload(EntityFallingTree entity) { EntityFallingTree.standardDropPayload(entity); }
};
public static final AnimationHandler defaultAnimationHandler = new AnimationHandler() {
@Override
public void initMotion(EntityFallingTree entity) {
entity.motionY = 0.4;
entity.motionX = 0.2 * (entity.world.rand.nextFloat() - 0.5f);
entity.motionZ = 0.2 * (entity.world.rand.nextFloat() - 0.5f);
float mass = entity.getDestroyData().woodVolume;
float inertialMass = MathHelper.clamp(mass / 2048, 1, 3);
entity.motionX /= inertialMass;
entity.motionY /= inertialMass;
entity.motionZ /= inertialMass;
}
@Override
public void handleMotion(EntityFallingTree entity) {
//This will function as an inaccurate moment of inertia for the time being
float mass = entity.getDestroyData().woodVolume;
float inertialMass = MathHelper.clamp(mass / 2048, 1, 3);
entity.motionY -= 0.02;//Gravity
//entity.motionY = 0.0;
entity.posX += entity.motionX;
entity.posY += entity.motionY;
entity.posZ += entity.motionZ;
entity.rotationYaw += 1.25 / inertialMass;
entity.rotationPitch += 4 / inertialMass;
entity.rotationPitch = MathHelper.wrapDegrees(entity.rotationPitch);
entity.rotationYaw = MathHelper.wrapDegrees(entity.rotationYaw);
}
@Override
public void dropPayload(EntityFallingTree entity) {
EntityFallingTree.standardDropPayload(entity);
}
public boolean shouldDie(EntityFallingTree entity) {
return entity.ticksExisted > 25;
}
@Override
@SideOnly(Side.CLIENT)
public void renderTransform(EntityFallingTree entity, float entityYaw, float partialTicks) {
float yaw = MathHelper.wrapDegrees(com.ferreusveritas.dynamictrees.util.MathHelper.angleDegreesInterpolate(entity.prevRotationYaw, entity.rotationYaw, partialTicks));
float pit = MathHelper.wrapDegrees(com.ferreusveritas.dynamictrees.util.MathHelper.angleDegreesInterpolate(entity.prevRotationPitch, entity.rotationPitch, partialTicks));
Vec3d mc = entity.getMassCenter();
GlStateManager.translate(mc.x, mc.y, mc.z);
GlStateManager.rotate(-yaw, 0, 1, 0);
GlStateManager.rotate(pit, 1, 0, 0);
GlStateManager.translate(-mc.x - 0.5, -mc.y, -mc.z - 0.5);
}
};
public static final AnimationHandler demoAnimationHandler = new AnimationHandler() {
@Override
public void initMotion(EntityFallingTree entity) { }
@Override
public void handleMotion(EntityFallingTree entity) {
entity.rotationYaw += 6;
entity.rotationPitch += 2;
entity.rotationPitch = MathHelper.wrapDegrees(entity.rotationPitch);
entity.rotationYaw = MathHelper.wrapDegrees(entity.rotationYaw);
}
@Override
public void dropPayload(EntityFallingTree entity) { }
@Override
public boolean shouldDie(EntityFallingTree entity) {
return false;
}
@Override
public void renderTransform(EntityFallingTree entity, float entityYaw, float partialTicks) {
float yaw = MathHelper.wrapDegrees(com.ferreusveritas.dynamictrees.util.MathHelper.angleDegreesInterpolate(entity.prevRotationYaw, entity.rotationYaw, partialTicks));
float pit = MathHelper.wrapDegrees(com.ferreusveritas.dynamictrees.util.MathHelper.angleDegreesInterpolate(entity.prevRotationPitch, entity.rotationPitch, partialTicks));
Vec3d mc = entity.getMassCenter();
GlStateManager.translate(mc.x, mc.y, mc.z);
GlStateManager.rotate(-yaw, 0, 1, 0);
GlStateManager.rotate(pit, 1, 0, 0);
GlStateManager.translate(-mc.x - 0.5, -mc.y, -mc.z - 0.5);
}
};
public static final AnimationHandler falloverAnimationHandler = new AnimationHandler() {

	/** Per-entity state for this handler: whether the trunk base has touched ground. */
	class HandlerData extends AnimationHandlerData {
		boolean landed = false;
	}

	/**
	 * Returns this handler's state object for the entity, creating and ATTACHING
	 * one if the entity carries none (or one of a different type).
	 *
	 * Fix: the previous version returned a fresh fallback instance without storing
	 * it on the entity, so any mutation of it (e.g. setting {@code landed}) on that
	 * path was silently discarded.
	 */
	HandlerData getData(EntityFallingTree entity) {
		if (!(entity.animationHandlerData instanceof HandlerData)) {
			entity.animationHandlerData = new HandlerData(); // persist so state writes stick
		}
		return (HandlerData) entity.animationHandlerData;
	}

	@Override
	public void initMotion(EntityFallingTree entity) {
		entity.animationHandlerData = new HandlerData();
		// If the block under the cut position is solid on top, the tree starts out
		// "landed" and begins tipping over immediately instead of free-falling first.
		BlockPos belowBlock = entity.getDestroyData().cutPos.down();
		if (entity.world.getBlockState(belowBlock).isSideSolid(entity.world, belowBlock, EnumFacing.UP)) {
			getData(entity).landed = true;
		}
	}

	@Override
	public void handleMotion(EntityFallingTree entity) {
		BranchDestructionData destroyData = entity.getDestroyData();

		if (getData(entity).landed) {
			// Tip away from the side the tool struck; shorter trees fall faster.
			EnumFacing toolDir = destroyData.toolDir;
			float height = (float) entity.getMassCenter().y;
			float fallSpeed = height >= 1.5f ? entity.ticksExisted / (8.0f * height) : 4.0f;
			switch(toolDir) {
				case NORTH: entity.rotationPitch += fallSpeed; break;
				case SOUTH: entity.rotationPitch -= fallSpeed; break;
				case WEST:  entity.rotationYaw   += fallSpeed; break;
				case EAST:  entity.rotationYaw   -= fallSpeed; break;
				default: break;
			}
			entity.rotationPitch = MathHelper.wrapDegrees(entity.rotationPitch);
			entity.rotationYaw = MathHelper.wrapDegrees(entity.rotationYaw);
		}

		// Simple gravity integration while airborne.
		entity.motionY -= 0.03f;
		entity.posY += entity.motionY;

		World world = entity.world;

		// Horizontal footprint of the falling trunk, sized by the base branch radius
		// when the destroyed block is actually a branch; otherwise a default of 8.
		int radius = 8;
		IBlockState state = destroyData.getBranchBlockState(0);
		if (TreeHelper.isBranch(state)) {
			radius = ((BlockBranch) state.getBlock()).getRadius(state);
		}

		AxisAlignedBB fallBox = new AxisAlignedBB(entity.posX - radius, entity.posY, entity.posZ - radius,
				entity.posX + radius, entity.posY + 1.0, entity.posZ + radius);
		BlockPos pos = new BlockPos(entity.posX, entity.posY, entity.posZ);
		IBlockState collState = world.getBlockState(pos);
		AxisAlignedBB collBox = collState.getCollisionBoundingBox(world, pos);

		if (collBox != null) {
			collBox = collBox.offset(pos);
			if (fallBox.intersects(collBox)) {
				// Touched down: snap onto the colliding block and stop vertical motion.
				entity.motionY = 0;
				entity.posY = collBox.maxY;
				entity.prevPosY = entity.posY;
				getData(entity).landed = true;
			}
		}
	}

	@Override
	public void dropPayload(EntityFallingTree entity) {
		EntityFallingTree.standardDropPayload(entity);
	}

	@Override
	public boolean shouldDie(EntityFallingTree entity) {
		// Done once the trunk has rotated flat (±90° on either axis),
		// or after 120 ticks as a safety timeout.
		return Math.abs(entity.rotationPitch) >= 90 ||
			   Math.abs(entity.rotationYaw) >= 90 ||
			   entity.ticksExisted > 120;
	}

	@Override
	public void renderTransform(EntityFallingTree entity, float entityYaw, float partialTicks) {
		float yaw = MathHelper.wrapDegrees(com.ferreusveritas.dynamictrees.util.MathHelper.angleDegreesInterpolate(entity.prevRotationYaw, entity.rotationYaw, partialTicks));
		float pit = MathHelper.wrapDegrees(com.ferreusveritas.dynamictrees.util.MathHelper.angleDegreesInterpolate(entity.prevRotationPitch, entity.rotationPitch, partialTicks));
		// Pivot about the hinge point at the cut, offset by the branch radius
		// (radius is in 1/16ths of a block) toward the tool side.
		int radius = entity.getDestroyData().getBranchRadius(0);
		EnumFacing toolDir = entity.getDestroyData().toolDir;
		Vec3d toolVec = new Vec3d(toolDir.getFrontOffsetX(), toolDir.getFrontOffsetY(), toolDir.getFrontOffsetZ()).scale(radius / 16.0f);
		GlStateManager.translate(-toolVec.x, -toolVec.y, -toolVec.z);
		GlStateManager.rotate(-yaw, 0, 0, 1);
		GlStateManager.rotate(pit, 1, 0, 0);
		GlStateManager.translate(toolVec.x, toolVec.y, toolVec.z);
		GlStateManager.translate(-0.5, 0, -0.5);
	}
};
}
|
src/main/java/com/ferreusveritas/dynamictrees/render/AnimationHandlers.java
|
package com.ferreusveritas.dynamictrees.render;
import com.ferreusveritas.dynamictrees.entities.EntityFallingTree;
import com.ferreusveritas.dynamictrees.util.BranchDestructionData;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.math.Vec3d;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
/**
*
* This class hold different animation handlers for EntityFallingTree.
* The idea is that a unique animation could be used for a certain circumstance.
*
* @author ferreusveritas
*
*/
public class AnimationHandlers {

	/** Does nothing visible: the entity dies on its first tick and drops its payload. */
	public static final AnimationHandler voidAnimationHandler = new AnimationHandler() {
		@Override public boolean shouldDie(EntityFallingTree entity) { return true; }
		@Override public void renderTransform(EntityFallingTree entity, float entityYaw, float partialTicks) { }
		@Override public void initMotion(EntityFallingTree entity) { }
		@Override public void handleMotion(EntityFallingTree entity) { }
		@Override public void dropPayload(EntityFallingTree entity) { EntityFallingTree.standardDropPayload(entity); }
	};

	/** Launches the tree upward with random horizontal drift, tumbling as it flies. */
	public static final AnimationHandler defaultAnimationHandler = new AnimationHandler() {
		@Override
		public void initMotion(EntityFallingTree entity) {
			entity.motionY = 0.4;
			entity.motionX = 0.2 * (entity.world.rand.nextFloat() - 0.5f);
			entity.motionZ = 0.2 * (entity.world.rand.nextFloat() - 0.5f);
			// Heavier trees (larger wood volume) launch with less velocity.
			float mass = entity.getDestroyData().woodVolume;
			float inertia = (512 / mass);
			entity.motionX *= inertia;
			entity.motionY *= inertia;
			entity.motionZ *= inertia;
			entity.motionX = MathHelper.clamp(entity.motionX, 0.0f, 0.6f);
			entity.motionY = MathHelper.clamp(entity.motionY, 0.2f, 0.6f);
			entity.motionZ = MathHelper.clamp(entity.motionZ, 0.0f, 0.6f);
		}
		@Override
		public void handleMotion(EntityFallingTree entity) {
			entity.motionY -= 0.02;//Gravity
			//entity.motionY = 0.0;
			entity.posX += entity.motionX;
			entity.posY += entity.motionY;
			entity.posZ += entity.motionZ;
			// Tumble while airborne.
			entity.rotationYaw += 1.25;
			entity.rotationPitch += 4;
			entity.rotationPitch = MathHelper.wrapDegrees(entity.rotationPitch);
			entity.rotationYaw = MathHelper.wrapDegrees(entity.rotationYaw);
		}
		@Override
		public void dropPayload(EntityFallingTree entity) {
			EntityFallingTree.standardDropPayload(entity);
		}
		@Override // fix: was missing @Override, unlike every sibling override in this class
		public boolean shouldDie(EntityFallingTree entity) {
			return entity.ticksExisted > 25;
		}
		@Override
		@SideOnly(Side.CLIENT)
		public void renderTransform(EntityFallingTree entity, float entityYaw, float partialTicks) {
			// Interpolate rotation between ticks, then rotate about the mass center.
			float yaw = MathHelper.wrapDegrees(com.ferreusveritas.dynamictrees.util.MathHelper.angleDegreesInterpolate(entity.prevRotationYaw, entity.rotationYaw, partialTicks));
			float pit = MathHelper.wrapDegrees(com.ferreusveritas.dynamictrees.util.MathHelper.angleDegreesInterpolate(entity.prevRotationPitch, entity.rotationPitch, partialTicks));
			Vec3d mc = entity.getMassCenter();
			GlStateManager.translate(mc.x, mc.y, mc.z);
			GlStateManager.rotate(-yaw, 0, 1, 0);
			GlStateManager.rotate(pit, 1, 0, 0);
			GlStateManager.translate(-mc.x - 0.5, -mc.y, -mc.z - 0.5);
		}
	};

	/** Spins the tree in place forever; never dies, never drops anything. */
	public static final AnimationHandler demoAnimationHandler = new AnimationHandler() {
		@Override
		public void initMotion(EntityFallingTree entity) { }
		@Override
		public void handleMotion(EntityFallingTree entity) {
			entity.rotationYaw += 6;
			entity.rotationPitch += 2;
			entity.rotationPitch = MathHelper.wrapDegrees(entity.rotationPitch);
			entity.rotationYaw = MathHelper.wrapDegrees(entity.rotationYaw);
		}
		@Override
		public void dropPayload(EntityFallingTree entity) { }
		@Override
		public boolean shouldDie(EntityFallingTree entity) {
			return false;
		}
		@Override
		public void renderTransform(EntityFallingTree entity, float entityYaw, float partialTicks) {
			float yaw = MathHelper.wrapDegrees(com.ferreusveritas.dynamictrees.util.MathHelper.angleDegreesInterpolate(entity.prevRotationYaw, entity.rotationYaw, partialTicks));
			float pit = MathHelper.wrapDegrees(com.ferreusveritas.dynamictrees.util.MathHelper.angleDegreesInterpolate(entity.prevRotationPitch, entity.rotationPitch, partialTicks));
			Vec3d mc = entity.getMassCenter();
			GlStateManager.translate(mc.x, mc.y, mc.z);
			GlStateManager.rotate(-yaw, 0, 1, 0);
			GlStateManager.rotate(pit, 1, 0, 0);
			GlStateManager.translate(-mc.x - 0.5, -mc.y, -mc.z - 0.5);
		}
	};

	/** Tips the tree over away from the tool side until it lies flat. */
	public static final AnimationHandler falloverAnimationHandler = new AnimationHandler() {
		@Override
		public void initMotion(EntityFallingTree entity) { }
		@Override
		public void handleMotion(EntityFallingTree entity) {
			BranchDestructionData destroyData = entity.getDestroyData();
			EnumFacing toolDir = destroyData.toolDir;
			// Shorter trees fall faster; speed scales with ticks lived over height.
			float height = (float) entity.getMassCenter().y;
			float fallSpeed = height >= 1.5f ? entity.ticksExisted / (8.0f * height) : 4.0f;
			switch(toolDir) {
				case NORTH: entity.rotationPitch += fallSpeed; break;
				case SOUTH: entity.rotationPitch -= fallSpeed; break;
				case WEST: entity.rotationYaw += fallSpeed; break;
				case EAST: entity.rotationYaw -= fallSpeed; break;
				default: break;
			}
			entity.rotationPitch = MathHelper.wrapDegrees(entity.rotationPitch);
			entity.rotationYaw = MathHelper.wrapDegrees(entity.rotationYaw);
		}
		@Override
		public void dropPayload(EntityFallingTree entity) {
			EntityFallingTree.standardDropPayload(entity);
		}
		@Override
		public boolean shouldDie(EntityFallingTree entity) {
			// Flat on the ground (±90°) or 120-tick safety timeout.
			return Math.abs(entity.rotationPitch) >= 90 ||
					Math.abs(entity.rotationYaw) >= 90 ||
					entity.ticksExisted > 120;
		}
		@Override
		public void renderTransform(EntityFallingTree entity, float entityYaw, float partialTicks) {
			float yaw = MathHelper.wrapDegrees(com.ferreusveritas.dynamictrees.util.MathHelper.angleDegreesInterpolate(entity.prevRotationYaw, entity.rotationYaw, partialTicks));
			float pit = MathHelper.wrapDegrees(com.ferreusveritas.dynamictrees.util.MathHelper.angleDegreesInterpolate(entity.prevRotationPitch, entity.rotationPitch, partialTicks));
			//Vec3d mc = entity.getMassCenter();
			// Hinge at the cut, offset by the branch radius (in 1/16ths of a block).
			int radius = entity.getDestroyData().getBranchRadius(0);
			EnumFacing toolDir = entity.getDestroyData().toolDir;
			Vec3d toolVec = new Vec3d(toolDir.getFrontOffsetX(), toolDir.getFrontOffsetY(), toolDir.getFrontOffsetZ()).scale(radius / 16.0f);
			GlStateManager.translate(-toolVec.x, -toolVec.y, -toolVec.z);
			GlStateManager.rotate(-yaw, 0, 0, 1);
			GlStateManager.rotate(pit, 1, 0, 0);
			GlStateManager.translate(toolVec.x, toolVec.y, toolVec.z);
			GlStateManager.translate(-0.5, 0, -0.5);
		}
	};
}
|
Holy crap it's kinda working
|
src/main/java/com/ferreusveritas/dynamictrees/render/AnimationHandlers.java
|
Holy crap it's kinda working
|
|
Java
|
apache-2.0
|
f3c4667d9de5ba6ba6616d79be1df14bdc4b7b55
| 0
|
mathieufortin01/pdfbox,benmccann/pdfbox,joansmith/pdfbox,benmccann/pdfbox,mathieufortin01/pdfbox,gavanx/pdflearn,joansmith/pdfbox,gavanx/pdflearn
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.examples.util;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.text.PDFTextStripperByArea;
import java.awt.Rectangle;
import java.io.File;
import java.io.IOException;
/**
* This is an example on how to extract text from a specific area on the PDF document.
*
* Usage: java org.apache.pdfbox.examples.util.ExtractTextByArea <input-pdf>
*
* @author Ben Litchfield
*/
public final class ExtractTextByArea
{
    private ExtractTextByArea()
    {
        //utility class and should not be constructed.
    }

    /**
     * This will print the documents text in a certain area.
     *
     * @param args The command line arguments.
     *
     * @throws IOException If there is an error parsing the document.
     */
    public static void main( String[] args ) throws IOException
    {
        if( args.length != 1 )
        {
            usage();
        }
        else
        {
            // try-with-resources guarantees the document is closed even if
            // extraction throws, replacing the manual try/finally + null check.
            try (PDDocument document = PDDocument.load( new File(args[0]) ))
            {
                PDFTextStripperByArea stripper = new PDFTextStripperByArea();
                stripper.setSortByPosition( true );
                // Region of interest on the first page: (x, y, width, height).
                Rectangle rect = new Rectangle( 10, 280, 275, 60 );
                stripper.addRegion( "class1", rect );
                PDPage firstPage = document.getPage(0);
                stripper.extractRegions( firstPage );
                System.out.println( "Text in the area:" + rect );
                System.out.println( stripper.getTextForRegion( "class1" ) );
            }
        }
    }

    /**
     * This will print the usage for this document.
     */
    private static void usage()
    {
        System.err.println( "Usage: java org.apache.pdfbox.examples.util.ExtractTextByArea <input-pdf>" );
    }
}
|
examples/src/main/java/org/apache/pdfbox/examples/util/ExtractTextByArea.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.examples.util;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.text.PDFTextStripperByArea;
import java.awt.Rectangle;
import java.io.File;
import java.io.IOException;
/**
* This is an example on how to extract text from a specific area on the PDF document.
*
* Usage: java org.apache.pdfbox.examples.util.ExtractTextByArea <input-pdf>
*
* @author Ben Litchfield
*/
// final: utility class with a private constructor is not designed for extension.
public final class ExtractTextByArea
{
    private ExtractTextByArea()
    {
        //utility class and should not be constructed.
    }

    /**
     * This will print the documents text in a certain area.
     *
     * @param args The command line arguments.
     *
     * @throws IOException If there is an error parsing the document.
     */
    public static void main( String[] args ) throws IOException
    {
        if( args.length != 1 )
        {
            usage();
        }
        else
        {
            // try-with-resources guarantees the document is closed even if
            // extraction throws, replacing the manual try/finally + null check.
            try (PDDocument document = PDDocument.load( new File(args[0]) ))
            {
                PDFTextStripperByArea stripper = new PDFTextStripperByArea();
                stripper.setSortByPosition( true );
                // Region of interest on the first page: (x, y, width, height).
                Rectangle rect = new Rectangle( 10, 280, 275, 60 );
                stripper.addRegion( "class1", rect );
                PDPage firstPage = document.getPage(0);
                stripper.extractRegions( firstPage );
                System.out.println( "Text in the area:" + rect );
                System.out.println( stripper.getTextForRegion( "class1" ) );
            }
        }
    }

    /**
     * This will print the usage for this document.
     */
    private static void usage()
    {
        System.err.println( "Usage: java org.apache.pdfbox.examples.util.ExtractTextByArea <input-pdf>" );
    }
}
|
PDFBOX-2852: add private constructor, make class final
git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1703257 13f79535-47bb-0310-9956-ffa450edef68
|
examples/src/main/java/org/apache/pdfbox/examples/util/ExtractTextByArea.java
|
PDFBOX-2852: add private constructor, make class final
|
|
Java
|
apache-2.0
|
b0a5cc2c3952803e84390095b962cf3a8e53abce
| 0
|
mglukhikh/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,ahb0327/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,diorcety/intellij-community,alphafoobar/intellij-community,semonte/intellij-community,petteyg/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,kdwink/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,Distrotech/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,caot/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,ahb0327/intellij-community,blademainer/intellij-community,fnouama/intellij-community,izonder/intellij-community,petteyg/intellij-community,kool79/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,caot/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,Lekanich/intellij-community,ahb0327/intellij-community,holmes/intellij-community,consulo/consulo,da1z/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,da1z/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,robovm/robovm-studio,jagguli/intellij-community,wreckJ/intellij-community,caot/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,supersven/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,kdwink/intellij-com
munity,FHannes/intellij-community,consulo/consulo,ftomassetti/intellij-community,lucafavatella/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,blademainer/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,amith01994/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,ernestp/consulo,nicolargo/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,joewalnes/idea-community,ryano144/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,da1z/intellij-community,dslomov/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,gnuhub/intellij-community,kool79/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,ernestp/consulo,suncycheng/intellij-community,adedayo/intellij-community,robovm/robovm-studio,ernestp/consulo,SerCeMan/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,allotria/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,dslomov/intellij-community,holmes/intellij-community,adedayo/intellij-community,pwoodworth/intellij-community,diorcety/intellij-community,samthor/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,caot/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,akosyakov/intellij-
community,robovm/robovm-studio,ol-loginov/intellij-community,nicolargo/intellij-community,slisson/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,asedunov/intellij-community,allotria/intellij-community,blademainer/intellij-community,hurricup/intellij-community,vladmm/intellij-community,adedayo/intellij-community,holmes/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,samthor/intellij-community,fitermay/intellij-community,holmes/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,dslomov/intellij-community,xfournet/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,TangHao1987/intellij-community,holmes/intellij-community,caot/intellij-community,semonte/intellij-community,jagguli/intellij-community,kdwink/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,signed/intellij-community,ahb0327/intellij-community,izonder/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,allotria/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,blademainer/intellij-community,signed/intellij-community,sign
ed/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,ryano144/intellij-community,hurricup/intellij-community,izonder/intellij-community,supersven/intellij-community,amith01994/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,joewalnes/idea-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,holmes/intellij-community,signed/intellij-community,apixandru/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,joewalnes/idea-community,ftomassetti/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,fnouama/intellij-community,petteyg/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,slisson/intellij-community,ryano144/intellij-community,izonder/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,xfournet/intellij-community,apixandru/intellij-community,slisson/intellij-community,ftomassetti/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,blademainer/intellij-community,ernestp/consulo,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,muntasirsyed/intellij-community,ftomassetti/intellij-community,muntasirsyed/intellij-community,samthor/intellij-community
,idea4bsd/idea4bsd,ibinti/intellij-community,orekyuu/intellij-community,amith01994/intellij-community,kool79/intellij-community,ryano144/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,vladmm/intellij-community,supersven/intellij-community,retomerz/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,da1z/intellij-community,caot/intellij-community,FHannes/intellij-community,kool79/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,ibinti/intellij-community,vladmm/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,diorcety/intellij-community,retomerz/intellij-community,fitermay/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,asedunov/intellij-community,signed/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,consulo/consulo,fnouama/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,clumsy/intellij-community,fitermay/intellij-community,apixandru/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,joewalnes/idea-community,robovm/robo
vm-studio,michaelgallacher/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,blademainer/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,robovm/robovm-studio,amith01994/intellij-community,retomerz/intellij-community,fitermay/intellij-community,holmes/intellij-community,retomerz/intellij-community,izonder/intellij-community,xfournet/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,adedayo/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,semonte/intellij-community,clumsy/intellij-community,consulo/consulo,MER-GROUP/intellij-community,gnuhub/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,slisson/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,fengbaicanhe/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,holmes/intellij-community,amith01994/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,semonte/i
ntellij-community,samthor/intellij-community,salguarnieri/intellij-community,robovm/robovm-studio,alphafoobar/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,blademainer/intellij-community,da1z/intellij-community,caot/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,caot/intellij-community,semonte/intellij-community,petteyg/intellij-community,jagguli/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,izonder/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,suncycheng/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,Distrotech/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,xfournet/intellij-community,joewalnes/idea-community,muntasirsyed/intellij-community,da1z/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,izonder/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,ernestp/consulo,hurricup/intellij-community,xfournet/intellij-community,mi
chaelgallacher/intellij-community,samthor/intellij-community,slisson/intellij-community,kdwink/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,da1z/intellij-community,slisson/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,joewalnes/idea-community,diorcety/intellij-community,suncycheng/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,retomerz/intellij-community,izonder/intellij-community,vladmm/intellij-community,fitermay/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,Lekanich/intellij-community,caot/intellij-community,da1z/intellij-community,fitermay/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,semonte/intellij-community,ryano144/intellij-community,robovm/robovm-studio,ftomassetti/intellij-community,retomerz/intellij-community,signed/intellij-community,ahb0327/intellij-community,holmes/intellij-community,ryano144/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,Lekanich/intellij-community,mglukhikh/intellij-community,fnouama/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,supersven/intellij-community,petteyg/intellij-community,diorcety/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,nicolargo/intell
ij-community,hurricup/intellij-community,joewalnes/idea-community,FHannes/intellij-community,signed/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,youdonghai/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,tmpgit/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,ryano144/intellij-community,diorcety/intellij-community,samthor/intellij-community,alphafoobar/intellij-community,caot/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,supersven/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,samthor/intellij-community,FHannes/intellij-community,joewalnes/idea-community,petteyg/intellij-community,da1z/intellij-community,ryano144/intellij-community,tmpgit/intellij-community,consulo/consulo,signed/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,fitermay/intellij-community,fnouama/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,FHannes/intellij-community,FHannes/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,fnouama/intellij-community,allotria/intellij-community,wreckJ/intellij-community,xfournet/intellij-community,semonte/intellij-community,dslomov/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,lucafavatella/intellij-com
munity,allotria/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,samthor/intellij-community,suncycheng/intellij-community,samthor/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,diorcety/intellij-community,orekyuu/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,caot/intellij-community,suncycheng/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,consulo/consulo,ibinti/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,caot/intellij-community,suncycheng/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,holmes/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,blademainer/intellij-community,signed/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,semonte/intellij-community,joewalnes/idea-community,fengbaicanhe/intellij-community,blademainer/intellij-community,fnouama/intellij-community,petteyg/intellij-community,da1z/intellij-community,semonte/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,petteyg/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,Lekanich/intellij-community,hurricup/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,apixandru/intellij-community,ernestp/consulo,vvv1559/intellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,supersven/intellij-community,akosyakov/intellij-community,vvv1559
/intellij-community,ol-loginov/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,mglukhikh/intellij-community,kool79/intellij-community,akosyakov/intellij-community,semonte/intellij-community,fnouama/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,ryano144/intellij-community,slisson/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,dslomov/intellij-community,fnouama/intellij-community,kdwink/intellij-community,akosyakov/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,retomerz/intellij-community,kool79/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,izonder/intellij-community,kool79/intellij-community,clumsy/intellij-community,amith01994/intellij-community,adedayo/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.file;
import com.intellij.psi.*;
import com.intellij.psi.templateLanguages.TemplateLanguageFileViewProvider;
import com.intellij.util.IncorrectOperationException;
/**
* @author Maxim.Mossienko
* Date: Sep 18, 2008
* Time: 3:33:07 PM
*/
public class JavaUpdateAddedFileProcessor extends UpdateAddedFileProcessor {
public boolean canProcessElement(final PsiFile file) {
return file instanceof PsiClassOwner;
}
public void update(final PsiFile element, PsiFile originalElement) throws IncorrectOperationException {
if (element.getViewProvider() instanceof TemplateLanguageFileViewProvider) return;
PsiDirectory dir = element.getContainingDirectory();
if (dir == null) return;
PsiPackage aPackage = JavaDirectoryService.getInstance().getPackage(dir);
if (aPackage == null) return;
String packageName = aPackage.getQualifiedName();
((PsiClassOwner)element).setPackageName(packageName);
}
}
|
java/java-impl/src/com/intellij/psi/impl/file/JavaUpdateAddedFileProcessor.java
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.file;
import com.intellij.psi.*;
import com.intellij.util.IncorrectOperationException;
/**
 * Adjusts the package statement of a freshly added class-owning file so that it
 * matches the package of the directory it was placed into.
 *
 * @author Maxim.Mossienko
 * Date: Sep 18, 2008
 * Time: 3:33:07 PM
 */
public class JavaUpdateAddedFileProcessor extends UpdateAddedFileProcessor {
  public boolean canProcessElement(final PsiFile file) {
    return file instanceof PsiClassOwner;
  }

  public void update(final PsiFile element, PsiFile originalElement) throws IncorrectOperationException {
    // A view provider with more than one language means this is not a plain Java
    // source file (e.g. a template file) — leave its package statement alone.
    if (element.getViewProvider().getLanguages().size() > 1) {
      return;
    }
    final PsiDirectory directory = element.getContainingDirectory();
    if (directory == null) {
      return;
    }
    final PsiPackage containingPackage = JavaDirectoryService.getInstance().getPackage(directory);
    if (containingPackage == null) {
      return;
    }
    final String qualifiedName = containingPackage.getQualifiedName();
    ((PsiClassOwner)element).setPackageName(qualifiedName);
  }
}
|
IDEA-52048: "Could not set package name for jsp files" message when creating an XHTML file
|
java/java-impl/src/com/intellij/psi/impl/file/JavaUpdateAddedFileProcessor.java
|
IDEA-52048: "Could not set package name for jsp files" message when creating an XHTML file
|
|
Java
|
apache-2.0
|
5900bcd6d03743d5d44fba1d7cfd9496c7f56421
| 0
|
amar-sharma/selenium,misttechnologies/selenium,titusfortner/selenium,chrsmithdemos/selenium,onedox/selenium,davehunt/selenium,RamaraoDonta/ramarao-clone,s2oBCN/selenium,lummyare/lummyare-lummy,sag-enorman/selenium,gotcha/selenium,customcommander/selenium,SouWilliams/selenium,arunsingh/selenium,gemini-testing/selenium,uchida/selenium,asashour/selenium,eric-stanley/selenium,manuelpirez/selenium,sebady/selenium,stupidnetizen/selenium,rovner/selenium,blackboarddd/selenium,customcommander/selenium,dbo/selenium,dimacus/selenium,sri85/selenium,Appdynamics/selenium,slongwang/selenium,twalpole/selenium,xmhubj/selenium,bartolkaruza/selenium,stupidnetizen/selenium,HtmlUnit/selenium,zenefits/selenium,i17c/selenium,rovner/selenium,eric-stanley/selenium,blackboarddd/selenium,carsonmcdonald/selenium,skurochkin/selenium,quoideneuf/selenium,SouWilliams/selenium,HtmlUnit/selenium,gorlemik/selenium,gabrielsimas/selenium,sankha93/selenium,RamaraoDonta/ramarao-clone,quoideneuf/selenium,misttechnologies/selenium,blueyed/selenium,jerome-jacob/selenium,clavery/selenium,isaksky/selenium,amikey/selenium,uchida/selenium,dibagga/selenium,i17c/selenium,soundcloud/selenium,kalyanjvn1/selenium,isaksky/selenium,rplevka/selenium,Sravyaksr/selenium,p0deje/selenium,5hawnknight/selenium,asashour/selenium,dcjohnson1989/selenium,valfirst/selenium,JosephCastro/selenium,zenefits/selenium,jsakamoto/selenium,dandv/selenium,lmtierney/selenium,soundcloud/selenium,onedox/selenium,TheBlackTuxCorp/selenium,BlackSmith/selenium,dkentw/selenium,pulkitsinghal/selenium,vinay-qa/vinayit-android-server-apk,TheBlackTuxCorp/selenium,manuelpirez/selenium,titusfortner/selenium,Tom-Trumper/selenium,carlosroh/selenium,krosenvold/selenium,TheBlackTuxCorp/selenium,dcjohnson1989/selenium,oddui/selenium,GorK-ChO/selenium,blackboarddd/selenium,HtmlUnit/selenium,slongwang/selenium,wambat/selenium,gemini-testing/selenium,lummyare/lummyare-lummy,dibagga/selenium,joshuaduffy/selenium,quoideneuf/selenium,jerome-jacob/selenium,slongwan
g/selenium,chrisblock/selenium,vinay-qa/vinayit-android-server-apk,BlackSmith/selenium,carsonmcdonald/selenium,i17c/selenium,orange-tv-blagnac/selenium,doungni/selenium,SouWilliams/selenium,vveliev/selenium,gurayinan/selenium,Jarob22/selenium,jsarenik/jajomojo-selenium,yukaReal/selenium,stupidnetizen/selenium,freynaud/selenium,minhthuanit/selenium,quoideneuf/selenium,amar-sharma/selenium,SouWilliams/selenium,manuelpirez/selenium,davehunt/selenium,juangj/selenium,lrowe/selenium,oddui/selenium,mojwang/selenium,Tom-Trumper/selenium,HtmlUnit/selenium,denis-vilyuzhanin/selenium-fastview,jsakamoto/selenium,sevaseva/selenium,dkentw/selenium,rrussell39/selenium,yukaReal/selenium,amikey/selenium,lukeis/selenium,sag-enorman/selenium,jabbrwcky/selenium,tarlabs/selenium,lrowe/selenium,MeetMe/selenium,tkurnosova/selenium,rovner/selenium,xsyntrex/selenium,DrMarcII/selenium,gotcha/selenium,lummyare/lummyare-lummy,MeetMe/selenium,skurochkin/selenium,lilredindy/selenium,gregerrag/selenium,blackboarddd/selenium,joshbruning/selenium,amar-sharma/selenium,sevaseva/selenium,AutomatedTester/selenium,denis-vilyuzhanin/selenium-fastview,bayandin/selenium,compstak/selenium,twalpole/selenium,DrMarcII/selenium,Tom-Trumper/selenium,Dude-X/selenium,joshmgrant/selenium,jabbrwcky/selenium,GorK-ChO/selenium,gorlemik/selenium,orange-tv-blagnac/selenium,yukaReal/selenium,TheBlackTuxCorp/selenium,MCGallaspy/selenium,bartolkaruza/selenium,SevInf/IEDriver,rrussell39/selenium,asolntsev/selenium,thanhpete/selenium,xsyntrex/selenium,meksh/selenium,tkurnosova/selenium,thanhpete/selenium,juangj/selenium,SevInf/IEDriver,SouWilliams/selenium,sankha93/selenium,freynaud/selenium,davehunt/selenium,quoideneuf/selenium,jabbrwcky/selenium,eric-stanley/selenium,JosephCastro/selenium,lukeis/selenium,alexec/selenium,gurayinan/selenium,alb-i986/selenium,joshuaduffy/selenium,denis-vilyuzhanin/selenium-fastview,gorlemik/selenium,lilredindy/selenium,zenefits/selenium,livioc/selenium,jsarenik/jajomojo-selenium,dbo/selenium,
Appdynamics/selenium,RamaraoDonta/ramarao-clone,temyers/selenium,telefonicaid/selenium,sag-enorman/selenium,xsyntrex/selenium,misttechnologies/selenium,lummyare/lummyare-test,compstak/selenium,s2oBCN/selenium,joshmgrant/selenium,gregerrag/selenium,stupidnetizen/selenium,krosenvold/selenium,arunsingh/selenium,temyers/selenium,jabbrwcky/selenium,actmd/selenium,kalyanjvn1/selenium,lrowe/selenium,xsyntrex/selenium,TikhomirovSergey/selenium,jsakamoto/selenium,carlosroh/selenium,MCGallaspy/selenium,krosenvold/selenium,lrowe/selenium,RamaraoDonta/ramarao-clone,skurochkin/selenium,petruc/selenium,pulkitsinghal/selenium,petruc/selenium,sebady/selenium,davehunt/selenium,valfirst/selenium,HtmlUnit/selenium,SeleniumHQ/selenium,mach6/selenium,xsyntrex/selenium,jsakamoto/selenium,uchida/selenium,mestihudson/selenium,bayandin/selenium,mach6/selenium,temyers/selenium,lummyare/lummyare-lummy,isaksky/selenium,vinay-qa/vinayit-android-server-apk,zenefits/selenium,MeetMe/selenium,vveliev/selenium,xmhubj/selenium,bmannix/selenium,asashour/selenium,BlackSmith/selenium,TikhomirovSergey/selenium,dcjohnson1989/selenium,mojwang/selenium,carlosroh/selenium,anshumanchatterji/selenium,doungni/selenium,GorK-ChO/selenium,yukaReal/selenium,sri85/selenium,twalpole/selenium,bmannix/selenium,5hawnknight/selenium,titusfortner/selenium,jsakamoto/selenium,sebady/selenium,SeleniumHQ/selenium,actmd/selenium,xmhubj/selenium,pulkitsinghal/selenium,SevInf/IEDriver,GorK-ChO/selenium,lummyare/lummyare-lummy,chrsmithdemos/selenium,gemini-testing/selenium,vinay-qa/vinayit-android-server-apk,juangj/selenium,misttechnologies/selenium,soundcloud/selenium,blueyed/selenium,amikey/selenium,dimacus/selenium,JosephCastro/selenium,bartolkaruza/selenium,Herst/selenium,DrMarcII/selenium,p0deje/selenium,wambat/selenium,eric-stanley/selenium,Ardesco/selenium,rplevka/selenium,jknguyen/josephknguyen-selenium,skurochkin/selenium,krmahadevan/selenium,oddui/selenium,SouWilliams/selenium,pulkitsinghal/selenium,i17c/selenium,titusf
ortner/selenium,jabbrwcky/selenium,oddui/selenium,alb-i986/selenium,gabrielsimas/selenium,AutomatedTester/selenium,gabrielsimas/selenium,Herst/selenium,vinay-qa/vinayit-android-server-apk,lummyare/lummyare-test,MCGallaspy/selenium,kalyanjvn1/selenium,tkurnosova/selenium,onedox/selenium,thanhpete/selenium,sankha93/selenium,arunsingh/selenium,mach6/selenium,titusfortner/selenium,bayandin/selenium,HtmlUnit/selenium,asolntsev/selenium,krmahadevan/selenium,jerome-jacob/selenium,rovner/selenium,juangj/selenium,aluedeke/chromedriver,sri85/selenium,Herst/selenium,knorrium/selenium,manuelpirez/selenium,petruc/selenium,krmahadevan/selenium,Tom-Trumper/selenium,tbeadle/selenium,valfirst/selenium,houchj/selenium,meksh/selenium,jknguyen/josephknguyen-selenium,Tom-Trumper/selenium,titusfortner/selenium,jsarenik/jajomojo-selenium,compstak/selenium,temyers/selenium,JosephCastro/selenium,Tom-Trumper/selenium,SevInf/IEDriver,isaksky/selenium,p0deje/selenium,freynaud/selenium,5hawnknight/selenium,dimacus/selenium,asashour/selenium,compstak/selenium,Jarob22/selenium,Jarob22/selenium,Jarob22/selenium,aluedeke/chromedriver,joshmgrant/selenium,asolntsev/selenium,aluedeke/chromedriver,titusfortner/selenium,rplevka/selenium,Herst/selenium,krmahadevan/selenium,sevaseva/selenium,Dude-X/selenium,dkentw/selenium,lummyare/lummyare-test,bmannix/selenium,uchida/selenium,eric-stanley/selenium,misttechnologies/selenium,rrussell39/selenium,customcommander/selenium,stupidnetizen/selenium,TikhomirovSergey/selenium,aluedeke/chromedriver,asolntsev/selenium,zenefits/selenium,sag-enorman/selenium,Sravyaksr/selenium,asolntsev/selenium,isaksky/selenium,SevInf/IEDriver,dbo/selenium,telefonicaid/selenium,doungni/selenium,Appdynamics/selenium,AutomatedTester/selenium,mojwang/selenium,5hawnknight/selenium,Ardesco/selenium,tarlabs/selenium,soundcloud/selenium,gotcha/selenium,krmahadevan/selenium,lmtierney/selenium,Dude-X/selenium,MCGallaspy/selenium,blackboarddd/selenium,sag-enorman/selenium,misttechnologies/sele
nium,GorK-ChO/selenium,mojwang/selenium,tarlabs/selenium,quoideneuf/selenium,gurayinan/selenium,manuelpirez/selenium,5hawnknight/selenium,amikey/selenium,orange-tv-blagnac/selenium,bmannix/selenium,joshmgrant/selenium,doungni/selenium,vinay-qa/vinayit-android-server-apk,lukeis/selenium,i17c/selenium,TheBlackTuxCorp/selenium,skurochkin/selenium,SevInf/IEDriver,misttechnologies/selenium,krosenvold/selenium,stupidnetizen/selenium,mojwang/selenium,gregerrag/selenium,valfirst/selenium,bartolkaruza/selenium,o-schneider/selenium,gemini-testing/selenium,stupidnetizen/selenium,valfirst/selenium,juangj/selenium,doungni/selenium,p0deje/selenium,lukeis/selenium,asolntsev/selenium,rrussell39/selenium,arunsingh/selenium,rovner/selenium,mojwang/selenium,tbeadle/selenium,dimacus/selenium,alb-i986/selenium,bmannix/selenium,soundcloud/selenium,thanhpete/selenium,RamaraoDonta/ramarao-clone,SeleniumHQ/selenium,rovner/selenium,amikey/selenium,dibagga/selenium,blackboarddd/selenium,jerome-jacob/selenium,yukaReal/selenium,s2oBCN/selenium,anshumanchatterji/selenium,petruc/selenium,MCGallaspy/selenium,sankha93/selenium,blackboarddd/selenium,anshumanchatterji/selenium,blackboarddd/selenium,JosephCastro/selenium,bayandin/selenium,arunsingh/selenium,5hawnknight/selenium,wambat/selenium,arunsingh/selenium,MeetMe/selenium,dbo/selenium,TheBlackTuxCorp/selenium,actmd/selenium,Ardesco/selenium,lummyare/lummyare-test,SeleniumHQ/selenium,joshuaduffy/selenium,bartolkaruza/selenium,titusfortner/selenium,valfirst/selenium,wambat/selenium,gemini-testing/selenium,alb-i986/selenium,Dude-X/selenium,markodolancic/selenium,chrisblock/selenium,carsonmcdonald/selenium,sebady/selenium,twalpole/selenium,markodolancic/selenium,sri85/selenium,pulkitsinghal/selenium,Ardesco/selenium,Sravyaksr/selenium,amar-sharma/selenium,rovner/selenium,JosephCastro/selenium,mojwang/selenium,valfirst/selenium,jabbrwcky/selenium,jsarenik/jajomojo-selenium,chrsmithdemos/selenium,Appdynamics/selenium,lummyare/lummyare-lummy,s2oBCN/sel
enium,onedox/selenium,lilredindy/selenium,minhthuanit/selenium,lmtierney/selenium,livioc/selenium,SouWilliams/selenium,o-schneider/selenium,Appdynamics/selenium,gurayinan/selenium,customcommander/selenium,TheBlackTuxCorp/selenium,petruc/selenium,quoideneuf/selenium,lmtierney/selenium,dibagga/selenium,aluedeke/chromedriver,rplevka/selenium,dcjohnson1989/selenium,joshuaduffy/selenium,SeleniumHQ/selenium,DrMarcII/selenium,temyers/selenium,vveliev/selenium,livioc/selenium,amikey/selenium,Sravyaksr/selenium,minhthuanit/selenium,Sravyaksr/selenium,telefonicaid/selenium,twalpole/selenium,joshuaduffy/selenium,gotcha/selenium,Sravyaksr/selenium,tkurnosova/selenium,gabrielsimas/selenium,lilredindy/selenium,slongwang/selenium,livioc/selenium,MeetMe/selenium,s2oBCN/selenium,mach6/selenium,bmannix/selenium,knorrium/selenium,AutomatedTester/selenium,asashour/selenium,rovner/selenium,carsonmcdonald/selenium,titusfortner/selenium,knorrium/selenium,knorrium/selenium,TikhomirovSergey/selenium,gotcha/selenium,Jarob22/selenium,lrowe/selenium,isaksky/selenium,quoideneuf/selenium,jsarenik/jajomojo-selenium,vveliev/selenium,alb-i986/selenium,xsyntrex/selenium,MCGallaspy/selenium,s2oBCN/selenium,MeetMe/selenium,anshumanchatterji/selenium,chrsmithdemos/selenium,mestihudson/selenium,gregerrag/selenium,zenefits/selenium,5hawnknight/selenium,lrowe/selenium,orange-tv-blagnac/selenium,houchj/selenium,Jarob22/selenium,jknguyen/josephknguyen-selenium,tarlabs/selenium,aluedeke/chromedriver,carlosroh/selenium,tbeadle/selenium,davehunt/selenium,tarlabs/selenium,meksh/selenium,temyers/selenium,compstak/selenium,eric-stanley/selenium,jabbrwcky/selenium,Herst/selenium,bmannix/selenium,dandv/selenium,Jarob22/selenium,sankha93/selenium,minhthuanit/selenium,zenefits/selenium,clavery/selenium,dcjohnson1989/selenium,Dude-X/selenium,telefonicaid/selenium,joshbruning/selenium,Herst/selenium,houchj/selenium,livioc/selenium,wambat/selenium,livioc/selenium,valfirst/selenium,lilredindy/selenium,valfirst/selenium,m
inhthuanit/selenium,o-schneider/selenium,clavery/selenium,lukeis/selenium,amikey/selenium,joshuaduffy/selenium,alb-i986/selenium,carlosroh/selenium,tbeadle/selenium,pulkitsinghal/selenium,chrisblock/selenium,GorK-ChO/selenium,s2oBCN/selenium,sri85/selenium,meksh/selenium,dbo/selenium,joshmgrant/selenium,tarlabs/selenium,aluedeke/chromedriver,freynaud/selenium,gabrielsimas/selenium,s2oBCN/selenium,jsarenik/jajomojo-selenium,vinay-qa/vinayit-android-server-apk,slongwang/selenium,mach6/selenium,jsarenik/jajomojo-selenium,SouWilliams/selenium,skurochkin/selenium,SeleniumHQ/selenium,alb-i986/selenium,houchj/selenium,amikey/selenium,TheBlackTuxCorp/selenium,Dude-X/selenium,AutomatedTester/selenium,Herst/selenium,dkentw/selenium,TikhomirovSergey/selenium,dimacus/selenium,chrsmithdemos/selenium,slongwang/selenium,gorlemik/selenium,freynaud/selenium,dibagga/selenium,sevaseva/selenium,gorlemik/selenium,actmd/selenium,carsonmcdonald/selenium,compstak/selenium,kalyanjvn1/selenium,actmd/selenium,lummyare/lummyare-test,DrMarcII/selenium,customcommander/selenium,alexec/selenium,gorlemik/selenium,MeetMe/selenium,Tom-Trumper/selenium,valfirst/selenium,titusfortner/selenium,customcommander/selenium,valfirst/selenium,i17c/selenium,meksh/selenium,tbeadle/selenium,skurochkin/selenium,soundcloud/selenium,MCGallaspy/selenium,lmtierney/selenium,orange-tv-blagnac/selenium,denis-vilyuzhanin/selenium-fastview,dimacus/selenium,actmd/selenium,dandv/selenium,o-schneider/selenium,freynaud/selenium,DrMarcII/selenium,slongwang/selenium,onedox/selenium,BlackSmith/selenium,Tom-Trumper/selenium,SeleniumHQ/selenium,actmd/selenium,customcommander/selenium,anshumanchatterji/selenium,yukaReal/selenium,yukaReal/selenium,gemini-testing/selenium,sag-enorman/selenium,gorlemik/selenium,soundcloud/selenium,o-schneider/selenium,bayandin/selenium,orange-tv-blagnac/selenium,joshmgrant/selenium,thanhpete/selenium,bayandin/selenium,p0deje/selenium,tbeadle/selenium,knorrium/selenium,manuelpirez/selenium,arunsingh/sel
enium,mach6/selenium,eric-stanley/selenium,isaksky/selenium,jsakamoto/selenium,jerome-jacob/selenium,markodolancic/selenium,manuelpirez/selenium,bayandin/selenium,Appdynamics/selenium,sri85/selenium,gabrielsimas/selenium,juangj/selenium,rrussell39/selenium,livioc/selenium,chrisblock/selenium,amar-sharma/selenium,pulkitsinghal/selenium,joshmgrant/selenium,wambat/selenium,dbo/selenium,denis-vilyuzhanin/selenium-fastview,i17c/selenium,chrsmithdemos/selenium,joshuaduffy/selenium,jabbrwcky/selenium,Ardesco/selenium,freynaud/selenium,p0deje/selenium,lukeis/selenium,thanhpete/selenium,zenefits/selenium,joshmgrant/selenium,minhthuanit/selenium,jknguyen/josephknguyen-selenium,knorrium/selenium,joshmgrant/selenium,tkurnosova/selenium,mestihudson/selenium,asashour/selenium,chrsmithdemos/selenium,gregerrag/selenium,vinay-qa/vinayit-android-server-apk,mestihudson/selenium,xmhubj/selenium,anshumanchatterji/selenium,Sravyaksr/selenium,gregerrag/selenium,5hawnknight/selenium,lummyare/lummyare-test,jerome-jacob/selenium,rplevka/selenium,krmahadevan/selenium,manuelpirez/selenium,alexec/selenium,lmtierney/selenium,krosenvold/selenium,alb-i986/selenium,xmhubj/selenium,oddui/selenium,TheBlackTuxCorp/selenium,houchj/selenium,sag-enorman/selenium,vveliev/selenium,blueyed/selenium,joshuaduffy/selenium,i17c/selenium,tbeadle/selenium,actmd/selenium,telefonicaid/selenium,oddui/selenium,mestihudson/selenium,o-schneider/selenium,mestihudson/selenium,amikey/selenium,tarlabs/selenium,alexec/selenium,compstak/selenium,yukaReal/selenium,sankha93/selenium,dcjohnson1989/selenium,xmhubj/selenium,uchida/selenium,mach6/selenium,rplevka/selenium,joshmgrant/selenium,lummyare/lummyare-test,vveliev/selenium,jknguyen/josephknguyen-selenium,skurochkin/selenium,JosephCastro/selenium,chrisblock/selenium,sebady/selenium,dandv/selenium,rrussell39/selenium,xsyntrex/selenium,dcjohnson1989/selenium,gotcha/selenium,SeleniumHQ/selenium,knorrium/selenium,alexec/selenium,knorrium/selenium,jsarenik/jajomojo-selenium,twal
pole/selenium,Jarob22/selenium,HtmlUnit/selenium,denis-vilyuzhanin/selenium-fastview,mach6/selenium,minhthuanit/selenium,rrussell39/selenium,bartolkaruza/selenium,HtmlUnit/selenium,tarlabs/selenium,Tom-Trumper/selenium,dbo/selenium,asashour/selenium,oddui/selenium,temyers/selenium,Appdynamics/selenium,lrowe/selenium,denis-vilyuzhanin/selenium-fastview,thanhpete/selenium,mestihudson/selenium,lrowe/selenium,amar-sharma/selenium,dandv/selenium,slongwang/selenium,anshumanchatterji/selenium,kalyanjvn1/selenium,mestihudson/selenium,kalyanjvn1/selenium,bartolkaruza/selenium,blackboarddd/selenium,o-schneider/selenium,rrussell39/selenium,markodolancic/selenium,mestihudson/selenium,onedox/selenium,petruc/selenium,SevInf/IEDriver,customcommander/selenium,blueyed/selenium,carsonmcdonald/selenium,dibagga/selenium,AutomatedTester/selenium,stupidnetizen/selenium,Jarob22/selenium,gabrielsimas/selenium,rovner/selenium,carlosroh/selenium,lilredindy/selenium,p0deje/selenium,clavery/selenium,alexec/selenium,jerome-jacob/selenium,joshmgrant/selenium,krosenvold/selenium,jsakamoto/selenium,amar-sharma/selenium,uchida/selenium,krmahadevan/selenium,chrisblock/selenium,sevaseva/selenium,TikhomirovSergey/selenium,xsyntrex/selenium,livioc/selenium,lmtierney/selenium,sri85/selenium,o-schneider/selenium,BlackSmith/selenium,joshbruning/selenium,lrowe/selenium,markodolancic/selenium,tkurnosova/selenium,carsonmcdonald/selenium,sri85/selenium,AutomatedTester/selenium,uchida/selenium,dbo/selenium,gabrielsimas/selenium,clavery/selenium,meksh/selenium,lummyare/lummyare-lummy,telefonicaid/selenium,GorK-ChO/selenium,dkentw/selenium,isaksky/selenium,krmahadevan/selenium,sri85/selenium,livioc/selenium,mach6/selenium,compstak/selenium,blueyed/selenium,sebady/selenium,knorrium/selenium,freynaud/selenium,meksh/selenium,orange-tv-blagnac/selenium,blueyed/selenium,compstak/selenium,gregerrag/selenium,dibagga/selenium,soundcloud/selenium,telefonicaid/selenium,joshbruning/selenium,krosenvold/selenium,chrsmithdemo
s/selenium,minhthuanit/selenium,AutomatedTester/selenium,orange-tv-blagnac/selenium,gorlemik/selenium,carsonmcdonald/selenium,asolntsev/selenium,gurayinan/selenium,lilredindy/selenium,sebady/selenium,wambat/selenium,sebady/selenium,Ardesco/selenium,petruc/selenium,sebady/selenium,clavery/selenium,doungni/selenium,tkurnosova/selenium,krosenvold/selenium,JosephCastro/selenium,gotcha/selenium,davehunt/selenium,Sravyaksr/selenium,lmtierney/selenium,Herst/selenium,temyers/selenium,yukaReal/selenium,bayandin/selenium,lilredindy/selenium,DrMarcII/selenium,5hawnknight/selenium,onedox/selenium,vinay-qa/vinayit-android-server-apk,dimacus/selenium,jerome-jacob/selenium,lummyare/lummyare-test,kalyanjvn1/selenium,dibagga/selenium,sevaseva/selenium,lukeis/selenium,bmannix/selenium,clavery/selenium,jknguyen/josephknguyen-selenium,gurayinan/selenium,anshumanchatterji/selenium,gemini-testing/selenium,RamaraoDonta/ramarao-clone,joshuaduffy/selenium,dkentw/selenium,alexec/selenium,gemini-testing/selenium,dkentw/selenium,denis-vilyuzhanin/selenium-fastview,DrMarcII/selenium,vveliev/selenium,lilredindy/selenium,manuelpirez/selenium,eric-stanley/selenium,Herst/selenium,joshbruning/selenium,dibagga/selenium,dandv/selenium,SouWilliams/selenium,Dude-X/selenium,dandv/selenium,RamaraoDonta/ramarao-clone,jerome-jacob/selenium,dimacus/selenium,asolntsev/selenium,SevInf/IEDriver,tbeadle/selenium,vveliev/selenium,jknguyen/josephknguyen-selenium,vveliev/selenium,dkentw/selenium,doungni/selenium,jknguyen/josephknguyen-selenium,sevaseva/selenium,GorK-ChO/selenium,BlackSmith/selenium,gurayinan/selenium,SeleniumHQ/selenium,davehunt/selenium,doungni/selenium,asashour/selenium,jknguyen/josephknguyen-selenium,actmd/selenium,eric-stanley/selenium,o-schneider/selenium,skurochkin/selenium,kalyanjvn1/selenium,GorK-ChO/selenium,joshbruning/selenium,rplevka/selenium,amar-sharma/selenium,zenefits/selenium,AutomatedTester/selenium,joshbruning/selenium,houchj/selenium,clavery/selenium,xsyntrex/selenium,lukeis/sel
enium,dkentw/selenium,pulkitsinghal/selenium,s2oBCN/selenium,mojwang/selenium,Ardesco/selenium,RamaraoDonta/ramarao-clone,Appdynamics/selenium,DrMarcII/selenium,denis-vilyuzhanin/selenium-fastview,markodolancic/selenium,carlosroh/selenium,blueyed/selenium,rrussell39/selenium,juangj/selenium,alb-i986/selenium,dandv/selenium,uchida/selenium,sankha93/selenium,Appdynamics/selenium,asolntsev/selenium,titusfortner/selenium,carsonmcdonald/selenium,lummyare/lummyare-test,chrsmithdemos/selenium,blueyed/selenium,xmhubj/selenium,wambat/selenium,bayandin/selenium,jsakamoto/selenium,telefonicaid/selenium,markodolancic/selenium,sevaseva/selenium,MeetMe/selenium,freynaud/selenium,gurayinan/selenium,orange-tv-blagnac/selenium,minhthuanit/selenium,petruc/selenium,dandv/selenium,stupidnetizen/selenium,tarlabs/selenium,sankha93/selenium,p0deje/selenium,joshbruning/selenium,lummyare/lummyare-lummy,rplevka/selenium,pulkitsinghal/selenium,chrisblock/selenium,BlackSmith/selenium,juangj/selenium,kalyanjvn1/selenium,RamaraoDonta/ramarao-clone,houchj/selenium,dcjohnson1989/selenium,lukeis/selenium,gorlemik/selenium,jsakamoto/selenium,TikhomirovSergey/selenium,temyers/selenium,sevaseva/selenium,JosephCastro/selenium,aluedeke/chromedriver,uchida/selenium,blueyed/selenium,SevInf/IEDriver,xmhubj/selenium,gemini-testing/selenium,Ardesco/selenium,MeetMe/selenium,markodolancic/selenium,dimacus/selenium,thanhpete/selenium,rplevka/selenium,HtmlUnit/selenium,onedox/selenium,doungni/selenium,sankha93/selenium,twalpole/selenium,SeleniumHQ/selenium,p0deje/selenium,soundcloud/selenium,anshumanchatterji/selenium,HtmlUnit/selenium,arunsingh/selenium,gabrielsimas/selenium,quoideneuf/selenium,misttechnologies/selenium,MCGallaspy/selenium,bartolkaruza/selenium,mojwang/selenium,Sravyaksr/selenium,misttechnologies/selenium,oddui/selenium,clavery/selenium,Dude-X/selenium,krmahadevan/selenium,amar-sharma/selenium,sag-enorman/selenium,xmhubj/selenium,sag-enorman/selenium,lmtierney/selenium,meksh/selenium,twalpole/s
elenium,houchj/selenium,gotcha/selenium,onedox/selenium,arunsingh/selenium,davehunt/selenium,twalpole/selenium,thanhpete/selenium,gotcha/selenium,BlackSmith/selenium,gregerrag/selenium,BlackSmith/selenium,chrisblock/selenium,asashour/selenium,krosenvold/selenium,dcjohnson1989/selenium,bartolkaruza/selenium,carlosroh/selenium,juangj/selenium,davehunt/selenium,carlosroh/selenium,Ardesco/selenium,aluedeke/chromedriver,alexec/selenium,jsarenik/jajomojo-selenium,dbo/selenium,jabbrwcky/selenium,i17c/selenium,tkurnosova/selenium,slongwang/selenium,oddui/selenium,TikhomirovSergey/selenium,gregerrag/selenium,Dude-X/selenium,SeleniumHQ/selenium,joshbruning/selenium,telefonicaid/selenium,tkurnosova/selenium,petruc/selenium,meksh/selenium,bmannix/selenium,alexec/selenium,wambat/selenium,houchj/selenium,MCGallaspy/selenium,customcommander/selenium,markodolancic/selenium,lummyare/lummyare-lummy,tbeadle/selenium,chrisblock/selenium,gurayinan/selenium,isaksky/selenium,TikhomirovSergey/selenium
|
/*
Copyright 2012 WebDriver committers
Copyright 2011 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium.testing.drivers;
import com.google.common.base.Supplier;
import com.opera.core.systems.OperaDriver;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
/**
* @author simonstewart@google.com (Simon Stewart)
*/
public class OperaDriverSupplier implements Supplier<WebDriver> {
private Capabilities caps;
public OperaDriverSupplier(Capabilities caps) {
this.caps = caps;
}
public WebDriver get() {
if (caps == null) {
return null;
}
if (!DesiredCapabilities.opera().getBrowserName().equals(caps.getBrowserName())) {
return null;
}
// It's okay to avoid reflection here because the OperaDriver is a third party dependency
OperaDriver driver = new OperaDriver(caps);
driver.preferences().set("User Prefs", "Ignore Unrequested Popups", false);
return driver;
}
}
|
java/client/test/org/openqa/selenium/testing/drivers/OperaDriverSupplier.java
|
// Copyright 2011 Google Inc. All Rights Reserved.
package org.openqa.selenium.testing.drivers;
import com.google.common.base.Supplier;
import com.opera.core.systems.OperaDriver;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
/**
 * A {@link Supplier} that builds an {@link OperaDriver} when Opera is the requested
 * browser; any other request (or a null request) yields {@code null}.
 *
 * @author simonstewart@google.com (Simon Stewart)
 */
public class OperaDriverSupplier implements Supplier<WebDriver> {
  private Capabilities caps;

  public OperaDriverSupplier(Capabilities caps) {
    this.caps = caps;
  }

  public WebDriver get() {
    if (caps == null) {
      return null;
    }
    final String requestedBrowser = caps.getBrowserName();
    if (!DesiredCapabilities.opera().getBrowserName().equals(requestedBrowser)) {
      return null;
    }
    // It's okay to avoid reflection here because the opera driver is a third
    // party dependency.
    final OperaDriver driver = new OperaDriver(caps);
    driver.setPref("User Prefs", "Ignore Unrequested Popups", "0");
    return driver;
  }
}
|
AndreasTolfTolfsen: Using new preferences API
r15903
|
java/client/test/org/openqa/selenium/testing/drivers/OperaDriverSupplier.java
|
AndreasTolfTolfsen: Using new preferences API
|
|
Java
|
apache-2.0
|
0929bdad68a7adf0f2acb1ae48144ab2eb1436b4
| 0
|
topicusonderwijs/wicket,bitstorm/wicket,apache/wicket,astrapi69/wicket,bitstorm/wicket,Servoy/wicket,astrapi69/wicket,freiheit-com/wicket,mosoft521/wicket,klopfdreh/wicket,zwsong/wicket,apache/wicket,mafulafunk/wicket,dashorst/wicket,aldaris/wicket,mosoft521/wicket,freiheit-com/wicket,dashorst/wicket,selckin/wicket,Servoy/wicket,martin-g/wicket-osgi,topicusonderwijs/wicket,astrapi69/wicket,klopfdreh/wicket,astrapi69/wicket,topicusonderwijs/wicket,AlienQueen/wicket,dashorst/wicket,Servoy/wicket,topicusonderwijs/wicket,AlienQueen/wicket,freiheit-com/wicket,martin-g/wicket-osgi,aldaris/wicket,AlienQueen/wicket,mosoft521/wicket,apache/wicket,freiheit-com/wicket,selckin/wicket,mosoft521/wicket,aldaris/wicket,freiheit-com/wicket,bitstorm/wicket,klopfdreh/wicket,selckin/wicket,martin-g/wicket-osgi,klopfdreh/wicket,Servoy/wicket,selckin/wicket,zwsong/wicket,klopfdreh/wicket,apache/wicket,mafulafunk/wicket,dashorst/wicket,dashorst/wicket,mafulafunk/wicket,AlienQueen/wicket,selckin/wicket,bitstorm/wicket,aldaris/wicket,zwsong/wicket,mosoft521/wicket,Servoy/wicket,aldaris/wicket,apache/wicket,topicusonderwijs/wicket,bitstorm/wicket,zwsong/wicket,AlienQueen/wicket
|
package wicket.extensions.ajax.markup.html.autocomplete;
import wicket.RequestCycle;
import wicket.Response;
import wicket.ajax.AbstractDefaultAjaxBehavior;
import wicket.behavior.AbstractAjaxBehavior;
import wicket.markup.html.PackageResourceReference;
/**
 * Base Ajax behavior for auto-complete text fields: contributes the required
 * JavaScript, renders the popup container after the component, and routes the
 * Ajax callback (carrying the user's partial input) to {@link #onRequest(String, RequestCycle)}.
 *
 * @since 1.2
 *
 * @author Janne Hietamäki (jannehietamaki)
 */
public abstract class AbstractAutoCompleteBehavior extends AbstractAjaxBehavior
{
	// Script implementing the client-side auto-complete widget.
	private static final PackageResourceReference AUTOCOMPLETE_JS = new PackageResourceReference(AutoCompleteBehavior.class, "wicket-autocomplete.js");
	// Core wicket-ajax support script; must be contributed before the widget script.
	private static final PackageResourceReference AJAX_JS = new PackageResourceReference(AbstractDefaultAjaxBehavior.class, "wicket-ajax.js");

	/**
	 *
	 */
	private static final long serialVersionUID = 1L;

	protected String getImplementationId()
	{
		return "wicket-default";
	}

	protected void onBind()
	{
		// The client-side script locates the component by its markup id.
		getComponent().setOutputMarkupId(true);
	}

	protected void onRenderHeadInitContribution(Response response)
	{
		super.onRenderHeadInitContribution(response);
		// Order matters: wicket-ajax.js provides the plumbing the widget script uses.
		writeJsReference(response, AJAX_JS);
		writeJsReference(response, AUTOCOMPLETE_JS);
	}

	protected void onComponentRendered()
	{
		Response response=getComponent().getResponse();
		final String id = getComponent().getMarkupId();
		// Instantiate the client-side widget and emit its (initially hidden) popup container.
		response.write("<script type=\"text/javascript\">");
		response.write("new WicketAutoComplete('"+id+"','"+getCallbackUrl()+"');");
		response.write("</script>");
		response.write("<div style=\"position:absolute;z-index:100;display:none;\" id=\""+id+"-autocomplete\" class=\"wicket-aa\"></div>");
	}

	/**
	 * @see wicket.behavior.IBehaviorListener#onRequest()
	 */
	public final void onRequest()
	{
		final RequestCycle requestCycle = RequestCycle.get();
		// "q" carries the partial input typed so far by the user.
		final String val = requestCycle.getRequest().getParameter("q");
		onRequest(val, requestCycle);
	}

	/**
	 * Callback for the ajax event generated by the javascript. This is
	 * where we need to generate our response.
	 *
	 * @param input
	 *            the input entered so far
	 * @param requestCycle
	 *            current request cycle
	 */
	protected abstract void onRequest(String input, RequestCycle requestCycle);
}
|
wicket-extensions/src/java/wicket/extensions/ajax/markup/html/autocomplete/AbstractAutoCompleteBehavior.java
|
package wicket.extensions.ajax.markup.html.autocomplete;
import wicket.RequestCycle;
import wicket.Response;
import wicket.ajax.AbstractDefaultAjaxBehavior;
import wicket.behavior.AbstractAjaxBehavior;
import wicket.markup.html.PackageResourceReference;
/**
 * Base Ajax behavior for auto-complete text fields: contributes the required
 * JavaScript, renders the popup container after the component, and routes the
 * Ajax callback (carrying the user's partial input) to {@link #onRequest(String, RequestCycle)}.
 *
 * @since 1.2
 *
 * @author Janne Hietamäki (jannehietamaki)
 */
public abstract class AbstractAutoCompleteBehavior extends AbstractAjaxBehavior
{
	// Script implementing the client-side auto-complete widget.
	private static final PackageResourceReference AUTOCOMPLETE_JS = new PackageResourceReference(AutoCompleteBehavior.class, "wicket-autocomplete.js");
	// Core wicket-ajax support script; must be contributed before the widget script.
	private static final PackageResourceReference AJAX_JS = new PackageResourceReference(AbstractDefaultAjaxBehavior.class, "wicket-ajax.js");

	/**
	 *
	 */
	private static final long serialVersionUID = 1L;

	protected String getImplementationId()
	{
		return "wicket-default";
	}

	protected void onBind()
	{
		// The client-side script locates the component by its markup id.
		getComponent().setOutputMarkupId(true);
	}

	protected void onRenderHeadInitContribution(Response response)
	{
		super.onRenderHeadInitContribution(response);
		// Order matters: wicket-ajax.js provides the plumbing the widget script uses.
		writeJsReference(response, AJAX_JS);
		writeJsReference(response, AUTOCOMPLETE_JS);
	}

	protected void onComponentRendered()
	{
		Response response=getComponent().getResponse();
		final String id = getComponent().getMarkupId();
		response.write("<script type=\"text/javascript\">");
		response.write("new WicketAutoComplete('"+id+"','"+getCallbackUrl()+"');");
		response.write("</script>");
		// NOTE(review): this popup div has no display:none, so it is visible
		// until the script hides it — confirm this initial flash is intended.
		response.write("<div style=\"position:absolute;z-index:100;\" id=\""+id+"-autocomplete\" class=\"wicket-aa\"></div>");
	}

	/**
	 * @see wicket.behavior.IBehaviorListener#onRequest()
	 */
	public final void onRequest()
	{
		final RequestCycle requestCycle = RequestCycle.get();
		// "q" carries the partial input typed so far by the user.
		final String val = requestCycle.getRequest().getParameter("q");
		onRequest(val, requestCycle);
	}

	/**
	 * Callback for the ajax event generated by the javascript. This is
	 * where we need to generate our response.
	 *
	 * @param input
	 *            the input entered so far
	 * @param requestCycle
	 *            current request cycle
	 */
	protected abstract void onRequest(String input, RequestCycle requestCycle);
}
|
always initially hidden
git-svn-id: ac804e38dcddf5e42ac850d29d9218b7df6087b7@460235 13f79535-47bb-0310-9956-ffa450edef68
|
wicket-extensions/src/java/wicket/extensions/ajax/markup/html/autocomplete/AbstractAutoCompleteBehavior.java
|
always initially hidden
|
|
Java
|
apache-2.0
|
7808954dee072183af25248285718dc9468e7d6c
| 0
|
RiparianData/Timberwolf,RiparianData/Timberwolf
|
/**
* Copyright 2012 Riparian Data
* http://www.ripariandata.com
* contact@ripariandata.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ripariandata.timberwolf.conf4j;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a field as being a target for a ConfigFileParser to update with data
 * from a configuration file.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.FIELD })
public @interface ConfigEntry
{
    /** The key name in the configuration file for this entry. Required. */
    String name();

    /** Short description of the entry, suitable for printing to the console. Defaults to empty. */
    String usage() default "";
}
|
src/main/java/com/ripariandata/timberwolf/conf4j/ConfigEntry.java
|
/**
* Copyright 2012 Riparian Data
* http://www.ripariandata.com
* contact@ripariandata.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ripariandata.timberwolf.conf4j;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a field as being a target for a ConfigFileParser to update with data
 * from a configuration file.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.FIELD })
public @interface ConfigEntry
{
    /** The key name in the configuration file for this entry. Required. */
    String name();

    /**
     * Short description of the entry, suitable for printing to the console.
     * Defaults to the empty string so callers that have no usage text are not
     * forced to supply one.
     */
    String usage() default "";
}
|
Adding default value for usage argument.
|
src/main/java/com/ripariandata/timberwolf/conf4j/ConfigEntry.java
|
Adding default value for usage argument.
|
|
Java
|
apache-2.0
|
56474137b5691bcfc51705f18bf166287a5ad931
| 0
|
MatthewTamlin/Spyglass
|
package com.matthewtamlin.spyglass.library.default_adapters;
import android.content.Context;
import com.matthewtamlin.spyglass.library.default_annotations.DefaultToColor;
import static com.matthewtamlin.java_utilities.checkers.NullChecker.checkNotNull;
/**
 * Adapter that reads the default colour value carried by a
 * {@link DefaultToColor} annotation.
 */
public class DefaultToColorAdapter implements DefaultAdapter<Integer, DefaultToColor> {
	@Override
	public Integer getDefault(final DefaultToColor annotation, final Context context) {
		// Validate both arguments up front, in declaration order, so failure
		// messages stay predictable.
		checkNotNull(annotation, "Argument \'annotation\' cannot be null.");
		checkNotNull(context, "Argument \'context\' cannot be null.");

		final Integer defaultColour = annotation.value();
		return defaultColour;
	}
}
|
library/src/main/java/com/matthewtamlin/spyglass/library/default_adapters/DefaultToColorAdapter.java
|
package com.matthewtamlin.spyglass.library.default_adapters;
import android.content.Context;
import com.matthewtamlin.spyglass.library.default_annotations.DefaultToColor;
import static com.matthewtamlin.java_utilities.checkers.NullChecker.checkNotNull;
/**
 * Adapter that reads the default colour value carried by a
 * {@link DefaultToColor} annotation.
 */
public class DefaultToColorAdapter implements DefaultAdapter<Integer, DefaultToColor> {
	/**
	 * Returns the colour declared by the supplied annotation.
	 *
	 * @param annotation the annotation to read the colour from, not null
	 * @param context a context, not null
	 * @return the colour value declared by {@code annotation}
	 */
	@Override
	public Integer getDefault(final DefaultToColor annotation, final Context context) {
		checkNotNull(annotation, "Argument \'annotation\' cannot be null.");
		// Previously only `annotation` was validated; a null context slipped
		// through unchecked despite being part of the documented contract.
		checkNotNull(context, "Argument \'context\' cannot be null.");
		return annotation.value();
	}
}
|
Added missing null checks
|
library/src/main/java/com/matthewtamlin/spyglass/library/default_adapters/DefaultToColorAdapter.java
|
Added missing null checks
|
|
Java
|
apache-2.0
|
0671eab608fcac3acb1586fa424e9719d73bf772
| 0
|
EBISPOT/goci,EBISPOT/goci,EBISPOT/goci,EBISPOT/goci,EBISPOT/goci,EBISPOT/goci
|
package uk.ac.ebi.spot.goci.curation.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import uk.ac.ebi.spot.goci.curation.model.Association;
import uk.ac.ebi.spot.goci.curation.model.EFOTrait;
import uk.ac.ebi.spot.goci.curation.model.SingleNucleotidePolymorphism;
import uk.ac.ebi.spot.goci.curation.model.SingleNucleotidePolymorphismXref;
import uk.ac.ebi.spot.goci.curation.repository.*;
import uk.ac.ebi.spot.goci.curation.service.CuratorReportedSNP;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
 * Created by emma on 06/01/15.
 *
 * @author emma
 * Association controller: interprets user input and transforms it into a
 * snp/association model that is represented to the user by the associated
 * HTML page. Used to view, add and edit existing snp/association information.
 * Also creates an entry in the SNP table for any new SNPs entered in the HTML form.
 */
@Controller
public class AssociationController {

    // Spring Data repositories, injected via the constructor below.
    private AssociationRepository associationRepository;
    private StudyRepository studyRepository;
    private EFOTraitRepository efoTraitRepository;
    private SingleNucleotidePolymorphismRepository singleNucleotidePolymorphismRepository;
    private SingleNucleotidePolymorphismXrefRepository singleNucleotidePolymorphismXrefRepository;

    @Autowired
    public AssociationController(AssociationRepository associationRepository, StudyRepository studyRepository, EFOTraitRepository efoTraitRepository, SingleNucleotidePolymorphismRepository singleNucleotidePolymorphismRepository, SingleNucleotidePolymorphismXrefRepository singleNucleotidePolymorphismXrefRepository) {
        this.associationRepository = associationRepository;
        this.studyRepository = studyRepository;
        this.efoTraitRepository = efoTraitRepository;
        this.singleNucleotidePolymorphismRepository = singleNucleotidePolymorphismRepository;
        this.singleNucleotidePolymorphismXrefRepository = singleNucleotidePolymorphismXrefRepository;
    }

    /* Study SNP/Associations */

    // Generate list of SNP associations linked to a study
    @RequestMapping(value = "/studies/{studyId}/associations", produces = MediaType.TEXT_HTML_VALUE, method = RequestMethod.GET)
    public String viewStudySnps(Model model, @PathVariable String studyId) {
        Collection<Association> associations = new ArrayList<>();
        associations.addAll(associationRepository.findByStudyID(studyId));
        model.addAttribute("studyAssociations", associations);
        // Return an empty association object so curators can add new association/snp information to study
        model.addAttribute("studyAssociation", new Association());
        // Return an empty object to hold SNPs curators enter
        model.addAttribute("reportedSNPs", new CuratorReportedSNP());
        // Also passes back study object to view so we can create links back to main study page
        model.addAttribute("study", studyRepository.findOne(Long.valueOf(studyId).longValue()));
        return "study_association";
    }

    // Add new association/snp information to a study
    @RequestMapping(value = "/studies/{studyId}/associations", produces = MediaType.TEXT_HTML_VALUE, method = RequestMethod.POST)
    public String addStudySnps(@ModelAttribute CuratorReportedSNP reportedSNPs, @ModelAttribute Association studyAssociation, @PathVariable String studyId) {
        // Set the study ID for our association
        studyAssociation.setStudyID(studyId);
        // Save our association information
        associationRepository.save(studyAssociation);
        // ReportedSNPs object holds a collection of SNPs entered by curator.
        // For each SNP entered we need to create an entry in the SNP table,
        // reusing any SNP that already exists.
        for (String snp : reportedSNPs.getReportedSNPValue()) {
            // Check if SNP already exists in the database
            SingleNucleotidePolymorphism existingSNP = singleNucleotidePolymorphismRepository.findByRsIDIgnoreCase(snp);
            // If SNP already exists just create link in XREF table
            if (existingSNP != null) {
                // Create link in XREF table and save
                SingleNucleotidePolymorphismXref newSNPXref = new SingleNucleotidePolymorphismXref();
                newSNPXref.setAssociationID(studyAssociation.getId());
                newSNPXref.setSnpID(existingSNP.getId());
                singleNucleotidePolymorphismXrefRepository.save(newSNPXref);
            } else {
                // Create new SNP
                SingleNucleotidePolymorphism newSNP = new SingleNucleotidePolymorphism();
                newSNP.setRsID(snp);
                // Save SNP
                singleNucleotidePolymorphismRepository.save(newSNP);
                // Create link in XREF table and save
                SingleNucleotidePolymorphismXref newSNPXref = new SingleNucleotidePolymorphismXref();
                newSNPXref.setAssociationID(studyAssociation.getId());
                newSNPXref.setSnpID(newSNP.getId());
                singleNucleotidePolymorphismXrefRepository.save(newSNPXref);
            }
        }
        return "redirect:/studies/" + studyId + "/associations";
    }

    /* Existing association information */

    // View association information
    @RequestMapping(value = "/associations/{associationId}", produces = MediaType.TEXT_HTML_VALUE, method = RequestMethod.GET)
    public String viewAssociation(Model model, @PathVariable Long associationId) {
        // Return association with that ID
        Association associationToView = associationRepository.findOne(associationId);
        model.addAttribute("association", associationToView);
        return "edit_association";
    }

    // Edit existing association
    @RequestMapping(value = "/associations/{associationId}", produces = MediaType.TEXT_HTML_VALUE, method = RequestMethod.POST)
    public String editAssociation(@ModelAttribute Association association, @PathVariable Long associationId) {
        // Save the association information returned from form
        associationRepository.save(association);
        return "redirect:/associations/" + associationId;
    }

    // View existing snp(s) linked to association
    @RequestMapping(value = "/associations/{associationId}/snps", produces = MediaType.TEXT_HTML_VALUE, method = RequestMethod.GET)
    public String viewAssociationSNPs(Model model, @PathVariable Long associationId) {
        // Find all cross-references to associated SNPs
        Collection<SingleNucleotidePolymorphismXref> xrefs = new ArrayList<>();
        xrefs.addAll(singleNucleotidePolymorphismXrefRepository.findByAssociationID(associationId));
        // For each XREF get the SNP ID
        Collection<Long> snpIDs = new ArrayList<>();
        for (SingleNucleotidePolymorphismXref xref : xrefs) {
            snpIDs.add(xref.getSnpID());
        }
        // Get rsID of SNPs associated with those IDs and return to HTML form
        Collection<String> associationSNPs = new ArrayList<>();
        for (Long snpID : snpIDs) {
            SingleNucleotidePolymorphism associationSNP = singleNucleotidePolymorphismRepository.findOne(snpID);
            associationSNPs.add(associationSNP.getRsID());
        }
        // Return list of SNPs entered
        CuratorReportedSNP curatorReportedSNP = new CuratorReportedSNP();
        curatorReportedSNP.setReportedSNPValue(associationSNPs);
        // Return curator added snps for editing
        model.addAttribute("reportedSNPs", curatorReportedSNP);
        return "edit_snp";
    }

    // Edit existing snp(s) linked to association: creates any missing SNPs and
    // xref links, then removes links for SNPs the curator no longer lists.
    @RequestMapping(value = "/associations/{associationId}/snps", produces = MediaType.TEXT_HTML_VALUE, method = RequestMethod.POST)
    public String editAssociationSNPs(@ModelAttribute CuratorReportedSNP reportedSNPs, @PathVariable Long associationId) {
        // Array to hold checked snp ids
        Collection<Long> checkedSNPs = new ArrayList<>();
        // For each SNP entered by the curator
        for (String reportedSNP : reportedSNPs.getReportedSNPValue()) {
            // Get snp ID based on rsID entered by curator
            SingleNucleotidePolymorphism snp = singleNucleotidePolymorphismRepository.findByRsIDIgnoreCase(reportedSNP);
            // This is the case where we have a newly entered snp that is not yet present in database
            // Thus we create snp and xref link
            if (snp == null) {
                // Create new SNP
                SingleNucleotidePolymorphism newSNP = new SingleNucleotidePolymorphism();
                newSNP.setRsID(reportedSNP);
                // Save SNP
                singleNucleotidePolymorphismRepository.save(newSNP);
                // Create link in XREF table and save
                SingleNucleotidePolymorphismXref newSNPXref = new SingleNucleotidePolymorphismXref();
                newSNPXref.setAssociationID(associationId);
                newSNPXref.setSnpID(newSNP.getId());
                singleNucleotidePolymorphismXrefRepository.save(newSNPXref);
                checkedSNPs.add(newSNP.getId());
            } else {
                // Does association already have a link to that snp?
                SingleNucleotidePolymorphismXref associationSNPlink = singleNucleotidePolymorphismXrefRepository.findByAssociationIDAndSnpID(associationId, snp.getId());
                if (associationSNPlink == null) {
                    // Create link in XREF table and save
                    SingleNucleotidePolymorphismXref newSNPXref = new SingleNucleotidePolymorphismXref();
                    newSNPXref.setAssociationID(associationId);
                    newSNPXref.setSnpID(snp.getId());
                    singleNucleotidePolymorphismXrefRepository.save(newSNPXref);
                }
                checkedSNPs.add(snp.getId());
            }
        }
        // Compare to snps originally linked to our association
        Collection<SingleNucleotidePolymorphismXref> snpsLinkedToAssociation = singleNucleotidePolymorphismXrefRepository.findByAssociationID(associationId);
        for (SingleNucleotidePolymorphismXref snpLinkedToAssociation : snpsLinkedToAssociation) {
            // Check if the snp ID is in the array of already checked snps; if not, delete the link
            if (!checkedSNPs.contains(snpLinkedToAssociation.getSnpID())) {
                singleNucleotidePolymorphismXrefRepository.delete(snpLinkedToAssociation);
            }
        }
        return "redirect:/associations/" + associationId + "/snps";
    }

    /* Model Attributes :
     * Used for dropdowns in HTML forms
     */

    // EFO traits
    @ModelAttribute("efoTraits")
    public List<EFOTrait> populateEFOTraits(Model model) {
        return efoTraitRepository.findAll();
    }
}
|
goci-interfaces/goci-curation/src/main/java/uk/ac/ebi/spot/goci/curation/controller/AssociationController.java
|
package uk.ac.ebi.spot.goci.curation.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import uk.ac.ebi.spot.goci.curation.model.Association;
import uk.ac.ebi.spot.goci.curation.model.EFOTrait;
import uk.ac.ebi.spot.goci.curation.model.SingleNucleotidePolymorphism;
import uk.ac.ebi.spot.goci.curation.model.SingleNucleotidePolymorphismXref;
import uk.ac.ebi.spot.goci.curation.repository.*;
import uk.ac.ebi.spot.goci.curation.service.CuratorReportedSNP;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
 * Created by emma on 06/01/15.
 *
 * @author emma
 * Association controller: interprets user input and transforms it into a
 * snp/association model that is represented to the user by the associated
 * HTML page. Used to view, add and edit existing snp/association information.
 * Also creates an entry in the SNP table for any new SNPs entered in the HTML form.
 */
@Controller
public class AssociationController {

    // Spring Data repositories, injected via the constructor below.
    private AssociationRepository associationRepository;
    private StudyRepository studyRepository;
    private EFOTraitRepository efoTraitRepository;
    private SingleNucleotidePolymorphismRepository singleNucleotidePolymorphismRepository;
    private SingleNucleotidePolymorphismXrefRepository singleNucleotidePolymorphismXrefRepository;

    @Autowired
    public AssociationController(AssociationRepository associationRepository, StudyRepository studyRepository, EFOTraitRepository efoTraitRepository, SingleNucleotidePolymorphismRepository singleNucleotidePolymorphismRepository, SingleNucleotidePolymorphismXrefRepository singleNucleotidePolymorphismXrefRepository) {
        this.associationRepository = associationRepository;
        this.studyRepository = studyRepository;
        this.efoTraitRepository = efoTraitRepository;
        this.singleNucleotidePolymorphismRepository = singleNucleotidePolymorphismRepository;
        this.singleNucleotidePolymorphismXrefRepository = singleNucleotidePolymorphismXrefRepository;
    }

    /* SNP/Associations associated with a study */

    // Generate list of SNP associations linked to a study
    @RequestMapping(value = "/studies/{studyId}/associations", produces = MediaType.TEXT_HTML_VALUE, method = RequestMethod.GET)
    public String viewStudySnps(Model model, @PathVariable String studyId) {
        Collection<Association> associations = new ArrayList<>();
        associations.addAll(associationRepository.findByStudyID(studyId));
        model.addAttribute("studyAssociations", associations);
        // Return an empty association object so curators can add new association/snp information to study
        model.addAttribute("studyAssociation", new Association());
        // Empty holder for the SNPs curators will enter in the form
        model.addAttribute("reportedSNPs", new CuratorReportedSNP());
        // Also passes back study object to view so we can create links back to main study page
        model.addAttribute("study", studyRepository.findOne(Long.valueOf(studyId).longValue()))
;
        return "study_association";
    }

    // Add new association/snp information to a study
    @RequestMapping(value = "/studies/{studyId}/associations", produces = MediaType.TEXT_HTML_VALUE, method = RequestMethod.POST)
    public String addStudySnps(@ModelAttribute CuratorReportedSNP reportedSNPs, @ModelAttribute Association studyAssociation, @PathVariable String studyId) {
        // Set the study ID for our association
        studyAssociation.setStudyID(studyId);
        // Save our association information
        associationRepository.save(studyAssociation);
        // ReportedSNPs object holds a collection of SNPs entered by curator.
        // For each SNP entered we need to create an entry in the SNP table.
        // TODO NEED TO ONLY CREATE ENTRY IN SNP TABLE IF IT DOESNT ALREADY EXIST
        for (String snp : reportedSNPs.getReportedSNPValue()) {
            // Create new SNP
            SingleNucleotidePolymorphism newSNP = new SingleNucleotidePolymorphism();
            newSNP.setRsID(snp);
            // Save SNP
            singleNucleotidePolymorphismRepository.save(newSNP);
            // Create link in XREF table and save
            SingleNucleotidePolymorphismXref newSNPXref = new SingleNucleotidePolymorphismXref();
            newSNPXref.setAssociationID(studyAssociation.getId());
            newSNPXref.setSnpID(newSNP.getId());
            singleNucleotidePolymorphismXrefRepository.save(newSNPXref);
        }
        return "redirect:/studies/" + studyId + "/associations";
    }

    /* Existing association information */

    // View association information, including the rsIDs of its linked SNPs
    @RequestMapping(value = "/associations/{associationId}", produces = MediaType.TEXT_HTML_VALUE, method = RequestMethod.GET)
    public String viewAssociation(Model model, @PathVariable Long associationId) {
        // Return association with that ID
        Association associationToView = associationRepository.findOne(associationId);
        model.addAttribute("studyAssociation", associationToView);
        // Find all cross-references to associated SNPs
        Collection<SingleNucleotidePolymorphismXref> xrefs = new ArrayList<>();
        xrefs.addAll(singleNucleotidePolymorphismXrefRepository.findByAssociationID(associationId));
        // For each XREF get the SNP ID
        Collection<Long> snpIDs = new ArrayList<>();
        for (SingleNucleotidePolymorphismXref xref: xrefs){
            snpIDs.add(xref.getSnpID());
        }
        // Get rsID of SNPs associated with those IDs and return to HTML form
        Collection<String> associationSNPs= new ArrayList<>();
        for(Long snpID: snpIDs){
            SingleNucleotidePolymorphism associationSNP = singleNucleotidePolymorphismRepository.findOne(snpID);
            associationSNPs.add(associationSNP.getRsID());
        }
        // Return list of SNPs entered
        CuratorReportedSNP curatorReportedSNP= new CuratorReportedSNP();
        curatorReportedSNP.setReportedSNPValue(associationSNPs);
        // Return curator added snps for editing
        model.addAttribute("reportedSNPs", curatorReportedSNP);
        return "edit_association";
    }

    // Edit existing association information
    @RequestMapping(value = "/associations/{associationId}", produces = MediaType.TEXT_HTML_VALUE, method = RequestMethod.POST)
    public String editAssociation(@ModelAttribute Association association) {
        // TODO ALSO DEAL WITH RETURNED SNPS
        // Saves the new information returned from form
        associationRepository.save(association);
        return "redirect:/studies/" + association.getStudyID() + "/associations";
    }

    /* Model Attributes :
     * Used for dropdowns in HTML forms
     */

    // EFO traits
    @ModelAttribute("efoTraits")
    public List<EFOTrait> populateEFOTraits(Model model) {
        return efoTraitRepository.findAll();
    }
}
|
Added method to check edit snps
|
goci-interfaces/goci-curation/src/main/java/uk/ac/ebi/spot/goci/curation/controller/AssociationController.java
|
Added method to check edit snps
|
|
Java
|
apache-2.0
|
2d4872e754a881bbfcad4afd8b7138aa44a1426d
| 0
|
knightingal/android_start_demo
|
package com.example.jianming.myapplication;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.app.Activity;
import android.graphics.Point;
import android.graphics.Rect;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.animation.DecelerateInterpolator;
import android.widget.ImageView;
public class ZoomActivity extends Activity {
// Hold a reference to the current animator,
// so that it can be canceled mid-way.
private Animator mCurrentAnimator;
private int mShortAnimationDuration;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_zoom);

        // Hook up clicks on the thumbnail views.
        final View thumb1View = findViewById(R.id.thumb_button_1);
        thumb1View.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Expand the tapped thumbnail into the full-size image.
                zoomImageFromThumb(thumb1View, R.drawable.anya);
            }
        });

        // Retrieve and cache the system's default "short" animation time.
        mShortAnimationDuration = getResources().getInteger(
                android.R.integer.config_shortAnimTime);
    }
    /**
     * Animates the given thumbnail up to a full-screen, zoomed-in image view,
     * then installs a click handler on the expanded image that animates it
     * back down to the thumbnail's bounds. Only one zoom animation runs at a
     * time: a newly started animation cancels the one in flight.
     *
     * @param thumbView the thumbnail that was tapped
     * @param imageResId resource id of the high-resolution image to show
     */
    private void zoomImageFromThumb(final View thumbView, int imageResId) {
        // If there's an animation in progress, cancel it
        // immediately and proceed with this one.
        if (mCurrentAnimator != null) {
            mCurrentAnimator.cancel();
        }

        // Load the high-resolution "zoomed-in" image.
        final ImageView expandedImageView = (ImageView) findViewById(
                R.id.expanded_image);
        expandedImageView.setImageResource(imageResId);

        // Calculate the starting and ending bounds for the zoomed-in image.
        // This step involves lots of math. Yay, math.
        final Rect startBounds = new Rect();
        final Rect finalBounds = new Rect();
        final Point globalOffset = new Point();

        // The start bounds are the global visible rectangle of the thumbnail,
        // and the final bounds are the global visible rectangle of the container
        // view. Also set the container view's offset as the origin for the
        // bounds, since that's the origin for the positioning animation
        // properties (X, Y).
        thumbView.getGlobalVisibleRect(startBounds);
        findViewById(R.id.container)
                .getGlobalVisibleRect(finalBounds, globalOffset);
        startBounds.offset(-globalOffset.x, -globalOffset.y);
        finalBounds.offset(-globalOffset.x, -globalOffset.y);

        // Adjust the start bounds to be the same aspect ratio as the final
        // bounds using the "center crop" technique. This prevents undesirable
        // stretching during the animation. Also calculate the start scaling
        // factor (the end scaling factor is always 1.0).
        float startScale;
        if ((float) finalBounds.width() / finalBounds.height()
                > (float) startBounds.width() / startBounds.height()) {
            // Extend start bounds horizontally
            startScale = (float) startBounds.height() / finalBounds.height();
            float startWidth = startScale * finalBounds.width();
            float deltaWidth = (startWidth - startBounds.width()) / 2;
            startBounds.left -= deltaWidth;
            startBounds.right += deltaWidth;
        } else {
            // Extend start bounds vertically
            startScale = (float) startBounds.width() / finalBounds.width();
            float startHeight = startScale * finalBounds.height();
            float deltaHeight = (startHeight - startBounds.height()) / 2;
            startBounds.top -= deltaHeight;
            startBounds.bottom += deltaHeight;
        }

        // Hide the thumbnail and show the zoomed-in view. When the animation
        // begins, it will position the zoomed-in view in the place of the
        // thumbnail.
        thumbView.setAlpha(0f);
        expandedImageView.setVisibility(View.VISIBLE);

        // Set the pivot point for SCALE_X and SCALE_Y transformations
        // to the top-left corner of the zoomed-in view (the default
        // is the center of the view).
        expandedImageView.setPivotX(0f);
        expandedImageView.setPivotY(0f);

        // Construct and run the parallel animation of the four translation and
        // scale properties (X, Y, SCALE_X, and SCALE_Y).
        AnimatorSet set = new AnimatorSet();
        set
                .play(ObjectAnimator.ofFloat(expandedImageView, View.X,
                        startBounds.left, finalBounds.left))
                .with(ObjectAnimator.ofFloat(expandedImageView, View.Y,
                        startBounds.top, finalBounds.top))
                .with(ObjectAnimator.ofFloat(expandedImageView, View.SCALE_X,
                        startScale, 1f)).with(ObjectAnimator.ofFloat(expandedImageView,
                View.SCALE_Y, startScale, 1f));
        set.setDuration(mShortAnimationDuration);
        set.setInterpolator(new DecelerateInterpolator());
        set.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                // Clear the handle so a new zoom can start.
                mCurrentAnimator = null;
            }

            @Override
            public void onAnimationCancel(Animator animation) {
                mCurrentAnimator = null;
            }
        });
        set.start();
        mCurrentAnimator = set;

        // Upon clicking the zoomed-in image, it should zoom back down
        // to the original bounds and show the thumbnail instead of
        // the expanded image.
        final float startScaleFinal = startScale;
        expandedImageView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (mCurrentAnimator != null) {
                    mCurrentAnimator.cancel();
                }

                // Animate the four positioning/sizing properties in parallel,
                // back to their original values.
                AnimatorSet set = new AnimatorSet();
                set.play(ObjectAnimator
                        .ofFloat(expandedImageView, View.X, startBounds.left))
                        .with(ObjectAnimator
                                .ofFloat(expandedImageView,
                                        View.Y, startBounds.top))
                        .with(ObjectAnimator
                                .ofFloat(expandedImageView,
                                        View.SCALE_X, startScaleFinal))
                        .with(ObjectAnimator
                                .ofFloat(expandedImageView,
                                        View.SCALE_Y, startScaleFinal));
                set.setDuration(mShortAnimationDuration);
                set.setInterpolator(new DecelerateInterpolator());
                set.addListener(new AnimatorListenerAdapter() {
                    @Override
                    public void onAnimationEnd(Animator animation) {
                        // Restore the thumbnail and hide the expanded image.
                        thumbView.setAlpha(1f);
                        expandedImageView.setVisibility(View.GONE);
                        mCurrentAnimator = null;
                    }

                    @Override
                    public void onAnimationCancel(Animator animation) {
                        thumbView.setAlpha(1f);
                        expandedImageView.setVisibility(View.GONE);
                        mCurrentAnimator = null;
                    }
                });
                set.start();
                mCurrentAnimator = set;
            }
        });
    }
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    // Returning true asks the framework to display the menu.
    getMenuInflater().inflate(R.menu.menu_main, menu);
    return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // Action bar clicks land here. Clicks on the Home/Up button are handled
    // by the framework as long as a parent activity is declared in
    // AndroidManifest.xml.
    final int selectedId = item.getItemId();
    Log.i("MainActivity", "onOptionsItemSelected" + selectedId);
    // Settings is consumed here; anything else is delegated to the superclass.
    return selectedId == R.id.action_settings || super.onOptionsItemSelected(item);
}
}
|
app/src/main/java/com/example/jianming/myapplication/ZoomActivity.java
|
package com.example.jianming.myapplication;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.app.Activity;
import android.graphics.Point;
import android.graphics.Rect;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.animation.DecelerateInterpolator;
import android.widget.ImageView;
public class ZoomActivity extends Activity {
// Hold a reference to the current animator,
// so that it can be canceled mid-way.
private Animator mCurrentAnimator;
// The system "short" animation time duration, in milliseconds. This
// duration is ideal for subtle animations or animations that occur
// very frequently.
private int mShortAnimationDuration;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_zoom);
// Hook up clicks on the thumbnail views.
final View thumb1View = findViewById(R.id.thumb_button_1);
thumb1View.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
zoomImageFromThumb(thumb1View, R.drawable.anya);
}
});
// Retrieve and cache the system's default "short" animation time.
mShortAnimationDuration = getResources().getInteger(
android.R.integer.config_shortAnimTime);
}
private void zoomImageFromThumb(final View thumbView, int imageResId) {
// If there's an animation in progress, cancel it
// immediately and proceed with this one.
if (mCurrentAnimator != null) {
mCurrentAnimator.cancel();
}
// Load the high-resolution "zoomed-in" image.
final ImageView expandedImageView = (ImageView) findViewById(
R.id.expanded_image);
expandedImageView.setImageResource(imageResId);
// Calculate the starting and ending bounds for the zoomed-in image.
// This step involves lots of math. Yay, math.
final Rect startBounds = new Rect();
final Rect finalBounds = new Rect();
final Point globalOffset = new Point();
// The start bounds are the global visible rectangle of the thumbnail,
// and the final bounds are the global visible rectangle of the container
// view. Also set the container view's offset as the origin for the
// bounds, since that's the origin for the positioning animation
// properties (X, Y).
thumbView.getGlobalVisibleRect(startBounds);
findViewById(R.id.container)
.getGlobalVisibleRect(finalBounds, globalOffset);
startBounds.offset(-globalOffset.x, -globalOffset.y);
finalBounds.offset(-globalOffset.x, -globalOffset.y);
// Adjust the start bounds to be the same aspect ratio as the final
// bounds using the "center crop" technique. This prevents undesirable
// stretching during the animation. Also calculate the start scaling
// factor (the end scaling factor is always 1.0).
float startScale;
if ((float) finalBounds.width() / finalBounds.height()
> (float) startBounds.width() / startBounds.height()) {
// Extend start bounds horizontally
startScale = (float) startBounds.height() / finalBounds.height();
float startWidth = startScale * finalBounds.width();
float deltaWidth = (startWidth - startBounds.width()) / 2;
startBounds.left -= deltaWidth;
startBounds.right += deltaWidth;
} else {
// Extend start bounds vertically
startScale = (float) startBounds.width() / finalBounds.width();
float startHeight = startScale * finalBounds.height();
float deltaHeight = (startHeight - startBounds.height()) / 2;
startBounds.top -= deltaHeight;
startBounds.bottom += deltaHeight;
}
// Hide the thumbnail and show the zoomed-in view. When the animation
// begins, it will position the zoomed-in view in the place of the
// thumbnail.
thumbView.setAlpha(0f);
expandedImageView.setVisibility(View.VISIBLE);
// Set the pivot point for SCALE_X and SCALE_Y transformations
// to the top-left corner of the zoomed-in view (the default
// is the center of the view).
expandedImageView.setPivotX(0f);
expandedImageView.setPivotY(0f);
// Construct and run the parallel animation of the four translation and
// scale properties (X, Y, SCALE_X, and SCALE_Y).
AnimatorSet set = new AnimatorSet();
set
.play(ObjectAnimator.ofFloat(expandedImageView, View.X,
startBounds.left, finalBounds.left))
.with(ObjectAnimator.ofFloat(expandedImageView, View.Y,
startBounds.top, finalBounds.top))
.with(ObjectAnimator.ofFloat(expandedImageView, View.SCALE_X,
startScale, 1f)).with(ObjectAnimator.ofFloat(expandedImageView,
View.SCALE_Y, startScale, 1f));
set.setDuration(mShortAnimationDuration);
set.setInterpolator(new DecelerateInterpolator());
set.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
mCurrentAnimator = null;
}
@Override
public void onAnimationCancel(Animator animation) {
mCurrentAnimator = null;
}
});
set.start();
mCurrentAnimator = set;
// Upon clicking the zoomed-in image, it should zoom back down
// to the original bounds and show the thumbnail instead of
// the expanded image.
final float startScaleFinal = startScale;
expandedImageView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mCurrentAnimator != null) {
mCurrentAnimator.cancel();
}
// Animate the four positioning/sizing properties in parallel,
// back to their original values.
AnimatorSet set = new AnimatorSet();
set.play(ObjectAnimator
.ofFloat(expandedImageView, View.X, startBounds.left))
.with(ObjectAnimator
.ofFloat(expandedImageView,
View.Y, startBounds.top))
.with(ObjectAnimator
.ofFloat(expandedImageView,
View.SCALE_X, startScaleFinal))
.with(ObjectAnimator
.ofFloat(expandedImageView,
View.SCALE_Y, startScaleFinal));
set.setDuration(mShortAnimationDuration);
set.setInterpolator(new DecelerateInterpolator());
set.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
thumbView.setAlpha(1f);
expandedImageView.setVisibility(View.GONE);
mCurrentAnimator = null;
}
@Override
public void onAnimationCancel(Animator animation) {
thumbView.setAlpha(1f);
expandedImageView.setVisibility(View.GONE);
mCurrentAnimator = null;
}
});
set.start();
mCurrentAnimator = set;
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
Log.i("MainActivity", "onOptionsItemSelected" + id);
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
}
|
commit from coding webIDE
|
app/src/main/java/com/example/jianming/myapplication/ZoomActivity.java
|
commit from coding webIDE
|
|
Java
|
apache-2.0
|
f43d678cc0b2923b543bcd43a4fc8a6bd44617e9
| 0
|
lettuce-io/lettuce-core,mp911de/lettuce,lettuce-io/lettuce-core,mp911de/lettuce,lettuce-io/lettuce-core,lettuce-io/lettuce-core
|
package com.lambdaworks.redis.commands.rx;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import com.lambdaworks.redis.internal.AbstractInvocationHandler;
import rx.Observable;
import com.lambdaworks.redis.api.StatefulConnection;
import com.lambdaworks.redis.api.StatefulRedisConnection;
import com.lambdaworks.redis.api.sync.RedisCommands;
import com.lambdaworks.redis.cluster.api.StatefulRedisClusterConnection;
import com.lambdaworks.redis.internal.LettuceLists;
import com.lambdaworks.redis.internal.LettuceSets;
import com.lambdaworks.redis.sentinel.api.StatefulRedisSentinelConnection;
import com.lambdaworks.redis.sentinel.api.sync.RedisSentinelCommands;
/**
 * Invocation handler for testing purposes. Bridges a synchronous command
 * interface onto the reactive (Rx) API: each invoked method is forwarded to
 * the method with the same signature on the reactive API object and the
 * resulting {@link Observable} is materialized by blocking.
 *
 * @param <K>
 * @param <V>
 */
public class RxSyncInvocationHandler<K, V> extends AbstractInvocationHandler {

    // Underlying connection; inspected to detect an active MULTI transaction.
    private final StatefulConnection<?, ?> connection;

    // Reactive API object that mirrors the synchronous command interface.
    private final Object rxApi;

    public RxSyncInvocationHandler(StatefulConnection<?, ?> connection, Object rxApi) {
        this.connection = connection;
        this.rxApi = rxApi;
    }

    @Override
    @SuppressWarnings("unchecked")
    protected Object handleInvocation(Object proxy, Method method, Object[] args) throws Throwable {
        try {
            // Forward the call to the reactive API method with the same signature.
            Method targetMethod = rxApi.getClass().getMethod(method.getName(), method.getParameterTypes());
            Object result = targetMethod.invoke(rxApi, args);
            // Non-Observable results (and null) pass through unchanged.
            if (result == null || !(result instanceof Observable<?>)) {
                return result;
            }
            Observable<?> observable = (Observable<?>) result;
            // Inside a MULTI transaction, commands other than EXEC/MULTI are
            // only queued: subscribe to dispatch the command, but do not block
            // waiting for a value.
            if (!method.getName().equals("exec") && !method.getName().equals("multi")) {
                if (connection instanceof StatefulRedisConnection && ((StatefulRedisConnection) connection).isMulti()) {
                    observable.subscribe();
                    return null;
                }
            }
            // toList().toBlocking().first() drains the stream and re-throws
            // onError as an exception even after onNext was emitted
            // (exception propagation).
            List<?> value = observable.toList().toBlocking().first();
            if (method.getReturnType().equals(List.class)) {
                return value;
            }
            if (method.getReturnType().equals(Set.class)) {
                return LettuceSets.newHashSet(value);
            }
            // Scalar return type: unwrap the single emitted element, if any.
            if (!value.isEmpty()) {
                return value.get(0);
            }
            return null;
        } catch (InvocationTargetException e) {
            // Unwrap the reflection wrapper so callers see the original exception.
            throw e.getTargetException();
        }
    }

    // Creates a sync-API proxy backed by the reactive API of a standalone connection.
    public static <K, V> RedisCommands<K, V> sync(StatefulRedisConnection<K, V> connection) {
        RxSyncInvocationHandler<K, V> handler = new RxSyncInvocationHandler<>(connection, connection.reactive());
        return (RedisCommands<K, V>) Proxy.newProxyInstance(handler.getClass().getClassLoader(),
                new Class<?>[] { RedisCommands.class }, handler);
    }

    // Creates a sync-API proxy backed by the reactive API of a cluster connection.
    public static <K, V> RedisCommands<K, V> sync(StatefulRedisClusterConnection<K, V> connection) {
        RxSyncInvocationHandler<K, V> handler = new RxSyncInvocationHandler<>(connection, connection.reactive());
        return (RedisCommands<K, V>) Proxy.newProxyInstance(handler.getClass().getClassLoader(),
                new Class<?>[] { RedisCommands.class }, handler);
    }

    // Creates a sync-API proxy backed by the reactive API of a sentinel connection.
    public static <K, V> RedisSentinelCommands<K, V> sync(StatefulRedisSentinelConnection<K, V> connection) {
        RxSyncInvocationHandler<K, V> handler = new RxSyncInvocationHandler<>(connection, connection.reactive());
        return (RedisSentinelCommands<K, V>) Proxy.newProxyInstance(handler.getClass().getClassLoader(),
                new Class<?>[] { RedisSentinelCommands.class }, handler);
    }
}
|
src/test/java/com/lambdaworks/redis/commands/rx/RxSyncInvocationHandler.java
|
package com.lambdaworks.redis.commands.rx;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import com.lambdaworks.redis.internal.AbstractInvocationHandler;
import rx.Observable;
import com.lambdaworks.redis.api.StatefulConnection;
import com.lambdaworks.redis.api.StatefulRedisConnection;
import com.lambdaworks.redis.api.sync.RedisCommands;
import com.lambdaworks.redis.cluster.api.StatefulRedisClusterConnection;
import com.lambdaworks.redis.internal.LettuceLists;
import com.lambdaworks.redis.internal.LettuceSets;
import com.lambdaworks.redis.sentinel.api.StatefulRedisSentinelConnection;
import com.lambdaworks.redis.sentinel.api.sync.RedisSentinelCommands;
/**
 * Invocation handler for testing purposes. Bridges a synchronous command
 * interface onto the reactive (Rx) API: each invoked method is forwarded to
 * the method with the same signature on the reactive API object and the
 * resulting {@link Observable} is materialized by blocking.
 *
 * @param <K>
 * @param <V>
 */
public class RxSyncInvocationHandler<K, V> extends AbstractInvocationHandler {

    // Underlying connection; inspected to detect an active MULTI transaction.
    private final StatefulConnection<?, ?> connection;

    // Reactive API object that mirrors the synchronous command interface.
    private final Object rxApi;

    public RxSyncInvocationHandler(StatefulConnection<?, ?> connection, Object rxApi) {
        this.connection = connection;
        this.rxApi = rxApi;
    }

    @Override
    @SuppressWarnings("unchecked")
    protected Object handleInvocation(Object proxy, Method method, Object[] args) throws Throwable {
        try {
            // Forward the call to the reactive API method with the same signature.
            Method targetMethod = rxApi.getClass().getMethod(method.getName(), method.getParameterTypes());
            Object result = targetMethod.invoke(rxApi, args);
            // Non-Observable results (and null) pass through unchanged.
            if (result == null || !(result instanceof Observable<?>)) {
                return result;
            }
            Observable<?> observable = (Observable<?>) result;
            // Inside a MULTI transaction, commands other than EXEC/MULTI are
            // only queued: subscribe to dispatch the command, but do not block
            // waiting for a value.
            if (!method.getName().equals("exec") && !method.getName().equals("multi")) {
                if (connection instanceof StatefulRedisConnection && ((StatefulRedisConnection) connection).isMulti()) {
                    observable.subscribe();
                    return null;
                }
            }
            // NOTE(review): toBlocking().toIterable() surfaces onError lazily
            // while iterating, so an error emitted after onNext may be lost if
            // iteration stops early — confirm against the Rx 1.x contract.
            Iterable<?> objects = observable.toBlocking().toIterable();
            if (method.getReturnType().equals(List.class)) {
                return LettuceLists.newList(objects);
            }
            if (method.getReturnType().equals(Set.class)) {
                return LettuceSets.newHashSet(objects);
            }
            // Scalar return type: unwrap the first emitted element, if any.
            Iterator<?> iterator = objects.iterator();
            if (iterator.hasNext()) {
                return iterator.next();
            }
            return null;
        } catch (InvocationTargetException e) {
            // Unwrap the reflection wrapper so callers see the original exception.
            throw e.getTargetException();
        }
    }

    // Creates a sync-API proxy backed by the reactive API of a standalone connection.
    public static <K, V> RedisCommands<K, V> sync(StatefulRedisConnection<K, V> connection) {
        RxSyncInvocationHandler<K, V> handler = new RxSyncInvocationHandler<>(connection, connection.reactive());
        return (RedisCommands<K, V>) Proxy.newProxyInstance(handler.getClass().getClassLoader(),
                new Class<?>[] { RedisCommands.class }, handler);
    }

    // Creates a sync-API proxy backed by the reactive API of a cluster connection.
    public static <K, V> RedisCommands<K, V> sync(StatefulRedisClusterConnection<K, V> connection) {
        RxSyncInvocationHandler<K, V> handler = new RxSyncInvocationHandler<>(connection, connection.reactive());
        return (RedisCommands<K, V>) Proxy.newProxyInstance(handler.getClass().getClassLoader(),
                new Class<?>[] { RedisCommands.class }, handler);
    }

    // Creates a sync-API proxy backed by the reactive API of a sentinel connection.
    public static <K, V> RedisSentinelCommands<K, V> sync(StatefulRedisSentinelConnection<K, V> connection) {
        RxSyncInvocationHandler<K, V> handler = new RxSyncInvocationHandler<>(connection, connection.reactive());
        return (RedisSentinelCommands<K, V>) Proxy.newProxyInstance(handler.getClass().getClassLoader(),
                new Class<?>[] { RedisSentinelCommands.class }, handler);
    }
}
|
Fix RxSyncInvocationHandler
Switch to exception propagation so errors are propagated after onNext was called.
|
src/test/java/com/lambdaworks/redis/commands/rx/RxSyncInvocationHandler.java
|
Fix RxSyncInvocationHandler
|
|
Java
|
apache-2.0
|
fc763ea0fc3e664f053e0ef5963341e9942fa7cf
| 0
|
olehmberg/winter
|
/*
* Copyright (c) 2017 Data and Web Science Group, University of Mannheim, Germany (http://dws.informatik.uni-mannheim.de/)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package de.uni_mannheim.informatik.dws.winter.matching.blockers;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import de.uni_mannheim.informatik.dws.winter.matching.blockers.generators.BlockingKeyGenerator;
import de.uni_mannheim.informatik.dws.winter.model.Correspondence;
import de.uni_mannheim.informatik.dws.winter.model.DataSet;
import de.uni_mannheim.informatik.dws.winter.model.LeftIdentityPair;
import de.uni_mannheim.informatik.dws.winter.model.Matchable;
import de.uni_mannheim.informatik.dws.winter.model.Pair;
import de.uni_mannheim.informatik.dws.winter.processing.DataIterator;
import de.uni_mannheim.informatik.dws.winter.processing.PairFirstJoinKeyGenerator;
import de.uni_mannheim.informatik.dws.winter.processing.Processable;
import de.uni_mannheim.informatik.dws.winter.processing.ProcessableCollection;
import de.uni_mannheim.informatik.dws.winter.processing.RecordMapper;
import de.uni_mannheim.informatik.dws.winter.processing.aggregators.DistributionAggregator;
import de.uni_mannheim.informatik.dws.winter.processing.aggregators.StringConcatenationAggregator;
import de.uni_mannheim.informatik.dws.winter.utils.Distribution;
import de.uni_mannheim.informatik.dws.winter.utils.query.Q;
/**
* Implementation of a standard {@link AbstractBlocker} based on blocking keys. All records for which the same blocking key is generated are returned as pairs.
*
* @author Oliver Lehmberg (oli@dwslab.de)
*
* @param <RecordType> the type of records which are the input for the blocking operation
* @param <SchemaElementType> the type of schema elements that are used in the schema of RecordType
* @param <CorrespondenceType> the type of correspondences which are the input for the blocking operation
* @param <BlockedType> the type of record which is actually blocked
*/
public class StandardBlocker<RecordType extends Matchable, SchemaElementType extends Matchable, BlockedType extends Matchable, CorrespondenceType extends Matchable>
extends AbstractBlocker<RecordType, BlockedType, CorrespondenceType>
implements Blocker<RecordType, SchemaElementType, BlockedType, CorrespondenceType>,
SymmetricBlocker<RecordType, SchemaElementType, BlockedType, CorrespondenceType>
{
private BlockingKeyGenerator<RecordType, CorrespondenceType, BlockedType> blockingFunction;
private BlockingKeyGenerator<RecordType, CorrespondenceType, BlockedType> secondBlockingFunction;
private boolean measureBlockSizes = false;
private double blockFilterRatio = 1.0;
private int maxBlockPairSize = 0;
private boolean deduplicatePairs = true;
/**
 * Enables or disables diagnostic output of block-size statistics (number of
 * blocking keys, block size distribution, most frequent key values) during
 * blocking.
 *
 * @param measureBlockSizes the measureBlockSizes to set
 */
public void setMeasureBlockSizes(boolean measureBlockSizes) {
    this.measureBlockSizes = measureBlockSizes;
}
/**
 * Sets the fraction of blocks to keep: the blocks producing the most pairs
 * are removed until only this ratio of all blocks remains. A value of 1.0
 * (the default) keeps every block.
 *
 * @param blockFilterRatio the blockFilterRatio to set
 */
public void setBlockFilterRatio(double blockFilterRatio) {
    this.blockFilterRatio = blockFilterRatio;
}
/**
 * Sets the maximum number of pairs that can be produced by each block;
 * blocks with more pairs are removed completely.
 * Ignored if set to 0 (the default).
 *
 * @param maxBlockPairSize the maximum number of pairs that can be produced by each block
 */
public void setMaxBlockPairSize(int maxBlockPairSize) {
    this.maxBlockPairSize = maxBlockPairSize;
}
/**
 * Controls whether correspondences produced by multiple blocks are
 * de-duplicated (via distinct()) before the result is returned.
 *
 * @param deduplicatePairs the deduplicatePairs to set
 */
public void setDeduplicatePairs(boolean deduplicatePairs) {
    this.deduplicatePairs = deduplicatePairs;
}
/**
 * Creates a new Standard Blocker that uses the given blocking function for
 * both datasets.
 *
 * @param blockingFunction the blocking key generator applied to every record
 */
public StandardBlocker(BlockingKeyGenerator<RecordType, CorrespondenceType, BlockedType> blockingFunction) {
    this.blockingFunction = blockingFunction;
    this.secondBlockingFunction = blockingFunction;
}
/**
 * Creates a new Standard Blocker with the given blocking function(s).
 * If two datasets are used and secondBlockingFunction is not null,
 * secondBlockingFunction will be used for the second dataset. If it is null,
 * blockingFunction will be used for both datasets.
 *
 * @param blockingFunction the blocking function for the first dataset
 * @param secondBlockingFunction the blocking function for the second dataset (may be null)
 */
public StandardBlocker(BlockingKeyGenerator<RecordType, CorrespondenceType, BlockedType> blockingFunction, BlockingKeyGenerator<RecordType, CorrespondenceType, BlockedType> secondBlockingFunction) {
    this.blockingFunction = blockingFunction;
    // Fall back to the first function when no dedicated second one is given.
    this.secondBlockingFunction = secondBlockingFunction == null ? blockingFunction : secondBlockingFunction;
}
/*
 * Two-dataset blocking: generates blocking keys for both datasets, joins the
 * key groups, optionally filters oversized blocks, and emits one candidate
 * correspondence per cross-product pair within each block.
 *
 * (non-Javadoc)
 * @see de.uni_mannheim.informatik.wdi.matching.blocking.Blocker#runBlocking(de.uni_mannheim.informatik.wdi.model.DataSet, de.uni_mannheim.informatik.wdi.model.DataSet, de.uni_mannheim.informatik.wdi.model.ResultSet, de.uni_mannheim.informatik.wdi.matching.MatchingEngine)
 */
@Override
public Processable<Correspondence<BlockedType, CorrespondenceType>> runBlocking(
        DataSet<RecordType, SchemaElementType> dataset1,
        DataSet<RecordType, SchemaElementType> dataset2,
        Processable<Correspondence<CorrespondenceType, Matchable>> schemaCorrespondences){

    // combine the datasets with the schema correspondences; each record is
    // paired with the correspondences whose data source id matches its side
    Processable<Pair<RecordType, Processable<Correspondence<CorrespondenceType, Matchable>>>> ds1 = combineDataWithCorrespondences(dataset1, schemaCorrespondences, (r,c)->c.next(new Pair<>(r.getFirstRecord().getDataSourceIdentifier(),r)));
    Processable<Pair<RecordType, Processable<Correspondence<CorrespondenceType, Matchable>>>> ds2 = combineDataWithCorrespondences(dataset2, schemaCorrespondences, (r,c)->c.next(new Pair<>(r.getSecondRecord().getDataSourceIdentifier(),r)));

    // if we group the records by blocking key, we can obtain duplicates for
    // BlockedType if it is different from RecordType and multiple records
    // generated the same blocking key for BlockedType, so we aggregate the
    // results to get a unique set of BlockedType elements (using the
    // DistributionAggregator)

    // create the blocking keys for the first data set
    // results in pairs of [blocking key], distribution of correspondences
    Processable<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> grouped1 =
            ds1.aggregate(blockingFunction, new DistributionAggregator<String, Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>, Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>() {

                private static final long serialVersionUID = 1L;

                @Override
                public Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> getInnerKey(
                        Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> record) {
                    // change the pairs such that they are considered equal if the first element is equal (ignoring the second element)
                    return new LeftIdentityPair<>(record.getFirst(), record.getSecond());
                }
            });

    // create the blocking keys for the second data set
    Processable<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> grouped2 =
            ds2.aggregate(secondBlockingFunction, new DistributionAggregator<String, Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>, Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>() {

                private static final long serialVersionUID = 1L;

                @Override
                public Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> getInnerKey(
                        Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> record) {
                    // change the pairs such that they are considered equal if the first element is equal (ignoring the second element)
                    return new LeftIdentityPair<>(record.getFirst(), record.getSecond());
                }
            });

    if(measureBlockSizes) {
        System.out.println(String.format("[StandardBlocker] created %d blocking keys for first dataset", grouped1.size()));
        System.out.println(String.format("[StandardBlocker] created %d blocking keys for second dataset", grouped2.size()));
    }

    // join the datasets via their blocking keys; each joined pair is one block
    Processable<Pair<
        Pair<String,Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>,
        Pair<String,Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>>>
        blockedData = grouped1.join(grouped2, new PairFirstJoinKeyGenerator<>());

    if(measureBlockSizes) {
        System.out.println(String.format("[StandardBlocker] created %d blocks from blocking keys", blockedData.size()));
    }

    // drop blocks whose pair count exceeds maxBlockPairSize (if configured);
    // the product is computed in long to avoid int overflow
    if(maxBlockPairSize>0) {
        blockedData = blockedData
            .where(
                (p)->((long)p.getFirst().getSecond().getNumElements() * (long)p.getSecond().getSecond().getNumElements()) <= maxBlockPairSize
            );
        if(measureBlockSizes) {
            System.out.println(String.format("[StandardBlocker] %d blocks after filtering by max block size (<= %d pairs)", blockedData.size(), maxBlockPairSize));
        }
    }

    // remove the largest blocks, if requested
    if(blockFilterRatio<1.0) {
        System.out.println(String.format("[StandardBlocker] %d blocks before filtering", blockedData.size()));

        // the (1 - ratio) largest blocks, determined for diagnostics only
        Processable<Pair<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>, Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>>> toRemove = blockedData
            .sort((p)->p.getFirst().getSecond().getNumElements()*p.getSecond().getSecond().getNumElements(), false)
            .take((int)(blockedData.size()*(1-blockFilterRatio)));

        if(measureBlockSizes) {
            for(Pair<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>, Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> p : toRemove.get()) {
                System.out.println(String.format("\tRemoving block '%s' (%d pairs)",
                    p.getFirst().getFirst(),
                    p.getFirst().getSecond().getNumElements() * p.getSecond().getSecond().getNumElements()));
            }
        }

        // keep the smallest blockFilterRatio fraction of blocks
        blockedData = blockedData
            .sort((p)->p.getFirst().getSecond().getNumElements()*p.getSecond().getSecond().getNumElements(), true)
            .take((int)(blockedData.size()*blockFilterRatio));
        System.out.println(String.format("[StandardBlocker] %d blocks after filtering", blockedData.size()));
    }

    if(measureBlockSizes) {
        // calculate block size distribution
        Processable<Pair<Integer, Distribution<Integer>>> aggregated = blockedData.aggregate(
            (Pair<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>, Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> record,
            DataIterator<Pair<Integer, Integer>> resultCollector)
            -> {
                int blockSize = record.getFirst().getSecond().getNumElements() * record.getSecond().getSecond().getNumElements();
                resultCollector.next(new Pair<Integer, Integer>(0, blockSize));
            }
            , new DistributionAggregator<Integer, Integer, Integer>() {

                private static final long serialVersionUID = 1L;

                @Override
                public Integer getInnerKey(Integer record) {
                    return record;
                }
            });

        Pair<Integer, Distribution<Integer>> aggregationResult = Q.firstOrDefault(aggregated.get());
        if(aggregationResult!=null) {
            Distribution<Integer> dist = aggregationResult.getSecond();
            System.out.println("[StandardBlocker] Block size distribution:");
            System.out.println(dist.format());

            // determine frequent blocking key values
            Processable<Pair<Integer, String>> blockValues = blockedData.aggregate(
                (Pair<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>, Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> record,
                DataIterator<Pair<Integer, String>> resultCollector)
                -> {
                    int blockSize = record.getFirst().getSecond().getNumElements() * record.getSecond().getSecond().getNumElements();
                    resultCollector.next(new Pair<Integer, String>(blockSize, record.getFirst().getFirst()));
                },
                new StringConcatenationAggregator<>(","))
                .sort((p)->p.getFirst(), false);

            System.out.println("50 most-frequent blocking key values:");
            for(Pair<Integer, String> value : blockValues.take(50).get()) {
                System.out.println(String.format("\t%d\t%s", value.getFirst(), value.getSecond()));
            }
        } else {
            System.out.println("No blocks were created!");
        }
    }

    // transform the blocks into pairs of records
    Processable<Correspondence<BlockedType, CorrespondenceType>> result = blockedData.map(new RecordMapper<Pair<
        Pair<String,Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>,
        Pair<String,Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>>,
        Correspondence<BlockedType, CorrespondenceType>>() {

        private static final long serialVersionUID = 1L;

        @Override
        public void mapRecord(
                Pair<
                Pair<String,Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>,
                Pair<String,Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> record,
                DataIterator<Correspondence<BlockedType, CorrespondenceType>> resultCollector) {

            // iterate over the left pairs [blocked element],[correspondences]
            for(Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> p1 : record.getFirst().getSecond().getElements()){

                BlockedType record1 = p1.getFirst();

                // iterate over the right pairs [blocked element],[correspondences]
                for(Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> p2 : record.getSecond().getSecond().getElements()){

                    BlockedType record2 = p2.getFirst();

                    // merge the causal correspondences from both sides, without duplicates
                    Processable<Correspondence<CorrespondenceType, Matchable>> causes =
                        new ProcessableCollection<>(p1.getSecond())
                        .append(p2.getSecond())
                        .distinct();

                    // sorted data source ids of the two blocked records, used
                    // for order-insensitive comparison below
                    int[] pairIds = new int[] { p1.getFirst().getDataSourceIdentifier(), p2.getFirst().getDataSourceIdentifier() };
                    Arrays.sort(pairIds);

                    // filter the correspondences such that only correspondences between the two records are contained (by data source id)
                    causes = causes.where((c)-> {
                        int[] causeIds = new int[] { c.getFirstRecord().getDataSourceIdentifier(), c.getSecondRecord().getDataSourceIdentifier() };
                        Arrays.sort(causeIds);
                        return Arrays.equals(pairIds, causeIds);
                    });

                    resultCollector.next(new Correspondence<BlockedType, CorrespondenceType>(record1, record2, 1.0, causes));
                }
            }
        }
    });

    if(deduplicatePairs) {
        // use .distinct() to remove correspondences that are found in multiple blocks
        result = result.distinct();
    }

    calculatePerformance(dataset1, dataset2, result);

    return result;
}
/* (non-Javadoc)
* @see de.uni_mannheim.informatik.wdi.matching.blocking.Blocker#runBlocking(de.uni_mannheim.informatik.wdi.model.DataSet, boolean, de.uni_mannheim.informatik.wdi.model.ResultSet, de.uni_mannheim.informatik.wdi.matching.MatchingEngine)
*/
@Override
public Processable<Correspondence<BlockedType, CorrespondenceType>> runBlocking(
DataSet<RecordType, SchemaElementType> dataset,
Processable<Correspondence<CorrespondenceType, Matchable>> schemaCorrespondences) {
// combine the datasets with the schema correspondences
// as we only use one dataset here, we don't know if the record is on the left- or right-hand side of the correspondence
Processable<Pair<RecordType, Processable<Correspondence<CorrespondenceType, Matchable>>>> ds = combineDataWithCorrespondences(dataset, schemaCorrespondences,
(r,c)->
{
c.next(new Pair<>(r.getFirstRecord().getDataSourceIdentifier(),r));
c.next(new Pair<>(r.getSecondRecord().getDataSourceIdentifier(),r));
});
// if we group the records by blocking key, we can obtain duplicates for BlockedType if it is different from RecordType and multiple records generated the same blocking key for BlockedType
// so we aggregate the results to get a unique set of BlockedType elements (using the DistributionAggregator)
// group all records by their blocking keys
Processable<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> grouped = ds.aggregate(blockingFunction, new DistributionAggregator<String, Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>, Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>() {
private static final long serialVersionUID = 1L;
@Override
public Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> getInnerKey(
Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> record) {
// change the pairs such that they are considered equal if the first element is equal (ignoring the second element)
return new LeftIdentityPair<>(record.getFirst(), record.getSecond());
}
});
// transform the groups into record pairs
Processable<Correspondence<BlockedType, CorrespondenceType>> blocked = grouped.map((g, collector) ->
{
List<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>> list = new ArrayList<>(g.getSecond().getElements());
// sort the list before generating the pairs, so all pairs have the lower data source id on the left-hand side.
list.sort((o1,o2)->Integer.compare(o1.getFirst().getDataSourceIdentifier(), o2.getFirst().getDataSourceIdentifier()));
for(int i = 0; i < list.size(); i++) {
Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> p1 = list.get(i);
for(int j = i+1; j < list.size(); j++) {
Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> p2 = list.get(j);
Processable<Correspondence<CorrespondenceType, Matchable>> causes = new ProcessableCollection<>(p1.getSecond()).append(p2.getSecond());
int[] pairIds = new int[] { p1.getFirst().getDataSourceIdentifier(), p2.getFirst().getDataSourceIdentifier() };
Arrays.sort(pairIds);
// filter the correspondences such that only correspondences between the two records (p1 & p2) are contained (by data source id)
causes = causes.where((c)->
{
int[] causeIds = new int[] { c.getFirstRecord().getDataSourceIdentifier(), c.getSecondRecord().getDataSourceIdentifier() };
Arrays.sort(causeIds);
return Arrays.equals(pairIds, causeIds);
}).distinct();
collector.next(new Correspondence<>(p1.getFirst(), p2.getFirst(), 1.0, causes));
}
}
});
// remove duplicates that were created if two records have multiple matching blocking keys
blocked = blocked.distinct();
calculatePerformance(dataset, dataset, blocked);
return blocked;
}
}
|
winter-framework/src/main/java/de/uni_mannheim/informatik/dws/winter/matching/blockers/StandardBlocker.java
|
/*
* Copyright (c) 2017 Data and Web Science Group, University of Mannheim, Germany (http://dws.informatik.uni-mannheim.de/)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package de.uni_mannheim.informatik.dws.winter.matching.blockers;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import de.uni_mannheim.informatik.dws.winter.matching.blockers.generators.BlockingKeyGenerator;
import de.uni_mannheim.informatik.dws.winter.model.Correspondence;
import de.uni_mannheim.informatik.dws.winter.model.DataSet;
import de.uni_mannheim.informatik.dws.winter.model.LeftIdentityPair;
import de.uni_mannheim.informatik.dws.winter.model.Matchable;
import de.uni_mannheim.informatik.dws.winter.model.Pair;
import de.uni_mannheim.informatik.dws.winter.processing.DataIterator;
import de.uni_mannheim.informatik.dws.winter.processing.PairFirstJoinKeyGenerator;
import de.uni_mannheim.informatik.dws.winter.processing.Processable;
import de.uni_mannheim.informatik.dws.winter.processing.ProcessableCollection;
import de.uni_mannheim.informatik.dws.winter.processing.RecordMapper;
import de.uni_mannheim.informatik.dws.winter.processing.aggregators.DistributionAggregator;
import de.uni_mannheim.informatik.dws.winter.processing.aggregators.StringConcatenationAggregator;
import de.uni_mannheim.informatik.dws.winter.utils.Distribution;
import de.uni_mannheim.informatik.dws.winter.utils.query.Q;
/**
* Implementation of a standard {@link AbstractBlocker} based on blocking keys. All records for which the same blocking key is generated are returned as pairs.
*
* @author Oliver Lehmberg (oli@dwslab.de)
*
* @param <RecordType> the type of records which are the input for the blocking operation
* @param <SchemaElementType> the type of schema elements that are used in the schema of RecordType
* @param <CorrespondenceType> the type of correspondences which are the input for the blocking operation
* @param <BlockedType> the type of record which is actually blocked
*/
public class StandardBlocker<RecordType extends Matchable, SchemaElementType extends Matchable, BlockedType extends Matchable, CorrespondenceType extends Matchable>
extends AbstractBlocker<RecordType, BlockedType, CorrespondenceType>
implements Blocker<RecordType, SchemaElementType, BlockedType, CorrespondenceType>,
SymmetricBlocker<RecordType, SchemaElementType, BlockedType, CorrespondenceType>
{
private BlockingKeyGenerator<RecordType, CorrespondenceType, BlockedType> blockingFunction;
private BlockingKeyGenerator<RecordType, CorrespondenceType, BlockedType> secondBlockingFunction;
private boolean measureBlockSizes = false;
private double blockFilterRatio = 1.0;
private boolean deduplicatePairs = true;
/**
* @param measureBlockSizes the measureBlockSizes to set
*/
public void setMeasureBlockSizes(boolean measureBlockSizes) {
this.measureBlockSizes = measureBlockSizes;
}
/**
* @param blockFilterRatio the blockFilterRatio to set
*/
public void setBlockFilterRatio(double blockFilterRatio) {
this.blockFilterRatio = blockFilterRatio;
}
/**
* @param deduplicatePairs the deduplicatePairs to set
*/
public void setDeduplicatePairs(boolean deduplicatePairs) {
this.deduplicatePairs = deduplicatePairs;
}
public StandardBlocker(BlockingKeyGenerator<RecordType, CorrespondenceType, BlockedType> blockingFunction) {
this.blockingFunction = blockingFunction;
this.secondBlockingFunction = blockingFunction;
}
/**
*
* Creates a new Standard Blocker with the given blocking function(s).
* If two datasets are used and secondBlockingFunction is not null, secondBlockingFunction will be used for the second dataset. If it is null, blockingFunction will be used for both datasets
*
* @param blockingFunction the blocking function for the first dataset
* @param secondBlockingFunction the blocking function for the second dataset
*/
public StandardBlocker(BlockingKeyGenerator<RecordType, CorrespondenceType, BlockedType> blockingFunction, BlockingKeyGenerator<RecordType, CorrespondenceType, BlockedType> secondBlockingFunction) {
this.blockingFunction = blockingFunction;
this.secondBlockingFunction = secondBlockingFunction == null ? blockingFunction : secondBlockingFunction;
}
/* (non-Javadoc)
* @see de.uni_mannheim.informatik.wdi.matching.blocking.Blocker#runBlocking(de.uni_mannheim.informatik.wdi.model.DataSet, de.uni_mannheim.informatik.wdi.model.DataSet, de.uni_mannheim.informatik.wdi.model.ResultSet, de.uni_mannheim.informatik.wdi.matching.MatchingEngine)
*/
@Override
public Processable<Correspondence<BlockedType, CorrespondenceType>> runBlocking(
DataSet<RecordType, SchemaElementType> dataset1,
DataSet<RecordType, SchemaElementType> dataset2,
Processable<Correspondence<CorrespondenceType, Matchable>> schemaCorrespondences){
// combine the datasets with the schema correspondences
Processable<Pair<RecordType, Processable<Correspondence<CorrespondenceType, Matchable>>>> ds1 = combineDataWithCorrespondences(dataset1, schemaCorrespondences, (r,c)->c.next(new Pair<>(r.getFirstRecord().getDataSourceIdentifier(),r)));
Processable<Pair<RecordType, Processable<Correspondence<CorrespondenceType, Matchable>>>> ds2 = combineDataWithCorrespondences(dataset2, schemaCorrespondences, (r,c)->c.next(new Pair<>(r.getSecondRecord().getDataSourceIdentifier(),r)));
// if we group the records by blocking key, we can obtain duplicates for BlockedType if it is different from RecordType and multiple records generated the same blocking key for BlockedType
// so we aggregate the results to get a unique set of BlockedType elements (using the DistributionAggregator)
// create the blocking keys for the first data set
// results in pairs of [blocking key], distribution of correspondences
Processable<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> grouped1 =
ds1.aggregate(blockingFunction, new DistributionAggregator<String, Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>, Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>() {
private static final long serialVersionUID = 1L;
@Override
public Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> getInnerKey(
Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> record) {
// change the pairs such that they are considered equal if the first element is equal (ignoring the second element)
return new LeftIdentityPair<>(record.getFirst(), record.getSecond());
}
});
// create the blocking keys for the second data set
Processable<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> grouped2 =
ds2.aggregate(secondBlockingFunction, new DistributionAggregator<String, Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>, Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>() {
private static final long serialVersionUID = 1L;
@Override
public Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> getInnerKey(
Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> record) {
// change the pairs such that they are considered equal if the first element is equal (ignoring the second element)
return new LeftIdentityPair<>(record.getFirst(), record.getSecond());
}
});
if(measureBlockSizes) {
System.out.println(String.format("[StandardBlocker] created %d blocking keys for first dataset", grouped1.size()));
System.out.println(String.format("[StandardBlocker] created %d blocking keys for second dataset", grouped2.size()));
}
// join the datasets via their blocking keys
Processable<Pair<
Pair<String,Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>,
Pair<String,Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>>>
blockedData = grouped1.join(grouped2, new PairFirstJoinKeyGenerator<>());
if(measureBlockSizes) {
System.out.println(String.format("[StandardBlocker] created %d blocks from blocking keys", blockedData.size()));
}
// remove the largest blocks, if requested
if(blockFilterRatio<1.0) {
System.out.println(String.format("[StandardBlocker] %d blocks before filtering", blockedData.size()));
Processable<Pair<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>, Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>>> toRemove = blockedData
.sort((p)->p.getFirst().getSecond().getNumElements()*p.getSecond().getSecond().getNumElements(), false)
.take((int)(blockedData.size()*(1-blockFilterRatio)));
if(measureBlockSizes) {
for(Pair<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>, Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> p : toRemove.get()) {
System.out.println(String.format("\tRemoving block '%s' (%d pairs)",
p.getFirst().getFirst(),
p.getFirst().getSecond().getNumElements() * p.getSecond().getSecond().getNumElements()));
}
}
blockedData = blockedData
.sort((p)->p.getFirst().getSecond().getNumElements()*p.getSecond().getSecond().getNumElements(), true)
.take((int)(blockedData.size()*blockFilterRatio));
System.out.println(String.format("[StandardBlocker] %d blocks after filtering", blockedData.size()));
}
if(measureBlockSizes) {
// calculate block size distribution
Processable<Pair<Integer, Distribution<Integer>>> aggregated = blockedData.aggregate(
(Pair<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>, Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> record,
DataIterator<Pair<Integer, Integer>> resultCollector)
-> {
int blockSize = record.getFirst().getSecond().getNumElements() * record.getSecond().getSecond().getNumElements();
resultCollector.next(new Pair<Integer, Integer>(0, blockSize));
}
, new DistributionAggregator<Integer, Integer, Integer>() {
private static final long serialVersionUID = 1L;
@Override
public Integer getInnerKey(Integer record) {
return record;
}
});
Pair<Integer, Distribution<Integer>> aggregationResult = Q.firstOrDefault(aggregated.get());
if(aggregationResult!=null) {
Distribution<Integer> dist = aggregationResult.getSecond();
System.out.println("[StandardBlocker] Block size distribution:");
System.out.println(dist.format());
// determine frequent blocking key values
Processable<Pair<Integer, String>> blockValues = blockedData.aggregate(
(Pair<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>, Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> record,
DataIterator<Pair<Integer, String>> resultCollector)
-> {
int blockSize = record.getFirst().getSecond().getNumElements() * record.getSecond().getSecond().getNumElements();
resultCollector.next(new Pair<Integer, String>(blockSize, record.getFirst().getFirst()));
},
new StringConcatenationAggregator<>(","))
.sort((p)->p.getFirst(), false);
System.out.println("50 most-frequent blocking key values:");
for(Pair<Integer, String> value : blockValues.take(50).get()) {
System.out.println(String.format("\t%d\t%s", value.getFirst(), value.getSecond()));
}
} else {
System.out.println("No blocks were created!");
}
}
// transform the blocks into pairs of records
Processable<Correspondence<BlockedType, CorrespondenceType>> result = blockedData.map(new RecordMapper<Pair<
Pair<String,Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>,
Pair<String,Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>>,
Correspondence<BlockedType, CorrespondenceType>>() {
private static final long serialVersionUID = 1L;
@Override
public void mapRecord(
Pair<
Pair<String,Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>,
Pair<String,Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> record,
DataIterator<Correspondence<BlockedType, CorrespondenceType>> resultCollector) {
// iterate over the left pairs [blocked element],[correspondences]
for(Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> p1 : record.getFirst().getSecond().getElements()){
BlockedType record1 = p1.getFirst();
// iterate over the right pairs [blocked element],[correspondences]
for(Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> p2 : record.getSecond().getSecond().getElements()){
BlockedType record2 = p2.getFirst();
Processable<Correspondence<CorrespondenceType, Matchable>> causes =
new ProcessableCollection<>(p1.getSecond())
.append(p2.getSecond())
.distinct();
int[] pairIds = new int[] { p1.getFirst().getDataSourceIdentifier(), p2.getFirst().getDataSourceIdentifier() };
Arrays.sort(pairIds);
// filter the correspondences such that only correspondences between the two records are contained (by data source id)
causes = causes.where((c)-> {
int[] causeIds = new int[] { c.getFirstRecord().getDataSourceIdentifier(), c.getSecondRecord().getDataSourceIdentifier() };
Arrays.sort(causeIds);
return Arrays.equals(pairIds, causeIds);
});
resultCollector.next(new Correspondence<BlockedType, CorrespondenceType>(record1, record2, 1.0, causes));
}
}
}
});
if(deduplicatePairs) {
//use .distinct() to remove correspondences that are found in multiple blocks
result = result.distinct();
}
calculatePerformance(dataset1, dataset2, result);
return result;
}
/* (non-Javadoc)
* @see de.uni_mannheim.informatik.wdi.matching.blocking.Blocker#runBlocking(de.uni_mannheim.informatik.wdi.model.DataSet, boolean, de.uni_mannheim.informatik.wdi.model.ResultSet, de.uni_mannheim.informatik.wdi.matching.MatchingEngine)
*/
@Override
public Processable<Correspondence<BlockedType, CorrespondenceType>> runBlocking(
DataSet<RecordType, SchemaElementType> dataset,
Processable<Correspondence<CorrespondenceType, Matchable>> schemaCorrespondences) {
// combine the datasets with the schema correspondences
// as we only use one dataset here, we don't know if the record is on the left- or right-hand side of the correspondence
Processable<Pair<RecordType, Processable<Correspondence<CorrespondenceType, Matchable>>>> ds = combineDataWithCorrespondences(dataset, schemaCorrespondences,
(r,c)->
{
c.next(new Pair<>(r.getFirstRecord().getDataSourceIdentifier(),r));
c.next(new Pair<>(r.getSecondRecord().getDataSourceIdentifier(),r));
});
// if we group the records by blocking key, we can obtain duplicates for BlockedType if it is different from RecordType and multiple records generated the same blocking key for BlockedType
// so we aggregate the results to get a unique set of BlockedType elements (using the DistributionAggregator)
// group all records by their blocking keys
Processable<Pair<String, Distribution<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>>> grouped = ds.aggregate(blockingFunction, new DistributionAggregator<String, Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>, Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>>() {
private static final long serialVersionUID = 1L;
@Override
public Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> getInnerKey(
Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> record) {
// change the pairs such that they are considered equal if the first element is equal (ignoring the second element)
return new LeftIdentityPair<>(record.getFirst(), record.getSecond());
}
});
// transform the groups into record pairs
Processable<Correspondence<BlockedType, CorrespondenceType>> blocked = grouped.map((g, collector) ->
{
List<Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>>> list = new ArrayList<>(g.getSecond().getElements());
// sort the list before generating the pairs, so all pairs have the lower data source id on the left-hand side.
list.sort((o1,o2)->Integer.compare(o1.getFirst().getDataSourceIdentifier(), o2.getFirst().getDataSourceIdentifier()));
for(int i = 0; i < list.size(); i++) {
Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> p1 = list.get(i);
for(int j = i+1; j < list.size(); j++) {
Pair<BlockedType, Processable<Correspondence<CorrespondenceType, Matchable>>> p2 = list.get(j);
Processable<Correspondence<CorrespondenceType, Matchable>> causes = new ProcessableCollection<>(p1.getSecond()).append(p2.getSecond());
int[] pairIds = new int[] { p1.getFirst().getDataSourceIdentifier(), p2.getFirst().getDataSourceIdentifier() };
Arrays.sort(pairIds);
// filter the correspondences such that only correspondences between the two records (p1 & p2) are contained (by data source id)
causes = causes.where((c)->
{
int[] causeIds = new int[] { c.getFirstRecord().getDataSourceIdentifier(), c.getSecondRecord().getDataSourceIdentifier() };
Arrays.sort(causeIds);
return Arrays.equals(pairIds, causeIds);
}).distinct();
collector.next(new Correspondence<>(p1.getFirst(), p2.getFirst(), 1.0, causes));
}
}
});
// remove duplicates that were created if two records have multiple matching blocking keys
blocked = blocked.distinct();
calculatePerformance(dataset, dataset, blocked);
return blocked;
}
}
|
added maximum block pair size parameter
|
winter-framework/src/main/java/de/uni_mannheim/informatik/dws/winter/matching/blockers/StandardBlocker.java
|
added maximum block pair size parameter
|
|
Java
|
apache-2.0
|
0d1ff7162ffb6ed818de1bd1374c0deb4b3a045c
| 0
|
bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr
|
/*
* #%L
* %%
* Copyright (C) 2011 - 2017 BMW Car IT GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.joynr.integration;
import java.util.Properties;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import com.google.inject.Module;
import com.google.inject.util.Modules;
import io.joynr.arbitration.ArbitrationStrategy;
import io.joynr.arbitration.DiscoveryQos;
import io.joynr.exceptions.DiscoveryException;
import io.joynr.exceptions.JoynrIllegalStateException;
import io.joynr.exceptions.JoynrRuntimeException;
import io.joynr.exceptions.JoynrShutdownException;
import io.joynr.exceptions.JoynrWaitExpiredException;
import io.joynr.integration.util.DummyJoynrApplication;
import io.joynr.messaging.MessageReceiver;
import io.joynr.messaging.MessagingPropertyKeys;
import io.joynr.messaging.routing.TestGlobalAddressModule;
import io.joynr.provider.JoynrProvider;
import io.joynr.proxy.Future;
import io.joynr.proxy.ProxyBuilder;
import io.joynr.runtime.AbstractJoynrApplication;
import io.joynr.runtime.CCInProcessRuntimeModule;
import io.joynr.runtime.JoynrInjectorFactory;
import joynr.exceptions.ApplicationException;
import joynr.tests.DefaulttestProvider;
import joynr.tests.testProxy;
import joynr.types.ProviderQos;
import joynr.types.ProviderScope;
public class ShutdownTest {

    private DummyJoynrApplication dummyApplication;
    private JoynrProvider provider;

    @Mock
    private MessageReceiver messageReceiverMock;

    private ProviderQos providerQos;

    /** Builds a ProviderQos with LOCAL scope and the current time as priority. */
    private ProviderQos createLocalProviderQos() {
        ProviderQos qos = new ProviderQos();
        qos.setScope(ProviderScope.LOCAL);
        qos.setPriority(System.currentTimeMillis());
        return qos;
    }

    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);

        Properties injectorProperties = new Properties();
        injectorProperties.put(AbstractJoynrApplication.PROPERTY_JOYNR_DOMAIN_LOCAL, "localdomain");
        injectorProperties.put(MessagingPropertyKeys.CHANNELID, "ShutdownTestChannelId");

        Module runtimeModule = Modules.override(new CCInProcessRuntimeModule()).with(new TestGlobalAddressModule());
        dummyApplication = (DummyJoynrApplication) new JoynrInjectorFactory(injectorProperties,
                                                                           runtimeModule).createApplication(DummyJoynrApplication.class);

        provider = new DefaulttestProvider();
        providerQos = createLocalProviderQos();
    }

    @Test(expected = JoynrShutdownException.class)
    public void testRegisterAfterShutdown() {
        // registering a provider on an already shut-down runtime must fail
        dummyApplication.shutdown();
        dummyApplication.getRuntime().registerProvider("ShutdownTestdomain", provider, providerQos);
    }

    @Test(expected = JoynrShutdownException.class)
    public void testUnregisterProviderAfterShutdown() {
        // unregistering after shutdown must fail, even for a provider registered beforehand
        dummyApplication.getRuntime().registerProvider("ShutdownTestdomain", provider, providerQos);
        dummyApplication.shutdown();
        dummyApplication.getRuntime().unregisterProvider("ShutdownTestdomain", provider);
    }

    // NOTE(review): "Multible" is a typo for "Multiple"; the name is kept to preserve the public test interface.
    @Test
    public void unregisterMultibleProvidersBeforeShutdown() throws JoynrWaitExpiredException, JoynrRuntimeException,
                                                            InterruptedException, ApplicationException {
        final int providerCount = 10;
        JoynrProvider[] registeredProviders = new JoynrProvider[providerCount];

        // register all providers and wait for each registration to complete
        for (int i = 0; i < registeredProviders.length; i++) {
            providerQos = createLocalProviderQos();
            registeredProviders[i] = new DefaulttestProvider();
            Future<Void> registrationFuture = dummyApplication.getRuntime()
                                                              .registerProvider("ShutdownTestdomain" + i,
                                                                                registeredProviders[i],
                                                                                providerQos);
            registrationFuture.get();
        }

        // unregister everything again, then shut down
        for (int i = 0; i < registeredProviders.length; i++) {
            dummyApplication.getRuntime().unregisterProvider("ShutdownTestdomain" + i, registeredProviders[i]);
        }
        dummyApplication.shutdown();
    }

    @Test(expected = JoynrShutdownException.class)
    @Ignore
    // test is taking too long because it is attempting to send deregister requests that are not implemented in the mocks
    public void testProxyCallAfterShutdown() throws DiscoveryException, JoynrIllegalStateException,
                                             InterruptedException {
        Mockito.when(messageReceiverMock.getChannelId()).thenReturn("ShutdownTestChannelId");
        dummyApplication.getRuntime().registerProvider("ShutdownTestdomain", provider, providerQos);

        ProxyBuilder<testProxy> builder = dummyApplication.getRuntime().getProxyBuilder("ShutdownTestdomain",
                                                                                       testProxy.class);
        DiscoveryQos discoveryQos = new DiscoveryQos(30000, ArbitrationStrategy.HighestPriority, 0);
        testProxy proxy = builder.setDiscoveryQos(discoveryQos).build();

        dummyApplication.shutdown();
        proxy.getFirstPrime();
    }

    @Ignore
    @Test(expected = JoynrShutdownException.class)
    public void testProxyCreationAfterShutdown() throws DiscoveryException, JoynrIllegalStateException,
                                                 InterruptedException {
        // TODO
        // Arbitration does not check if the runtime is already shutting down. A test like this would fail.
        ProxyBuilder<testProxy> builder = dummyApplication.getRuntime().getProxyBuilder("ShutdownTestdomain",
                                                                                       testProxy.class);
        DiscoveryQos discoveryQos = new DiscoveryQos(30000, ArbitrationStrategy.HighestPriority, 0);
        testProxy proxy = builder.setDiscoveryQos(discoveryQos).build();

        dummyApplication.shutdown();
        proxy.getFirstPrime();
    }
}
|
java/integration-tests/src/test/java/io/joynr/integration/ShutdownTest.java
|
/*
* #%L
* %%
* Copyright (C) 2011 - 2017 BMW Car IT GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.joynr.integration;
import java.util.Properties;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import com.google.inject.Module;
import com.google.inject.util.Modules;
import io.joynr.arbitration.ArbitrationStrategy;
import io.joynr.arbitration.DiscoveryQos;
import io.joynr.exceptions.DiscoveryException;
import io.joynr.exceptions.JoynrIllegalStateException;
import io.joynr.exceptions.JoynrShutdownException;
import io.joynr.integration.util.DummyJoynrApplication;
import io.joynr.messaging.MessageReceiver;
import io.joynr.messaging.MessagingPropertyKeys;
import io.joynr.messaging.routing.TestGlobalAddressModule;
import io.joynr.provider.JoynrProvider;
import io.joynr.proxy.ProxyBuilder;
import io.joynr.runtime.AbstractJoynrApplication;
import io.joynr.runtime.CCInProcessRuntimeModule;
import io.joynr.runtime.JoynrInjectorFactory;
import joynr.tests.DefaulttestProvider;
import joynr.tests.testProxy;
import joynr.types.ProviderQos;
public class ShutdownTest {
private DummyJoynrApplication dummyApplication;
private JoynrProvider provider;
@Mock
private MessageReceiver messageReceiverMock;
private ProviderQos providerQos;
@Before
public void setup() {
Properties factoryPropertiesProvider = new Properties();
factoryPropertiesProvider.put(AbstractJoynrApplication.PROPERTY_JOYNR_DOMAIN_LOCAL, "localdomain");
factoryPropertiesProvider.put(MessagingPropertyKeys.CHANNELID, "ShutdownTestChannelId");
MockitoAnnotations.initMocks(this);
Module runtimeModule = Modules.override(new CCInProcessRuntimeModule()).with(new TestGlobalAddressModule());
dummyApplication = (DummyJoynrApplication) new JoynrInjectorFactory(factoryPropertiesProvider, runtimeModule).createApplication(DummyJoynrApplication.class);
provider = new DefaulttestProvider();
providerQos = new ProviderQos();
providerQos.setPriority(System.currentTimeMillis());
}
@Test(expected = JoynrShutdownException.class)
public void testRegisterAfterShutdown() {
dummyApplication.shutdown();
dummyApplication.getRuntime().registerProvider("ShutdownTestdomain", provider, providerQos);
}
@Test(expected = JoynrShutdownException.class)
@Ignore
// test is taking too long because it is attempting to send deregister requests that are not implemented in the mocks
public void testProxyCallAfterShutdown() throws DiscoveryException, JoynrIllegalStateException,
InterruptedException {
Mockito.when(messageReceiverMock.getChannelId()).thenReturn("ShutdownTestChannelId");
dummyApplication.getRuntime().registerProvider("ShutdownTestdomain", provider, providerQos);
ProxyBuilder<testProxy> proxyBuilder = dummyApplication.getRuntime().getProxyBuilder("ShutdownTestdomain",
testProxy.class);
testProxy proxy = proxyBuilder.setDiscoveryQos(new DiscoveryQos(30000, ArbitrationStrategy.HighestPriority, 0))
.build();
dummyApplication.shutdown();
proxy.getFirstPrime();
}
@Ignore
@Test(expected = JoynrShutdownException.class)
public void testProxyCreationAfterShutdown() throws DiscoveryException, JoynrIllegalStateException,
InterruptedException {
// TODO
// Arbitration does not check if the runtime is already shutting down. A test like this would fail.
ProxyBuilder<testProxy> proxyBuilder = dummyApplication.getRuntime().getProxyBuilder("ShutdownTestdomain",
testProxy.class);
testProxy proxy = proxyBuilder.setDiscoveryQos(new DiscoveryQos(30000, ArbitrationStrategy.HighestPriority, 0))
.build();
dummyApplication.shutdown();
proxy.getFirstPrime();
}
}
|
[Java] ShutdownTest for unregisterProvider shutdown race condition
Change-Id: I04c0e87bb0aef7ce444ae525663798a372a3f61d
|
java/integration-tests/src/test/java/io/joynr/integration/ShutdownTest.java
|
[Java] ShutdownTest for unregisterProvider shutdown race condition
|
|
Java
|
apache-2.0
|
a56d4ccb9d624e915cdecba2575a380a1eea8af8
| 0
|
pax95/camel,adessaigne/camel,christophd/camel,adessaigne/camel,cunningt/camel,cunningt/camel,pax95/camel,christophd/camel,tadayosi/camel,apache/camel,adessaigne/camel,christophd/camel,tadayosi/camel,christophd/camel,apache/camel,christophd/camel,apache/camel,tadayosi/camel,pax95/camel,adessaigne/camel,cunningt/camel,cunningt/camel,adessaigne/camel,tadayosi/camel,apache/camel,tadayosi/camel,tadayosi/camel,cunningt/camel,pax95/camel,christophd/camel,cunningt/camel,pax95/camel,apache/camel,adessaigne/camel,pax95/camel,apache/camel
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.rest.openapi;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.apicurio.datamodels.Library;
import io.apicurio.datamodels.core.models.Document;
import io.apicurio.datamodels.core.models.common.SecurityRequirement;
import io.apicurio.datamodels.openapi.models.OasDocument;
import io.apicurio.datamodels.openapi.models.OasOperation;
import io.apicurio.datamodels.openapi.models.OasParameter;
import io.apicurio.datamodels.openapi.models.OasPathItem;
import io.apicurio.datamodels.openapi.models.OasPaths;
import io.apicurio.datamodels.openapi.models.OasResponse;
import io.apicurio.datamodels.openapi.v2.models.Oas20Document;
import io.apicurio.datamodels.openapi.v2.models.Oas20Operation;
import io.apicurio.datamodels.openapi.v2.models.Oas20Parameter;
import io.apicurio.datamodels.openapi.v2.models.Oas20SecurityDefinitions;
import io.apicurio.datamodels.openapi.v2.models.Oas20SecurityScheme;
import io.apicurio.datamodels.openapi.v3.models.Oas30Document;
import io.apicurio.datamodels.openapi.v3.models.Oas30Operation;
import io.apicurio.datamodels.openapi.v3.models.Oas30Parameter;
import io.apicurio.datamodels.openapi.v3.models.Oas30Response;
import io.apicurio.datamodels.openapi.v3.models.Oas30SecurityScheme;
import io.apicurio.datamodels.openapi.v3.models.Oas30Server;
import org.apache.camel.CamelContext;
import org.apache.camel.Category;
import org.apache.camel.Consumer;
import org.apache.camel.Endpoint;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.RestConfiguration;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.CamelContextHelper;
import org.apache.camel.support.DefaultEndpoint;
import org.apache.camel.support.ResourceHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.StringHelper;
import org.apache.camel.util.UnsafeUriCharactersEncoder;
import static java.util.Optional.ofNullable;
import static org.apache.camel.component.rest.openapi.RestOpenApiHelper.isHostParam;
import static org.apache.camel.component.rest.openapi.RestOpenApiHelper.isMediaRange;
import static org.apache.camel.util.ObjectHelper.isNotEmpty;
import static org.apache.camel.util.ObjectHelper.notNull;
import static org.apache.camel.util.StringHelper.after;
import static org.apache.camel.util.StringHelper.before;
import static org.apache.camel.util.StringHelper.notEmpty;
/**
* Configure REST producers based on an OpenAPI specification document delegating to a component implementing the
* RestProducerFactory interface.
*/
@UriEndpoint(firstVersion = "3.1.0", scheme = "rest-openapi", title = "REST OpenApi",
syntax = "rest-openapi:specificationUri#operationId", category = { Category.OPENAPI, Category.REST, Category.API },
producerOnly = true)
public final class RestOpenApiEndpoint extends DefaultEndpoint {
/**
 * Regex pattern used to extract path parts from OpenApi specification paths
 */
private static final Pattern PATH_EXTRACTOR = Pattern.compile("/([^{}/]+)");
/**
 * Remaining parameters specified in the Endpoint URI.
 */
Map<String, Object> parameters = Collections.emptyMap();
// NOTE: each option below may also be set at the component level; the endpoint-level value wins.
@UriParam(
        description = "API basePath, for example \"`/v2`\". Default is unset, if set overrides the value present in"
                      + " OpenApi specification and in the component configuration.",
        defaultValue = "", label = "producer")
private String basePath;
@UriParam(description = "Name of the Camel component that will perform the requests. The component must be present"
                        + " in Camel registry and it must implement RestProducerFactory service provider interface. If not set"
                        + " CLASSPATH is searched for single component that implements RestProducerFactory SPI. Overrides"
                        + " component configuration.",
          label = "producer")
private String componentName;
@UriParam(
        description = "What payload type this component capable of consuming. Could be one type, like `application/json`"
                      + " or multiple types as `application/json, application/xml; q=0.5` according to the RFC7231. This equates"
                      + " to the value of `Accept` HTTP header. If set overrides any value found in the OpenApi specification and."
                      + " in the component configuration",
        label = "producer")
private String consumes;
@UriParam(description = "Scheme hostname and port to direct the HTTP requests to in the form of"
                        + " `http[s]://hostname[:port]`. Can be configured at the endpoint, component or in the corresponding"
                        + " REST configuration in the Camel Context. If you give this component a name (e.g. `petstore`) that"
                        + " REST configuration is consulted first, `rest-openapi` next, and global configuration last. If set"
                        + " overrides any value found in the OpenApi specification, RestConfiguration. Overrides all other "
                        + " configuration.",
          label = "producer")
private String host;
@UriPath(description = "ID of the operation from the OpenApi specification.", label = "producer")
@Metadata(required = true)
private String operationId;
@UriParam(description = "What payload type this component is producing. For example `application/json`"
                        + " according to the RFC7231. This equates to the value of `Content-Type` HTTP header. If set overrides"
                        + " any value present in the OpenApi specification. Overrides all other configuration.",
          label = "producer")
private String produces;
@UriPath(description = "Path to the OpenApi specification file. The scheme, host base path are taken from this"
                       + " specification, but these can be overridden with properties on the component or endpoint level. If not"
                       + " given the component tries to load `openapi.json` resource from the classpath. Note that the `host` defined on the"
                       + " component and endpoint of this Component should contain the scheme, hostname and optionally the"
                       + " port in the URI syntax (i.e. `http://api.example.com:8080`). Overrides component configuration."
                       + " The OpenApi specification can be loaded from different sources by prefixing with file: classpath: http: https:."
                       + " Support for https is limited to using the JDK installed UrlHandler, and as such it can be cumbersome to setup"
                       + " TLS/SSL certificates for https (such as setting a number of javax.net.ssl JVM system properties)."
                       + " How to do that consult the JDK documentation for UrlHandler.",
         defaultValue = RestOpenApiComponent.DEFAULT_SPECIFICATION_URI_STR,
         defaultValueNote = "By default loads `openapi.json` file", label = "producer")
private URI specificationUri = RestOpenApiComponent.DEFAULT_SPECIFICATION_URI;
public RestOpenApiEndpoint() {
    // help tooling instantiate endpoint
}
/**
 * Creates the endpoint from the URI remainder, which has the form {@code specificationUri#operationId}.
 * When no specification URI part is present the component-level (or default) specification URI is used;
 * when no {@code #} separator is present the whole remainder is taken as the operation id.
 */
public RestOpenApiEndpoint(final String uri, final String remaining, final RestOpenApiComponent component,
                           final Map<String, Object> parameters) {
    super(notEmpty(uri, "uri"), notNull(component, "component"));
    this.parameters = parameters;
    // part before '#' (if any, trimmed) is the spec URI; falls back to component config, then the default
    specificationUri = before(remaining, "#", StringHelper::trimToNull)
            .map(URI::create)
            .orElse(ofNullable(component.getSpecificationUri()).orElse(RestOpenApiComponent.DEFAULT_SPECIFICATION_URI));
    // part after '#' is the operation id; without a '#' the whole remainder is the operation id
    operationId = ofNullable(after(remaining, "#")).orElse(remaining);
    setExchangePattern(ExchangePattern.InOut);
}
@Override
public Consumer createConsumer(final Processor processor) throws Exception {
    // this is a producer-only endpoint (see @UriEndpoint producerOnly = true)
    throw new UnsupportedOperationException("Consumer not supported");
}
@Override
public Producer createProducer() throws Exception {
    final CamelContext camelContext = getCamelContext();
    // parse the OpenApi (2.0 or 3.0) document referenced by specificationUri
    final Document openapiDoc = loadSpecificationFrom(camelContext, specificationUri);
    final OasPaths paths = ((OasDocument) openapiDoc).paths;
    // scan every path/verb combination for the operation matching the configured operationId
    for (final OasPathItem path : paths.getItems()) {
        final Optional<Entry<HttpMethod, OasOperation>> maybeOperationEntry = getOperationMap(path).entrySet()
                .stream().filter(operationEntry -> operationId.equals(operationEntry.getValue().operationId))
                .findAny();
        if (maybeOperationEntry.isPresent()) {
            final Entry<HttpMethod, OasOperation> operationEntry = maybeOperationEntry.get();
            final OasOperation operation = operationEntry.getValue();
            // collect path-style parameters by name so literal values from the endpoint URI can be substituted
            Map<String, OasParameter> pathParameters;
            if (operation.getParameters() != null) {
                pathParameters = operation.getParameters().stream()
                        .filter(p -> "path".equals(p.in))
                        .collect(Collectors.toMap(OasParameter::getName, Function.identity()));
            } else {
                pathParameters = new HashMap<>();
            }
            final String uriTemplate = resolveUri(path.getPath(), pathParameters);
            final HttpMethod httpMethod = operationEntry.getKey();
            final String method = httpMethod.name();
            return createProducerFor(openapiDoc, operation, method, uriTemplate);
        }
    }
    // not found: build a helpful message listing all operation ids defined in the specification
    String supportedOperations = paths.getItems().stream().flatMap(p -> getOperationMap(p).values().stream())
            .map(p -> p.operationId).collect(Collectors.joining(", "));
    throw new IllegalArgumentException(
            "The specified operation with ID: `" + operationId
            + "` cannot be found in the OpenApi specification loaded from `" + specificationUri
            + "`. Operations defined in the specification are: " + supportedOperations);
}
/**
 * Derives an operationId for operations that do not declare one, so they can still be
 * addressed from the endpoint URI. The generated id is the lower-cased HTTP verb followed
 * by each static path segment, joined with '-' (e.g. {@code get-pet-findByStatus}).
 */
private void generateMissingOperationId(String path, OasOperation operation) {
    if (operation.operationId != null) {
        return;
    }
    final StringBuilder generated = new StringBuilder(operation.getMethod().toLowerCase());
    final Matcher segments = PATH_EXTRACTOR.matcher(path);
    while (segments.find()) {
        generated.append('-').append(segments.group(1));
    }
    operation.operationId = generated.toString();
}
/**
 * Collects the operations declared on a path item, keyed by HTTP verb, in the fixed order
 * GET, PUT, POST, DELETE, PATCH, HEAD, OPTIONS. Operations without an id get one generated.
 */
private Map<HttpMethod, OasOperation> getOperationMap(OasPathItem path) {
    final Map<HttpMethod, OasOperation> operations = new LinkedHashMap<>();
    final String uriPath = path.getPath();
    // registers a verb/operation pair, skipping verbs the path does not declare
    final java.util.function.BiConsumer<HttpMethod, OasOperation> register = (verb, operation) -> {
        if (operation != null) {
            generateMissingOperationId(uriPath, operation);
            operations.put(verb, operation);
        }
    };
    register.accept(HttpMethod.GET, path.get);
    register.accept(HttpMethod.PUT, path.put);
    register.accept(HttpMethod.POST, path.post);
    register.accept(HttpMethod.DELETE, path.delete);
    register.accept(HttpMethod.PATCH, path.patch);
    register.accept(HttpMethod.HEAD, path.head);
    register.accept(HttpMethod.OPTIONS, path.options);
    return operations;
}
// --- plain accessors; validation happens in the corresponding setters ---
public String getBasePath() {
    return basePath;
}
public String getComponentName() {
    return componentName;
}
public String getConsumes() {
    return consumes;
}
public String getHost() {
    return host;
}
public String getOperationId() {
    return operationId;
}
public String getProduces() {
    return produces;
}
public URI getSpecificationUri() {
    return specificationUri;
}
@Override
public boolean isLenientProperties() {
    // unknown URI options are kept and passed through to the delegate rest component
    return true;
}
// --- setters validate their input: notEmpty/notNull reject blank or null values,
// isMediaRange/isHostParam validate syntax ---
public void setBasePath(final String basePath) {
    this.basePath = notEmpty(basePath, "basePath");
}
public void setComponentName(final String componentName) {
    this.componentName = notEmpty(componentName, "componentName");
}
public void setConsumes(final String consumes) {
    this.consumes = isMediaRange(consumes, "consumes");
}
public void setHost(final String host) {
    this.host = isHostParam(host);
}
public void setOperationId(final String operationId) {
    this.operationId = notEmpty(operationId, "operationId");
}
public void setProduces(final String produces) {
    this.produces = isMediaRange(produces, "produces");
}
public void setSpecificationUri(final URI specificationUri) {
    this.specificationUri = notNull(specificationUri, "specificationUri");
}
// convenience accessor for the owning component with the concrete type
RestOpenApiComponent component() {
    return (RestOpenApiComponent) getComponent();
}
/**
 * Builds the delegate {@code rest:} producer for one resolved operation: computes the
 * endpoint URI from method, base path and URI template, then configures the delegate
 * endpoint with parameters derived from the specification and this endpoint's options.
 */
Producer createProducerFor(
        final Document openapi, final OasOperation operation, final String method,
        final String uriTemplate)
        throws Exception {
    final String basePath = determineBasePath(openapi);
    final String componentEndpointUri = "rest:" + method + ":" + basePath + ":" + uriTemplate;
    final CamelContext camelContext = getCamelContext();
    final Endpoint endpoint = camelContext.getEndpoint(componentEndpointUri);
    Map<String, Object> params = determineEndpointParameters(openapi, operation);
    boolean hasHost = params.containsKey("host");
    // let the rest endpoint configure itself
    endpoint.configureProperties(params);
    // if there is a host then we should use this hardcoded host instead of any Header that may have an existing
    // Host header from some other HTTP input, and if so then lets remove it
    return new RestOpenApiProducer(endpoint.createAsyncProducer(), hasHost);
}
/**
 * Resolves the base path by precedence: endpoint option, component option, the OpenApi
 * specification itself, the matching RestConfiguration, and finally the component default.
 */
String determineBasePath(final Document openapi) {
    if (isNotEmpty(basePath)) {
        return basePath;
    }
    final String componentBasePath = component().getBasePath();
    if (isNotEmpty(componentBasePath)) {
        return componentBasePath;
    }
    final String specificationBasePath = getBasePathFromOasDocument((OasDocument) openapi);
    if (isNotEmpty(specificationBasePath)) {
        return specificationBasePath;
    }
    final CamelContext camelContext = getCamelContext();
    final RestConfiguration restConfiguration
            = CamelContextHelper.getRestConfiguration(camelContext, null, determineComponentName());
    final String restConfigurationBasePath = restConfiguration.getContextPath();
    if (isNotEmpty(restConfigurationBasePath)) {
        return restConfigurationBasePath;
    }
    return RestOpenApiComponent.DEFAULT_BASE_PATH;
}
/**
 * Extracts the base path declared in the given OpenApi document, or {@code null} when none
 * is declared. OpenApi 2.0 documents carry an explicit {@code basePath}; for 3.0 documents
 * the base path is taken from the first server entry — either its {@code basePath} server
 * variable or the path component of its (variable-expanded) URL.
 *
 * @param openapi parsed OpenApi 2.0 or 3.0 document
 * @return the base path, or {@code null} if the document declares none
 */
public static String getBasePathFromOasDocument(final OasDocument openapi) {
    String basePath = null;
    if (openapi instanceof Oas20Document) {
        basePath = ((Oas20Document) openapi).basePath;
    } else if (openapi instanceof Oas30Document) {
        final Oas30Document oas30Document = (Oas30Document) openapi;
        // BUGFIX: also guard against an empty server list — get(0) on an empty list
        // previously threw IndexOutOfBoundsException
        if (oas30Document.getServers() != null && !oas30Document.getServers().isEmpty()
                && oas30Document.getServers().get(0) != null) {
            try {
                Oas30Server server = (Oas30Server) oas30Document.getServers().get(0);
                if (server.variables != null && server.variables.get("basePath") != null) {
                    basePath = server.variables.get("basePath").default_;
                }
                if (basePath == null) {
                    // parse server url as fallback
                    URL serverUrl = new URL(parseVariables(oas30Document.getServers().get(0).url, server));
                    basePath = serverUrl.getPath();
                    if (basePath.startsWith("//")) {
                        // strip off the first "/" if double "/" exists
                        basePath = basePath.substring(1);
                    }
                    if ("/".equals(basePath)) {
                        basePath = "";
                    }
                }
            } catch (MalformedURLException e) {
                // not a valid whole url, just the basePath
                basePath = oas30Document.getServers().get(0).url;
            }
        }
    }
    return basePath;
}
/**
 * Expands {@code {variable}} placeholders in an OpenApi 3.0 server URL using the server's
 * declared variable defaults. Placeholders without a matching variable (or whose variable
 * has no default) are left untouched.
 *
 * @param url    server URL, possibly containing {@code {variable}} placeholders
 * @param server server declaring the variables; may be {@code null}
 * @return the URL with all resolvable placeholders replaced by their default values
 */
public static String parseVariables(String url, Oas30Server server) {
    Pattern placeholder = Pattern.compile("\\{(.*?)\\}");
    Matcher matcher = placeholder.matcher(url);
    while (matcher.find()) {
        String var = matcher.group(1);
        if (server != null && server.variables != null && server.variables.get(var) != null) {
            String varValue = server.variables.get(var).default_;
            // BUGFIX: a declared variable without a default previously caused an NPE in
            // String.replace(CharSequence, CharSequence); leave such placeholders as-is
            if (varValue != null) {
                url = url.replace("{" + var + "}", varValue);
            }
        }
    }
    return url;
}
/**
 * Returns the delegate component name: the endpoint-level option wins, otherwise the
 * component-level configuration (which may itself be null).
 */
String determineComponentName() {
    // fallback is evaluated unconditionally, matching the original Optional.orElse semantics
    final String componentLevelName = component().getComponentName();
    return componentName != null ? componentName : componentLevelName;
}
/**
 * Computes the configuration map applied to the delegate {@code rest:} endpoint for one
 * operation: producer component name, host, consumes/produces media types (resolved across
 * endpoint, component, operation and specification levels), query-parameter template, and
 * nested component parameters (SSL settings plus any leftover endpoint URI options).
 */
Map<String, Object> determineEndpointParameters(final Document openapi, final OasOperation operation) {
    final Map<String, Object> parameters = new HashMap<>();
    final String componentName = determineComponentName();
    if (componentName != null) {
        parameters.put("producerComponentName", componentName);
    }
    final String host = determineHost(openapi);
    if (host != null) {
        parameters.put("host", host);
    }
    final RestOpenApiComponent component = component();
    // what we consume is what the API defined by OpenApi specification
    // produces
    List<String> specificationLevelConsumers = new ArrayList<>();
    if (openapi instanceof Oas20Document) {
        specificationLevelConsumers = ((Oas20Document) openapi).produces;
    }
    List<String> operationLevelConsumers = new ArrayList<>();
    if (operation instanceof Oas20Operation) {
        operationLevelConsumers = ((Oas20Operation) operation).produces;
    } else if (operation instanceof Oas30Operation) {
        // OpenApi 3.0: response media types live in each response's content map
        Oas30Operation oas30Operation = (Oas30Operation) operation;
        if (oas30Operation.responses != null) {
            for (OasResponse response : oas30Operation.responses.getResponses()) {
                operationLevelConsumers.addAll(((Oas30Response) response).content.keySet());
            }
        }
    }
    final String determinedConsumes = determineOption(specificationLevelConsumers, operationLevelConsumers,
            component.getConsumes(), consumes);
    if (isNotEmpty(determinedConsumes)) {
        parameters.put("consumes", determinedConsumes);
    }
    // what we produce is what the API defined by OpenApi specification
    // consumes
    List<String> specificationLevelProducers = new ArrayList<>();
    if (openapi instanceof Oas20Document) {
        specificationLevelProducers = ((Oas20Document) openapi).consumes;
    }
    List<String> operationLevelProducers = new ArrayList<>();
    if (operation instanceof Oas20Operation) {
        operationLevelProducers = ((Oas20Operation) operation).consumes;
    } else if (operation instanceof Oas30Operation) {
        // OpenApi 3.0: request media types live in the request body's content map
        Oas30Operation oas30Operation = (Oas30Operation) operation;
        if (oas30Operation.requestBody != null && oas30Operation.requestBody.content != null) {
            operationLevelProducers.addAll(oas30Operation.requestBody.content.keySet());
        }
    }
    final String determinedProducers = determineOption(specificationLevelProducers, operationLevelProducers,
            component.getProduces(), produces);
    if (isNotEmpty(determinedProducers)) {
        parameters.put("produces", determinedProducers);
    }
    // query parameters are rendered as "name=value" (literal) or "name={name?}" (placeholder) pairs
    final String queryParameters = determineQueryParameters(openapi, operation).map(this::queryParameter)
            .collect(Collectors.joining("&"));
    if (isNotEmpty(queryParameters)) {
        parameters.put("queryParameters", queryParameters);
    }
    // pass properties that might be applied if the delegate component is
    // created, i.e. if it's not
    // present in the Camel Context already
    final Map<String, Object> componentParameters = new HashMap<>();
    if (component.isUseGlobalSslContextParameters()) {
        // by default it's false
        componentParameters.put("useGlobalSslContextParameters", component.isUseGlobalSslContextParameters());
    }
    if (component.getSslContextParameters() != null) {
        componentParameters.put("sslContextParameters", component.getSslContextParameters());
    }
    final Map<Object, Object> nestedParameters = new HashMap<>();
    if (!componentParameters.isEmpty()) {
        nestedParameters.put("component", componentParameters);
    }
    // Add rest endpoint parameters
    nestedParameters.putAll(this.parameters);
    if (!nestedParameters.isEmpty()) {
        // we're trying to set RestEndpoint.parameters['component']
        parameters.put("parameters", nestedParameters);
    }
    return parameters;
}
/**
 * Resolves the destination host ({@code scheme://host}) by precedence: endpoint option,
 * component option, the OpenApi specification (2.0 scheme/host or the first 3.0 server
 * URL), the matching RestConfiguration, and finally — when the specification was itself
 * loaded over http(s) — the specification URI's own authority.
 *
 * @throws IllegalStateException when no source yields a usable host
 */
String determineHost(final Document openapi) {
    if (isNotEmpty(host)) {
        return host;
    }
    final String componentHost = component().getHost();
    if (isNotEmpty(componentHost)) {
        return componentHost;
    }
    if (openapi instanceof Oas20Document) {
        final String openapiScheme = pickBestScheme(specificationUri.getScheme(), ((Oas20Document) openapi).schemes);
        final String openapiHost = ((Oas20Document) openapi).host;
        if (isNotEmpty(openapiScheme) && isNotEmpty(openapiHost)) {
            return openapiScheme + "://" + openapiHost;
        }
    } else if (openapi instanceof Oas30Document) {
        // In OpenApi 3.0, scheme/host are in servers url section.
        // There could be many server urls (e.g. production and test); use the first one here.
        Oas30Document oas30Document = (Oas30Document) openapi;
        // BUGFIX: also guard against an empty server list — get(0) on an empty list
        // previously threw IndexOutOfBoundsException
        if (oas30Document.getServers() != null && !oas30Document.getServers().isEmpty()
                && oas30Document.getServers().get(0) != null) {
            try {
                URL serverUrl = new URL(
                        parseVariables(oas30Document.getServers().get(0).url,
                                (Oas30Server) oas30Document.getServers().get(0)));
                final String openapiScheme = serverUrl.getProtocol();
                final String openapiHost = serverUrl.getHost();
                if (isNotEmpty(openapiScheme) && isNotEmpty(openapiHost)) {
                    return openapiScheme + "://" + openapiHost;
                }
            } catch (MalformedURLException e) {
                throw new IllegalStateException(e);
            }
        }
    }
    final CamelContext camelContext = getCamelContext();
    final RestConfiguration globalRestConfiguration
            = CamelContextHelper.getRestConfiguration(camelContext, null, determineComponentName());
    final String globalConfigurationHost = hostFrom(globalRestConfiguration);
    if (globalConfigurationHost != null) {
        return globalConfigurationHost;
    }
    final String specificationScheme = specificationUri.getScheme();
    // Perform a case insensitive "startsWith" check that works for different locales
    String prefix = "http";
    if (specificationUri.isAbsolute() && specificationScheme.regionMatches(true, 0, prefix, 0, prefix.length())) {
        try {
            return new URI(
                    specificationUri.getScheme(), specificationUri.getUserInfo(), specificationUri.getHost(),
                    specificationUri.getPort(), null, null, null).toString();
        } catch (final URISyntaxException e) {
            throw new IllegalStateException("Unable to create a new URI from: " + specificationUri, e);
        }
    }
    // BUGFIX: the "` component" suffix was previously concatenated inside the ternary's
    // else-branch only (operator precedence), producing a malformed message whenever a
    // component name was configured
    final String resolvedComponentName = determineComponentName();
    throw new IllegalStateException(
            "Unable to determine destination host for requests. The OpenApi specification"
            + " does not specify `scheme` and `host` parameters, the specification URI is not absolute with `http` or"
            + " `https` scheme, and no RestConfigurations configured with `scheme`, `host` and `port` were found for `"
            + (resolvedComponentName != null ? resolvedComponentName : "default") + "` component"
            + " and there is no global RestConfiguration with those properties");
}
/**
 * Renders the literal value supplied on the endpoint URI for a path parameter,
 * URI-encoded so it can be embedded directly in the request path.
 */
String literalPathParameterValue(final OasParameter parameter) {
    final Object rawValue = parameters.get(parameter.getName());
    return UnsafeUriCharactersEncoder.encode(String.valueOf(rawValue));
}
/**
 * Renders a {@code name=value} query pair using the literal value supplied on the
 * endpoint URI, with the value URI-encoded.
 */
String literalQueryParameterValue(final OasParameter parameter) {
    final String name = parameter.getName();
    final String encodedValue = UnsafeUriCharactersEncoder.encode(String.valueOf(parameters.get(name)));
    return name + "=" + encodedValue;
}
/**
 * Renders one query parameter: an empty string for nameless parameters, a literal
 * {@code name=value} pair when the endpoint URI supplied a value, or a Camel
 * placeholder expression ({@code name={name?}}) otherwise.
 */
String queryParameter(final OasParameter parameter) {
    final String name = parameter.getName();
    if (ObjectHelper.isEmpty(name)) {
        return "";
    }
    return parameters.containsKey(name)
            ? literalQueryParameterValue(parameter)
            : queryParameterExpression(parameter);
}
/**
 * Substitutes literal endpoint-URI values into a path template: each {@code {name}}
 * placeholder whose name was supplied as an endpoint parameter is replaced with its
 * URI-encoded value; all other placeholders are kept verbatim for the delegate producer.
 *
 * @param uriTemplate    OpenApi path template, e.g. {@code /pet/{petId}}
 * @param pathParameters path parameters of the operation, keyed by name
 * @return the template with resolvable placeholders replaced
 */
String resolveUri(final String uriTemplate, final Map<String, OasParameter> pathParameters) {
    if (pathParameters.isEmpty()) {
        return uriTemplate;
    }
    int start = uriTemplate.indexOf('{');
    if (start == -1) {
        return uriTemplate;
    }
    int pos = 0;
    final StringBuilder resolved = new StringBuilder(uriTemplate.length() * 2);
    while (start != -1) {
        resolved.append(uriTemplate, pos, start);
        final int end = uriTemplate.indexOf('}', start);
        if (end == -1) {
            // BUGFIX: an unmatched '{' previously threw StringIndexOutOfBoundsException
            // via substring(start + 1, -1); keep the malformed tail verbatim instead
            pos = start;
            break;
        }
        final String name = uriTemplate.substring(start + 1, end);
        if (parameters.containsKey(name)) {
            final OasParameter parameter = pathParameters.get(name);
            resolved.append(literalPathParameterValue(parameter));
        } else {
            // no literal value supplied; keep the placeholder for the delegate producer
            resolved.append('{').append(name).append('}');
        }
        pos = end + 1;
        start = uriTemplate.indexOf('{', pos);
    }
    if (pos < uriTemplate.length()) {
        resolved.append(uriTemplate, pos, uriTemplate.length());
    }
    return resolved.toString();
}
/**
 * Picks a media-type option by precedence: endpoint value, component value, then the
 * operation-level list, then the specification-level list (lists are joined with ", ").
 * Returns {@code null} when no level provides a value.
 */
static String determineOption(
        final List<String> specificationLevel, final List<String> operationLevel,
        final String componentLevel, final String endpointLevel) {
    if (isNotEmpty(endpointLevel)) {
        return endpointLevel;
    }
    if (isNotEmpty(componentLevel)) {
        return componentLevel;
    }
    // operation-level media types take precedence over specification-level ones
    final List<String> mediaTypes = operationLevel != null && !operationLevel.isEmpty()
            ? operationLevel
            : specificationLevel;
    if (mediaTypes != null && !mediaTypes.isEmpty()) {
        return String.join(", ", mediaTypes);
    }
    return null;
}
/**
 * Streams the query parameters of an operation, including synthetic parameters for any
 * query-located API-key security schemes the operation requires (so the key can be passed
 * as a query option on the endpoint URI).
 *
 * @throws IllegalStateException when the document is neither OpenApi 2.0 nor 3.0
 */
static Stream<OasParameter> determineQueryParameters(final Document openapi, final OasOperation operation) {
    final List<SecurityRequirement> securityRequirements = operation.security;
    final List<OasParameter> apiKeyQueryParameters = new ArrayList<>();
    if (securityRequirements != null) {
        if (openapi instanceof Oas20Document) {
            Oas20Document oas20Document = (Oas20Document) openapi;
            Oas20SecurityDefinitions securityDefinitions = oas20Document.securityDefinitions;
            for (final SecurityRequirement securityRequirement : securityRequirements) {
                for (final String securityRequirementName : securityRequirement.getSecurityRequirementNames()) {
                    final Oas20SecurityScheme securitySchemeDefinition = securityDefinitions
                            .getSecurityScheme(securityRequirementName);
                    // only API keys passed via the query string need a synthetic parameter
                    if (securitySchemeDefinition.in != null
                            && securitySchemeDefinition.in.equals("query")) {
                        Oas20Parameter securityParameter = new Oas20Parameter(securitySchemeDefinition.name);
                        securityParameter.required = true;
                        securityParameter.type = "string";
                        securityParameter.description = securitySchemeDefinition.description;
                        apiKeyQueryParameters.add(securityParameter);
                    }
                }
            }
        } else if (openapi instanceof Oas30Document) {
            // OpenApi 3.0 stores security schemes under components rather than securityDefinitions
            Oas30Document oas30Document = (Oas30Document) openapi;
            for (final SecurityRequirement securityRequirement : securityRequirements) {
                for (final String securityRequirementName : securityRequirement.getSecurityRequirementNames()) {
                    final Oas30SecurityScheme securitySchemeDefinition = oas30Document.components
                            .getSecurityScheme(securityRequirementName);
                    if (securitySchemeDefinition.in != null && securitySchemeDefinition.in.equals("query")) {
                        Oas30Parameter securityParameter = new Oas30Parameter(securitySchemeDefinition.name);
                        securityParameter.required = true;
                        securityParameter.description = securitySchemeDefinition.description;
                        apiKeyQueryParameters.add(securityParameter);
                    }
                }
            }
        } else {
            throw new IllegalStateException("We only support OpenApi 2.0 or 3.0 document here");
        }
    }
    // synthetic security parameters come first, followed by the operation's own query parameters
    if (operation.getParameters() != null) {
        return Stream.concat(apiKeyQueryParameters.stream(),
                operation.getParameters().stream().filter(p -> "query".equals(p.in)));
    } else {
        return apiKeyQueryParameters.stream();
    }
}
/**
 * Builds a {@code scheme://host[:port]} string from a RestConfiguration, or returns
 * {@code null} when the configuration (or its scheme/host) is absent. Default ports
 * (80 for http, 443 for https) are omitted.
 */
static String hostFrom(final RestConfiguration restConfiguration) {
    if (restConfiguration == null) {
        return null;
    }
    final String scheme = restConfiguration.getScheme();
    final String host = restConfiguration.getHost();
    final int port = restConfiguration.getPort();
    if (scheme == null || host == null) {
        return null;
    }
    final boolean isDefaultHttpPort = "http".equalsIgnoreCase(scheme) && port == 80;
    final boolean isDefaultHttpsPort = "https".equalsIgnoreCase(scheme) && port == 443;
    final StringBuilder authority = new StringBuilder(scheme).append("://").append(host);
    if (port > 0 && !isDefaultHttpPort && !isDefaultHttpsPort) {
        authority.append(':').append(port);
    }
    return authority.toString();
}
/**
 * Loads the OpenApi definition model from the given path. Tries to resolve the resource using Camel's resource
 * loading support, if it fails uses OpenApi's resource loading support instead.
 *
 * @param  camelContext context to use
 * @param  uri          URI of the specification
 * @return              the specification
 * @throws IllegalArgumentException when the specification cannot be resolved or parsed
 */
static Document loadSpecificationFrom(final CamelContext camelContext, final URI uri) {
    final ObjectMapper mapper = new ObjectMapper();
    final String uriAsString = uri.toString();
    // try-with-resources ensures the resolved stream is closed even when parsing fails
    try (InputStream stream = ResourceHelper.resolveMandatoryResourceAsInputStream(camelContext, uriAsString)) {
        final JsonNode node = mapper.readTree(stream);
        return Library.readDocument(node);
    } catch (final Exception e) {
        throw new IllegalArgumentException(
                "The given OpenApi specification could not be loaded from `" + uri
                + "`. Tried loading using Camel's resource resolution and using OpenApi's own resource resolution."
                + " OpenApi tends to swallow exceptions while parsing, try specifying Java system property `debugParser`"
                + " (e.g. `-DdebugParser=true`), the exception that occurred when loading using Camel's resource"
                + " loader follows",
                e);
    }
}
/**
 * Chooses the preferred scheme from a specification's scheme list: https wins over http;
 * when neither is listed the scheme of the specification URI is used as-is.
 * There is no support for WebSocket (Scheme.WS, Scheme.WSS).
 */
static String pickBestScheme(final String specificationScheme, final List<String> schemes) {
    if (schemes != null) {
        for (final String preferred : new String[] { "https", "http" }) {
            if (schemes.contains(preferred)) {
                return preferred;
            }
        }
    }
    return specificationScheme;
}
/**
 * Renders a Camel query-parameter placeholder for an OpenApi parameter, e.g.
 * {@code limit={limit?}}; the trailing '?' marks the parameter optional and is omitted
 * only when the specification declares the parameter required.
 */
static String queryParameterExpression(final OasParameter parameter) {
    final String name = parameter.getName();
    // parameter.required is a Boolean; null counts as "not required"
    final boolean required = Boolean.TRUE.equals(parameter.required);
    return name + "={" + name + (required ? "" : "?") + "}";
}
// HTTP verbs recognised on an OpenApi path item; declaration order here is independent of
// the GET-first lookup order used by getOperationMap.
enum HttpMethod {
    POST,
    GET,
    PUT,
    PATCH,
    DELETE,
    HEAD,
    OPTIONS
}
}
|
components/camel-rest-openapi/src/main/java/org/apache/camel/component/rest/openapi/RestOpenApiEndpoint.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.rest.openapi;
import static java.util.Optional.ofNullable;
import static org.apache.camel.component.rest.openapi.RestOpenApiHelper.isHostParam;
import static org.apache.camel.component.rest.openapi.RestOpenApiHelper.isMediaRange;
import static org.apache.camel.util.ObjectHelper.isNotEmpty;
import static org.apache.camel.util.ObjectHelper.notNull;
import static org.apache.camel.util.StringHelper.after;
import static org.apache.camel.util.StringHelper.before;
import static org.apache.camel.util.StringHelper.notEmpty;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.camel.CamelContext;
import org.apache.camel.Category;
import org.apache.camel.Consumer;
import org.apache.camel.Endpoint;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.RestConfiguration;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.CamelContextHelper;
import org.apache.camel.support.DefaultEndpoint;
import org.apache.camel.support.ResourceHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.StringHelper;
import org.apache.camel.util.UnsafeUriCharactersEncoder;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.apicurio.datamodels.Library;
import io.apicurio.datamodels.core.models.Document;
import io.apicurio.datamodels.core.models.common.SecurityRequirement;
import io.apicurio.datamodels.openapi.models.OasDocument;
import io.apicurio.datamodels.openapi.models.OasOperation;
import io.apicurio.datamodels.openapi.models.OasParameter;
import io.apicurio.datamodels.openapi.models.OasPathItem;
import io.apicurio.datamodels.openapi.models.OasPaths;
import io.apicurio.datamodels.openapi.models.OasResponse;
import io.apicurio.datamodels.openapi.v2.models.Oas20Document;
import io.apicurio.datamodels.openapi.v2.models.Oas20Operation;
import io.apicurio.datamodels.openapi.v2.models.Oas20Parameter;
import io.apicurio.datamodels.openapi.v2.models.Oas20SecurityDefinitions;
import io.apicurio.datamodels.openapi.v2.models.Oas20SecurityScheme;
import io.apicurio.datamodels.openapi.v3.models.Oas30Document;
import io.apicurio.datamodels.openapi.v3.models.Oas30Operation;
import io.apicurio.datamodels.openapi.v3.models.Oas30Parameter;
import io.apicurio.datamodels.openapi.v3.models.Oas30Response;
import io.apicurio.datamodels.openapi.v3.models.Oas30SecurityScheme;
import io.apicurio.datamodels.openapi.v3.models.Oas30Server;
/**
* Configure REST producers based on an OpenAPI specification document delegating to a component implementing the
* RestProducerFactory interface.
*/
@UriEndpoint(firstVersion = "3.1.0", scheme = "rest-openapi", title = "REST OpenApi",
syntax = "rest-openapi:specificationUri#operationId", category = { Category.OPENAPI, Category.REST, Category.API },
producerOnly = true)
public final class RestOpenApiEndpoint extends DefaultEndpoint {
/**
* Regex pattern used to extract path parts from OpenApi specification paths
*/
private static final Pattern PATH_EXTRACTOR = Pattern.compile("/([^{}/]+)");
/**
* Remaining parameters specified in the Endpoint URI.
*/
Map<String, Object> parameters = Collections.emptyMap();
@UriParam(
description = "API basePath, for example \"`/v2`\". Default is unset, if set overrides the value present in"
+ " OpenApi specification and in the component configuration.",
defaultValue = "", label = "producer")
private String basePath;
@UriParam(description = "Name of the Camel component that will perform the requests. The component must be present"
+ " in Camel registry and it must implement RestProducerFactory service provider interface. If not set"
+ " CLASSPATH is searched for single component that implements RestProducerFactory SPI. Overrides"
+ " component configuration.",
label = "producer")
private String componentName;
@UriParam(
description = "What payload type this component capable of consuming. Could be one type, like `application/json`"
+ " or multiple types as `application/json, application/xml; q=0.5` according to the RFC7231. This equates"
+ " to the value of `Accept` HTTP header. If set overrides any value found in the OpenApi specification and."
+ " in the component configuration",
label = "producer")
private String consumes;
@UriParam(description = "Scheme hostname and port to direct the HTTP requests to in the form of"
+ " `http[s]://hostname[:port]`. Can be configured at the endpoint, component or in the corresponding"
+ " REST configuration in the Camel Context. If you give this component a name (e.g. `petstore`) that"
+ " REST configuration is consulted first, `rest-openapi` next, and global configuration last. If set"
+ " overrides any value found in the OpenApi specification, RestConfiguration. Overrides all other "
+ " configuration.",
label = "producer")
private String host;
@UriPath(description = "ID of the operation from the OpenApi specification.", label = "producer")
@Metadata(required = true)
private String operationId;
@UriParam(description = "What payload type this component is producing. For example `application/json`"
+ " according to the RFC7231. This equates to the value of `Content-Type` HTTP header. If set overrides"
+ " any value present in the OpenApi specification. Overrides all other configuration.",
label = "producer")
private String produces;
@UriPath(description = "Path to the OpenApi specification file. The scheme, host base path are taken from this"
+ " specification, but these can be overridden with properties on the component or endpoint level. If not"
+ " given the component tries to load `openapi.json` resource from the classpath. Note that the `host` defined on the"
+ " component and endpoint of this Component should contain the scheme, hostname and optionally the"
+ " port in the URI syntax (i.e. `http://api.example.com:8080`). Overrides component configuration."
+ " The OpenApi specification can be loaded from different sources by prefixing with file: classpath: http: https:."
+ " Support for https is limited to using the JDK installed UrlHandler, and as such it can be cumbersome to setup"
+ " TLS/SSL certificates for https (such as setting a number of javax.net.ssl JVM system properties)."
+ " How to do that consult the JDK documentation for UrlHandler.",
defaultValue = RestOpenApiComponent.DEFAULT_SPECIFICATION_URI_STR,
defaultValueNote = "By default loads `openapi.json` file", label = "producer")
private URI specificationUri = RestOpenApiComponent.DEFAULT_SPECIFICATION_URI;
public RestOpenApiEndpoint() {
    // help tooling instantiate endpoint
}

/**
 * Creates the endpoint from the route URI. The remainder has the form
 * {@code specificationUri#operationId}; when no {@code #} is present the whole
 * remainder is taken as the operation id.
 *
 * @param uri        full endpoint URI (must be non-empty)
 * @param remaining  the part after the scheme, i.e. "specificationUri#operationId"
 * @param component  owning component (must be non-null)
 * @param parameters remaining URI parameters, kept for the delegate producer
 */
public RestOpenApiEndpoint(final String uri, final String remaining, final RestOpenApiComponent component,
        final Map<String, Object> parameters) {
    super(notEmpty(uri, "uri"), notNull(component, "component"));
    this.parameters = parameters;
    // Specification URI: text before '#', else the component's configured URI,
    // else the default "openapi.json" location.
    specificationUri = before(remaining, "#", StringHelper::trimToNull)
            .map(URI::create)
            .orElse(ofNullable(component.getSpecificationUri()).orElse(RestOpenApiComponent.DEFAULT_SPECIFICATION_URI));
    // Operation id: text after '#', or the whole remainder when there is no '#'.
    operationId = ofNullable(after(remaining, "#")).orElse(remaining);
    setExchangePattern(ExchangePattern.InOut);
}
@Override
public Consumer createConsumer(final Processor processor) throws Exception {
    // This endpoint is producer-only (see @UriEndpoint(producerOnly = true)).
    throw new UnsupportedOperationException("Consumer not supported");
}
@Override
public Producer createProducer() throws Exception {
    // Load the OpenApi document and search every path item for an operation whose
    // operationId matches the one configured on this endpoint.
    final CamelContext camelContext = getCamelContext();
    final Document openapiDoc = loadSpecificationFrom(camelContext, specificationUri);
    final OasPaths paths = ((OasDocument) openapiDoc).paths;

    for (final OasPathItem path : paths.getItems()) {
        final Optional<Entry<HttpMethod, OasOperation>> maybeOperationEntry = getOperationMap(path).entrySet()
                .stream().filter(operationEntry -> operationId.equals(operationEntry.getValue().operationId))
                .findAny();

        if (maybeOperationEntry.isPresent()) {
            final Entry<HttpMethod, OasOperation> operationEntry = maybeOperationEntry.get();
            final OasOperation operation = operationEntry.getValue();

            // Index the operation's path parameters by name so resolveUri() can substitute
            // literal values supplied on the endpoint URI.
            Map<String, OasParameter> pathParameters;
            if (operation.getParameters() != null) {
                pathParameters = operation.getParameters().stream()
                        .filter(p -> "path".equals(p.in))
                        .collect(Collectors.toMap(OasParameter::getName, Function.identity()));
            } else {
                pathParameters = new HashMap<>();
            }
            final String uriTemplate = resolveUri(path.getPath(), pathParameters);

            final HttpMethod httpMethod = operationEntry.getKey();
            final String method = httpMethod.name();

            return createProducerFor(openapiDoc, operation, method, uriTemplate);
        }
    }

    // No match: build a helpful error message listing every operation id the document defines.
    String supportedOperations = paths.getItems().stream().flatMap(p -> getOperationMap(p).values().stream())
            .map(p -> p.operationId).collect(Collectors.joining(", "));

    throw new IllegalArgumentException(
            "The specified operation with ID: `" + operationId
                + "` cannot be found in the OpenApi specification loaded from `" + specificationUri
                + "`. Operations defined in the specification are: " + supportedOperations);
}
/**
 * Generates an operationId from the provided OpenApi specification path and operation when the
 * specification did not declare one. The id is the lower-cased HTTP method followed by each
 * literal (non-template) path segment, joined by '-', e.g. {@code get-pet-findByStatus}.
 *
 * @param path      the OpenApi path template the operation belongs to
 * @param operation the operation to mutate; left untouched when it already has an id
 */
private void generateMissingOperationId(String path, OasOperation operation) {
    if (null == operation.operationId) {
        // Use Locale.ROOT so the generated id is stable regardless of the JVM default locale
        // (e.g. "OPTIONS".toLowerCase() under a Turkish locale would produce a dotless i).
        final StringBuilder idBuilder = new StringBuilder(operation.getMethod().toLowerCase(java.util.Locale.ROOT));

        final Matcher matcher = PATH_EXTRACTOR.matcher(path);
        while (matcher.find()) {
            idBuilder.append('-').append(matcher.group(1));
        }

        operation.operationId = idBuilder.toString();
    }
}
/**
 * Collects the operations defined on the given path item, keyed by HTTP method in a fixed
 * declaration order (GET, PUT, POST, DELETE, PATCH, HEAD, OPTIONS). Any operation lacking an
 * operationId gets one generated from its method and path before being registered.
 */
private Map<HttpMethod, OasOperation> getOperationMap(OasPathItem path) {
    final Map<HttpMethod, OasOperation> operations = new LinkedHashMap<>();
    final String uriPath = path.getPath();

    putOperation(operations, HttpMethod.GET, path.get, uriPath);
    putOperation(operations, HttpMethod.PUT, path.put, uriPath);
    putOperation(operations, HttpMethod.POST, path.post, uriPath);
    putOperation(operations, HttpMethod.DELETE, path.delete, uriPath);
    putOperation(operations, HttpMethod.PATCH, path.patch, uriPath);
    putOperation(operations, HttpMethod.HEAD, path.head, uriPath);
    putOperation(operations, HttpMethod.OPTIONS, path.options, uriPath);

    return operations;
}

/** Registers the operation under the given method when defined, generating a missing operationId first. */
private void putOperation(
        final Map<HttpMethod, OasOperation> operations, final HttpMethod method,
        final OasOperation operation, final String uriPath) {
    if (operation != null) {
        generateMissingOperationId(uriPath, operation);
        operations.put(method, operation);
    }
}
// --- plain accessors; setters validate their input via the statically imported helpers ---

public String getBasePath() {
    return basePath;
}

public String getComponentName() {
    return componentName;
}

public String getConsumes() {
    return consumes;
}

public String getHost() {
    return host;
}

public String getOperationId() {
    return operationId;
}

public String getProduces() {
    return produces;
}

public URI getSpecificationUri() {
    return specificationUri;
}

@Override
public boolean isLenientProperties() {
    // Lenient so unknown endpoint URI options are accepted; they end up in the
    // `parameters` map and are forwarded to the delegate REST producer.
    return true;
}

public void setBasePath(final String basePath) {
    this.basePath = notEmpty(basePath, "basePath");
}

public void setComponentName(final String componentName) {
    this.componentName = notEmpty(componentName, "componentName");
}

public void setConsumes(final String consumes) {
    // must be a valid media range per RFC 7231 (e.g. "application/json")
    this.consumes = isMediaRange(consumes, "consumes");
}

public void setHost(final String host) {
    // must be of the form http[s]://hostname[:port]
    this.host = isHostParam(host);
}

public void setOperationId(final String operationId) {
    this.operationId = notEmpty(operationId, "operationId");
}

public void setProduces(final String produces) {
    this.produces = isMediaRange(produces, "produces");
}

public void setSpecificationUri(final URI specificationUri) {
    this.specificationUri = notNull(specificationUri, "specificationUri");
}

RestOpenApiComponent component() {
    return (RestOpenApiComponent) getComponent();
}
/**
 * Builds the delegate "rest:" endpoint URI for the matched operation, configures it from the
 * OpenApi document and this endpoint's options, and wraps its async producer.
 *
 * @param openapi     the loaded specification document
 * @param operation   the matched operation
 * @param method      HTTP method name (e.g. "GET")
 * @param uriTemplate the path template with literal parameters already substituted
 */
Producer createProducerFor(
        final Document openapi, final OasOperation operation, final String method,
        final String uriTemplate)
        throws Exception {
    final String basePath = determineBasePath(openapi);
    final String componentEndpointUri = "rest:" + method + ":" + basePath + ":" + uriTemplate;
    final CamelContext camelContext = getCamelContext();

    final Endpoint endpoint = camelContext.getEndpoint(componentEndpointUri);

    Map<String, Object> params = determineEndpointParameters(openapi, operation);
    boolean hasHost = params.containsKey("host");
    // let the rest endpoint configure itself
    endpoint.configureProperties(params);

    // if there is a host then we should use this hardcoded host instead of any Header that may have an existing
    // Host header from some other HTTP input, and if so then lets remove it
    return new RestOpenApiProducer(endpoint.createAsyncProducer(), hasHost);
}
/**
 * Resolves the base (context) path for requests, consulting the most specific source first:
 * endpoint basePath, component basePath, the OpenApi document itself, the matching
 * RestConfiguration's context path, and finally the component default.
 */
String determineBasePath(final Document openapi) {
    if (isNotEmpty(basePath)) {
        return basePath;
    }

    final String componentBasePath = component().getBasePath();
    if (isNotEmpty(componentBasePath)) {
        return componentBasePath;
    }

    final String specificationBasePath = getBasePathFromOasDocument((OasDocument) openapi);
    if (isNotEmpty(specificationBasePath)) {
        return specificationBasePath;
    }

    final CamelContext camelContext = getCamelContext();

    final RestConfiguration restConfiguration
            = CamelContextHelper.getRestConfiguration(camelContext, null, determineComponentName());
    final String restConfigurationBasePath = restConfiguration.getContextPath();

    if (isNotEmpty(restConfigurationBasePath)) {
        return restConfigurationBasePath;
    }

    return RestOpenApiComponent.DEFAULT_BASE_PATH;
}
/**
 * Extracts the base path from an OpenApi document. Swagger 2.0 documents carry it directly in
 * {@code basePath}; OpenApi 3.0 documents derive it from the first server entry, either via a
 * "basePath" server variable or by parsing the path component of the server URL.
 *
 * @return the base path, or {@code null} when the document does not define one
 */
public static String getBasePathFromOasDocument(final OasDocument openapi) {
    String basePath = null;
    if (openapi instanceof Oas20Document) {
        basePath = ((Oas20Document) openapi).basePath;
    } else if (openapi instanceof Oas30Document) {
        if (((Oas30Document) openapi).getServers() != null
                && ((Oas30Document) openapi).getServers().get(0) != null) {
            try {
                Oas30Server server = (Oas30Server) ((Oas30Document) openapi).getServers().get(0);
                // prefer an explicit "basePath" server variable when the document declares one
                if (server.variables != null && server.variables.get("basePath") != null) {
                    basePath = server.variables.get("basePath").default_;
                }
                if (basePath == null) {
                    // parse server url as fallback
                    URL serverUrl = new URL(parseVariables(((Oas30Document) openapi).getServers().get(0).url, server));
                    basePath = serverUrl.getPath();
                    if (basePath.indexOf("//") == 0) {
                        // strip off the first "/" if double "/" exists
                        basePath = basePath.substring(1);
                    }
                    if ("/".equals(basePath)) {
                        basePath = "";
                    }
                }
            } catch (MalformedURLException e) {
                //not a valid whole url, just the basePath
                basePath = ((Oas30Document) openapi).getServers().get(0).url;
            }
        }
    }
    return basePath;
}
/** Pre-compiled "{name}" template pattern, compiled once instead of on every call. */
private static final Pattern SERVER_VARIABLE_PATTERN = Pattern.compile("\\{(.*?)\\}");

/**
 * Substitutes OpenApi 3.0 server-variable references of the form {@code {name}} in the given URL
 * with the variable's default value. References without a matching variable, or whose variable
 * has no default, are left untouched.
 *
 * @param url    the server URL possibly containing {name} placeholders
 * @param server the server holding variable definitions; may be {@code null}
 * @return the URL with resolvable placeholders replaced
 */
public static String parseVariables(String url, Oas30Server server) {
    Matcher m = SERVER_VARIABLE_PATTERN.matcher(url);
    while (m.find()) {
        String var = m.group(1);
        if (server != null && server.variables != null && server.variables.get(var) != null) {
            String varValue = server.variables.get(var).default_;
            // guard: a declared variable without a default would otherwise NPE in replace()
            if (varValue != null) {
                url = url.replace("{" + var + "}", varValue);
            }
        }
    }
    return url;
}
/** Returns the delegate component name: endpoint-level setting first, component-level as fallback. */
String determineComponentName() {
    final String endpointLevelName = componentName;
    return endpointLevelName != null ? endpointLevelName : component().getComponentName();
}
/**
 * Assembles the configuration map applied to the delegate "rest:" endpoint: producer component
 * name, destination host, consumes/produces media types (merged from specification, operation,
 * component and endpoint levels), the query-parameter template, and nested component/endpoint
 * parameters.
 */
Map<String, Object> determineEndpointParameters(final Document openapi, final OasOperation operation) {
    final Map<String, Object> parameters = new HashMap<>();

    final String componentName = determineComponentName();
    if (componentName != null) {
        parameters.put("producerComponentName", componentName);
    }

    final String host = determineHost(openapi);
    if (host != null) {
        parameters.put("host", host);
    }

    final RestOpenApiComponent component = component();

    // what we consume is what the API defined by OpenApi specification
    // produces
    List<String> specificationLevelConsumers = new ArrayList<>();
    if (openapi instanceof Oas20Document) {
        specificationLevelConsumers = ((Oas20Document) openapi).produces;
    }
    List<String> operationLevelConsumers = new ArrayList<>();
    if (operation instanceof Oas20Operation) {
        operationLevelConsumers = ((Oas20Operation) operation).produces;
    } else if (operation instanceof Oas30Operation) {
        // OpenApi 3.0 has no "produces" list; collect the media types of all response bodies instead
        Oas30Operation oas30Operation = (Oas30Operation) operation;
        if (oas30Operation.responses != null) {
            for (OasResponse response : oas30Operation.responses.getResponses()) {
                operationLevelConsumers.addAll(((Oas30Response) response).content.keySet());
            }
        }
    }

    final String determinedConsumes = determineOption(specificationLevelConsumers, operationLevelConsumers,
            component.getConsumes(), consumes);

    if (isNotEmpty(determinedConsumes)) {
        parameters.put("consumes", determinedConsumes);
    }

    // what we produce is what the API defined by OpenApi specification
    // consumes
    List<String> specificationLevelProducers = new ArrayList<>();
    if (openapi instanceof Oas20Document) {
        specificationLevelProducers = ((Oas20Document) openapi).consumes;
    }
    List<String> operationLevelProducers = new ArrayList<>();
    if (operation instanceof Oas20Operation) {
        operationLevelProducers = ((Oas20Operation) operation).consumes;
    } else if (operation instanceof Oas30Operation) {
        // OpenApi 3.0: the request body's declared media types take the role of "consumes"
        Oas30Operation oas30Operation = (Oas30Operation) operation;
        if (oas30Operation.requestBody != null && oas30Operation.requestBody.content != null) {
            operationLevelProducers.addAll(oas30Operation.requestBody.content.keySet());
        }
    }

    final String determinedProducers = determineOption(specificationLevelProducers, operationLevelProducers,
            component.getProduces(), produces);

    if (isNotEmpty(determinedProducers)) {
        parameters.put("produces", determinedProducers);
    }

    // build the "a=1&b={b?}" style query-parameter template for the delegate producer
    final String queryParameters = determineQueryParameters(openapi, operation).map(this::queryParameter)
            .collect(Collectors.joining("&"));
    if (isNotEmpty(queryParameters)) {
        parameters.put("queryParameters", queryParameters);
    }

    // pass properties that might be applied if the delegate component is
    // created, i.e. if it's not
    // present in the Camel Context already
    final Map<String, Object> componentParameters = new HashMap<>();

    if (component.isUseGlobalSslContextParameters()) {
        // by default it's false
        componentParameters.put("useGlobalSslContextParameters", component.isUseGlobalSslContextParameters());
    }
    if (component.getSslContextParameters() != null) {
        componentParameters.put("sslContextParameters", component.getSslContextParameters());
    }

    final Map<Object, Object> nestedParameters = new HashMap<>();
    if (!componentParameters.isEmpty()) {
        nestedParameters.put("component", componentParameters);
    }

    // Add rest endpoint parameters
    nestedParameters.putAll(this.parameters);

    if (!nestedParameters.isEmpty()) {
        // we're trying to set RestEndpoint.parameters['component']
        parameters.put("parameters", nestedParameters);
    }

    return parameters;
}
/**
 * Determines the "scheme://host[:port]" to direct requests to, consulting the most specific
 * source first: endpoint host, component host, the OpenApi document (Swagger 2.0
 * {@code schemes}/{@code host}, or the first OpenApi 3.0 server URL), the matching
 * RestConfiguration, and finally the specification URI itself when it is an absolute http(s) URI.
 *
 * @throws IllegalStateException when no source yields a usable host
 */
String determineHost(final Document openapi) {
    if (isNotEmpty(host)) {
        return host;
    }

    final String componentHost = component().getHost();
    if (isNotEmpty(componentHost)) {
        return componentHost;
    }

    if (openapi instanceof Oas20Document) {
        final String openapiScheme = pickBestScheme(specificationUri.getScheme(), ((Oas20Document) openapi).schemes);
        final String openapiHost = ((Oas20Document) openapi).host;

        if (isNotEmpty(openapiScheme) && isNotEmpty(openapiHost)) {
            return openapiScheme + "://" + openapiHost;
        }
    } else if (openapi instanceof Oas30Document) {
        // In OpenApi 3.0, scheme/host are in the servers url section.
        // There could be many server urls (e.g. one for production, one for test); use the first one here.
        Oas30Document oas30Document = (Oas30Document) openapi;
        if (oas30Document.getServers() != null
                && oas30Document.getServers().get(0) != null) {
            try {
                URL serverUrl = new URL(
                        parseVariables(oas30Document.getServers().get(0).url,
                                (Oas30Server) oas30Document.getServers().get(0)));
                final String openapiScheme = serverUrl.getProtocol();
                final String openapiHost = serverUrl.getHost();
                if (isNotEmpty(openapiScheme) && isNotEmpty(openapiHost)) {
                    return openapiScheme + "://" + openapiHost;
                }
            } catch (MalformedURLException e) {
                throw new IllegalStateException(e);
            }
        }
    }

    final CamelContext camelContext = getCamelContext();

    final RestConfiguration globalRestConfiguration
            = CamelContextHelper.getRestConfiguration(camelContext, null, determineComponentName());
    final String globalConfigurationHost = hostFrom(globalRestConfiguration);

    if (globalConfigurationHost != null) {
        return globalConfigurationHost;
    }

    final String specificationScheme = specificationUri.getScheme();
    // Perform a case insensitive "startsWith" check that works for different locales
    String prefix = "http";
    if (specificationUri.isAbsolute() && specificationScheme.regionMatches(true, 0, prefix, 0, prefix.length())) {
        try {
            return new URI(
                    specificationUri.getScheme(), specificationUri.getUserInfo(), specificationUri.getHost(),
                    specificationUri.getPort(), null, null, null).toString();
        } catch (final URISyntaxException e) {
            throw new IllegalStateException("Unable to create a new URI from: " + specificationUri, e);
        }
    }

    // Fix: the ternary is now parenthesized so the "` component" suffix is appended for both
    // branches; previously operator precedence bound it only to the "default" literal, producing
    // a truncated message when a component name was configured.
    final String resolvedComponentName = determineComponentName();
    throw new IllegalStateException(
            "Unable to determine destination host for requests. The OpenApi specification"
                + " does not specify `scheme` and `host` parameters, the specification URI is not absolute with `http` or"
                + " `https` scheme, and no RestConfigurations configured with `scheme`, `host` and `port` were found for `"
                + (resolvedComponentName != null ? resolvedComponentName : "default") + "` component"
                + " and there is no global RestConfiguration with those properties");
}
/** URI-encodes the endpoint-supplied value for a path parameter. */
String literalPathParameterValue(final OasParameter parameter) {
    final Object suppliedValue = parameters.get(parameter.getName());
    return UnsafeUriCharactersEncoder.encode(String.valueOf(suppliedValue));
}

/** Renders "name=value" for a query parameter, with the endpoint-supplied value URI-encoded. */
String literalQueryParameterValue(final OasParameter parameter) {
    final String name = parameter.getName();
    final Object suppliedValue = parameters.get(name);
    return name + "=" + UnsafeUriCharactersEncoder.encode(String.valueOf(suppliedValue));
}
/**
 * Renders one query parameter for the delegate endpoint's query template: a literal
 * "name=value" when a value was supplied on the endpoint URI, otherwise a simple-language
 * placeholder resolved per exchange. Nameless parameters contribute nothing.
 */
String queryParameter(final OasParameter parameter) {
    final String name = parameter.getName();
    if (ObjectHelper.isEmpty(name)) {
        return "";
    }

    return parameters.containsKey(name)
            ? literalQueryParameterValue(parameter)
            : queryParameterExpression(parameter);
}
/**
 * Replaces "{name}" placeholders in the OpenApi path template with literal values supplied as
 * endpoint URI parameters. Placeholders without a supplied value are kept verbatim so the
 * delegate REST producer can resolve them per exchange.
 */
String resolveUri(final String uriTemplate, final Map<String, OasParameter> pathParameters) {
    if (pathParameters.isEmpty()) {
        return uriTemplate;
    }

    int start = uriTemplate.indexOf('{');

    if (start == -1) {
        return uriTemplate;
    }

    int pos = 0;
    final StringBuilder resolved = new StringBuilder(uriTemplate.length() * 2);
    while (start != -1) {
        // copy the literal text preceding the placeholder
        resolved.append(uriTemplate, pos, start);

        final int end = uriTemplate.indexOf('}', start);

        final String name = uriTemplate.substring(start + 1, end);

        if (parameters.containsKey(name)) {
            // a literal value was supplied on the endpoint URI - substitute it (URI-encoded)
            final OasParameter parameter = pathParameters.get(name);
            final Object value = literalPathParameterValue(parameter);
            resolved.append(value);
        } else {
            // no value supplied - keep the placeholder as-is
            resolved.append('{').append(name).append('}');
        }

        pos = end + 1;
        start = uriTemplate.indexOf('{', pos);
    }

    // append any trailing literal text after the last placeholder
    if (pos < uriTemplate.length()) {
        resolved.append(uriTemplate, pos, uriTemplate.length());
    }

    return resolved.toString();
}
/**
 * Picks a media-type option from the most specific configured source: endpoint level first,
 * then component level, then the operation's list, then the specification's list. List values
 * are joined with ", "; returns null when nothing is configured anywhere.
 */
static String determineOption(
        final List<String> specificationLevel, final List<String> operationLevel,
        final String componentLevel, final String endpointLevel) {
    if (isNotEmpty(endpointLevel)) {
        return endpointLevel;
    }

    if (isNotEmpty(componentLevel)) {
        return componentLevel;
    }

    return Stream.of(operationLevel, specificationLevel)
            .filter(levels -> levels != null && !levels.isEmpty())
            .map(levels -> String.join(", ", levels))
            .findFirst()
            .orElse(null);
}
/**
 * Streams the query parameters of the operation, prepended with synthetic required parameters
 * for every apiKey security scheme the operation requires that passes its key in the query
 * string (Swagger 2.0 securityDefinitions or OpenApi 3.0 components).
 *
 * @throws IllegalStateException when the document is neither Swagger 2.0 nor OpenApi 3.0
 */
static Stream<OasParameter> determineQueryParameters(final Document openapi, final OasOperation operation) {
    final List<SecurityRequirement> securityRequirements = operation.security;
    final List<OasParameter> apiKeyQueryParameters = new ArrayList<>();
    if (securityRequirements != null) {
        if (openapi instanceof Oas20Document) {
            Oas20Document oas20Document = (Oas20Document) openapi;
            Oas20SecurityDefinitions securityDefinitions = oas20Document.securityDefinitions;

            for (final SecurityRequirement securityRequirement : securityRequirements) {
                for (final String securityRequirementName : securityRequirement.getSecurityRequirementNames()) {
                    final Oas20SecurityScheme securitySchemeDefinition = securityDefinitions
                            .getSecurityScheme(securityRequirementName);
                    if (securitySchemeDefinition.in != null
                            && securitySchemeDefinition.in.equals("query")) {
                        // the API key travels in the query string - model it as a required string parameter
                        Oas20Parameter securityParameter = new Oas20Parameter(securitySchemeDefinition.name);
                        securityParameter.required = true;
                        securityParameter.type = "string";
                        securityParameter.description = securitySchemeDefinition.description;
                        apiKeyQueryParameters.add(securityParameter);
                    }
                }
            }
        } else if (openapi instanceof Oas30Document) {
            Oas30Document oas30Document = (Oas30Document) openapi;
            for (final SecurityRequirement securityRequirement : securityRequirements) {
                for (final String securityRequirementName : securityRequirement.getSecurityRequirementNames()) {
                    final Oas30SecurityScheme securitySchemeDefinition = oas30Document.components
                            .getSecurityScheme(securityRequirementName);
                    if (securitySchemeDefinition.in != null && securitySchemeDefinition.in.equals("query")) {
                        Oas30Parameter securityParameter = new Oas30Parameter(securitySchemeDefinition.name);
                        securityParameter.required = true;
                        securityParameter.description = securitySchemeDefinition.description;
                        apiKeyQueryParameters.add(securityParameter);
                    }
                }
            }
        } else {
            throw new IllegalStateException("We only support OpenApi 2.0 or 3.0 document here");
        }
    }

    if (operation.getParameters() != null) {
        return Stream.concat(apiKeyQueryParameters.stream(),
                operation.getParameters().stream().filter(p -> "query".equals(p.in)));
    } else {
        return apiKeyQueryParameters.stream();
    }
}
/**
 * Builds "scheme://host[:port]" from a RestConfiguration, or returns null when the
 * configuration is missing or does not define both scheme and host. The port is omitted
 * when it is unset or the scheme's default (80 for http, 443 for https).
 */
static String hostFrom(final RestConfiguration restConfiguration) {
    if (restConfiguration == null) {
        return null;
    }

    final String scheme = restConfiguration.getScheme();
    final String host = restConfiguration.getHost();
    if (scheme == null || host == null) {
        return null;
    }

    final int port = restConfiguration.getPort();
    final boolean defaultPort = ("http".equalsIgnoreCase(scheme) && port == 80)
            || ("https".equalsIgnoreCase(scheme) && port == 443);

    String answer = scheme + "://" + host;
    if (port > 0 && !defaultPort) {
        answer = answer + ":" + port;
    }

    return answer;
}
/**
 * Loads the OpenApi definition model from the given path. Tries to resolve the resource using Camel's resource
 * loading support, if it fails uses OpenApi's resource loading support instead.
 *
 * @param  camelContext context to use for resource resolution
 * @param  uri          URI of the specification
 * @return              the specification
 * @throws IllegalArgumentException when the specification cannot be loaded or parsed
 */
static Document loadSpecificationFrom(final CamelContext camelContext, final URI uri) {
    final ObjectMapper mapper = new ObjectMapper();

    final String uriAsString = uri.toString();

    try (InputStream stream = ResourceHelper.resolveMandatoryResourceAsInputStream(camelContext, uriAsString)) {
        // parse as JSON, then let Apicurio build the OpenApi document model from the tree
        final JsonNode node = mapper.readTree(stream);

        return Library.readDocument(node);
    } catch (final Exception e) {
        throw new IllegalArgumentException(
                "The given OpenApi specification could not be loaded from `" + uri
                    + "`. Tried loading using Camel's resource resolution and using OpenApi's own resource resolution."
                    + " OpenApi tends to swallow exceptions while parsing, try specifying Java system property `debugParser`"
                    + " (e.g. `-DdebugParser=true`), the exception that occurred when loading using Camel's resource"
                    + " loader follows",
                e);
    }
}
/**
 * Chooses the scheme to use: prefers "https" then "http" from the specification's declared
 * schemes, falling back to the scheme the specification document was loaded from.
 * There is no support for WebSocket (Scheme.WS, Scheme.WSS).
 */
static String pickBestScheme(final String specificationScheme, final List<String> schemes) {
    if (schemes != null) {
        for (final String preferred : new String[] { "https", "http" }) {
            if (schemes.contains(preferred)) {
                return preferred;
            }
        }
    }

    return specificationScheme;
}
/**
 * Builds a Camel simple-language query placeholder: "name={name}" for required parameters,
 * "name={name?}" (optional marker) when the parameter is not required.
 */
static String queryParameterExpression(final OasParameter parameter) {
    final String name = parameter.getName();

    final boolean required = Boolean.TRUE.equals(parameter.required);
    return name + "={" + name + (required ? "}" : "?}");
}
/** HTTP methods an OpenApi path item can define an operation for. */
enum HttpMethod {
    POST,
    GET,
    PUT,
    PATCH,
    DELETE,
    HEAD,
    OPTIONS
}
}
|
CAMEL-17043 Added rest endpoint parameters to nestedParameters (#6211)
|
components/camel-rest-openapi/src/main/java/org/apache/camel/component/rest/openapi/RestOpenApiEndpoint.java
|
CAMEL-17043 Added rest endpoint parameters to nestedParameters (#6211)
|
|
Java
|
apache-2.0
|
225d887ba5662778ea8e11be4ac8712673af7df8
| 0
|
MythTV-Clients/MythTV-Service-API
|
/**
*
*/
package org.mythtv.services.api.converters;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.simpleframework.xml.transform.Transform;
/**
 * Simple XML framework transform that reads MythTV service date strings into Joda-Time
 * {@link DateTime} values. Candidate formats are probed in order and the first successful
 * parse wins; ISO instants are pinned to UTC while keeping their parsed field values.
 * Writing is not supported.
 *
 * @author Daniel Frey
 */
public class JodaDateTimeTransform implements Transform<DateTime> {

    private static final String TAG = JodaDateTimeTransform.class.getSimpleName();

    // Candidate input formats, most common first.
    private static final DateTimeFormatter isoFmt = DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ss'Z'" );
    private static final DateTimeFormatter dateFmt = DateTimeFormat.forPattern( "EEE MM dd yyyy" );
    private static final DateTimeFormatter dateTimeShortFmt = DateTimeFormat.forPattern( "yyyyMMddHHmmss" );

    private final Logger logger;

    public JodaDateTimeTransform() {
        logger = Logger.getLogger( JodaDateTimeTransform.TAG );
    }

    /**
     * Attempts each known format in turn; returns null when the value matches none of them.
     */
    @Override
    public DateTime read( String value ) throws Exception {
        DateTime parsed = tryParse( isoFmt, value );
        if ( null != parsed ) {
            // ISO instants are UTC; retain the parsed fields while switching the zone
            return parsed.withZoneRetainFields( DateTimeZone.UTC );
        }

        parsed = tryParse( dateFmt, value );
        if ( null != parsed ) {
            return parsed;
        }

        return tryParse( dateTimeShortFmt, value );
    }

    // Parse failures are expected while probing formats; swallow and signal with null.
    private static DateTime tryParse( DateTimeFormatter fmt, String value ) {
        try {
            return fmt.parseDateTime( value );
        } catch( Exception e ) {
            return null;
        }
    }

    @Override
    public String write( DateTime value ) throws Exception {
        throw new UnsupportedOperationException( "not implemented" );
    }

}
|
src/main/java/org/mythtv/services/api/converters/JodaDateTimeTransform.java
|
/**
*
*/
package org.mythtv.services.api.converters;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.simpleframework.xml.transform.Transform;
/**
 * Simple XML framework transform that reads MythTV service date strings into Joda-Time
 * {@link DateTime} values. Candidate formats are tried in order and the first successful
 * parse wins. Parsing uses the JVM default time zone (no explicit UTC handling in this
 * version). Writing is not supported.
 *
 * @author Daniel Frey
 */
public class JodaDateTimeTransform implements Transform<DateTime> {

    private static final String TAG = JodaDateTimeTransform.class.getSimpleName();

    // Candidate input formats: ISO instant, long date, compact numeric timestamp.
    private static final DateTimeFormatter isoFmt = DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ss'Z'" );
    private static final DateTimeFormatter dateFmt = DateTimeFormat.forPattern( "EEE MM dd yyyy" );
    private static final DateTimeFormatter dateTimeShortFmt = DateTimeFormat.forPattern( "yyyyMMddHHmmss" );

    private final Logger logger;

    public JodaDateTimeTransform() {
        logger = Logger.getLogger( JodaDateTimeTransform.TAG );
    }

    /**
     * Tries each known format in turn; parse failures are deliberately swallowed so the
     * next format can be probed. Returns null when no format matches.
     */
    @Override
    public DateTime read( String value ) throws Exception {

        try {
            return isoFmt.parseDateTime( value );
        } catch( Exception e ) {
            //logger.log( Level.WARNING, e.getMessage(), e );
        }

        try {
            return dateFmt.parseDateTime( value );
        } catch( Exception e ) {
            //logger.log( Level.WARNING, e.getMessage(), e );
        }

        try {
            return dateTimeShortFmt.parseDateTime( value );
        } catch( Exception e ) {
            //logger.log( Level.WARNING, e.getMessage(), e );
        }

        return null;
    }

    @Override
    public String write( DateTime value ) throws Exception {
        throw new UnsupportedOperationException( "not implemented" );
    }

}
|
updated status datetime conversion to use UTC dates
|
src/main/java/org/mythtv/services/api/converters/JodaDateTimeTransform.java
|
updated status datetime conversion to use UTC dates
|
|
Java
|
apache-2.0
|
5707a2cfb7a444e2b75b72a0250e5ed1c283aad0
| 0
|
jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2,foam-framework/foam2,foam-framework/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm,jacksonic/vjlofvhjfgm
|
/**
* @license
* Copyright 2017 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.lib.json;
import foam.lib.parse.*;
import foam.util.SafetyUtil;
import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
/**
 * Parses JSON date values into {@link java.util.Date}. Accepts a JSON null, a long
 * (milliseconds since the epoch) or a quoted ISO-8601 style string of the form
 * "yyyy-MM-ddTHH:mm:ss.&lt;digits&gt;Z", always interpreted as UTC.
 */
public class DateParser
    extends ProxyParser
{

    // Reusable per-thread builder for assembling the fractional-second digits;
    // reset to length 0 on every get().
    protected static ThreadLocal<StringBuilder> sb = new ThreadLocal<StringBuilder>() {
        @Override
        protected StringBuilder initialValue() {
            return new StringBuilder();
        }

        @Override
        public StringBuilder get() {
            StringBuilder b = super.get();
            b.setLength(0);
            return b;
        }
    };

    public DateParser() {
        // Accept: JSON null, epoch-millis long, or quoted "Y-M-DTH:M:S.<digits>Z"
        super(new Alt(
            new NullParser(),
            new LongParser(),
            new Seq(
                new Literal("\""),
                new IntParser(),
                new Literal("-"),
                new IntParser(),
                new Literal("-"),
                new IntParser(),
                new Literal("T"),
                new IntParser(),
                new Literal(":"),
                new IntParser(),
                new Literal(":"),
                new IntParser(),
                new Literal("."),
                new Repeat(new Chars("0123456789")),
                new Literal("Z"),
                new Literal("\""))));
    }

    public PStream parse(PStream ps, ParserContext x) {
        ps = super.parse(ps, x);

        if ( ps == null ) {
            return null;
        }

        if ( ps.value() == null ) {
            return ps.setValue(null);
        }

        // Checks if Long Date (Timestamp from epoch)
        if ( ps.value() instanceof Long ) {
            return ps.setValue(new Date((Long) ps.value()));
        }

        Object[] result = (Object[]) ps.value();

        // TODO: Handle sub-millisecond accuracy, either with java 8 java.time package or some custom type
        // to support java 7
        Calendar c = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        c.clear();

        c.set(
            (Integer) result[1],
            (Integer) result[3] - 1, // Java calendar uses zero-indexed months
            (Integer) result[5],
            (Integer) result[7],
            (Integer) result[9],
            (Integer) result[11]);

        // The digits after '.' are a decimal fraction of a second, not a raw millisecond
        // count: ".5" is 500 ms and ".05" is 50 ms. Take the first three digits, right-pad
        // with zeros, and truncate anything beyond millisecond precision.
        // (The previous implementation stripped leading zeros and parsed the remainder,
        // mis-reading ".5" as 5 ms and overflowing the field for more than three digits.)
        StringBuilder fraction = sb.get();
        Object[] millis = (Object[]) result[13];
        for ( int i = 0 ; i < millis.length && fraction.length() < 3 ; i++ ) {
            fraction.append((char) millis[i]);
        }
        while ( fraction.length() < 3 ) {
            fraction.append('0');
        }
        c.add(Calendar.MILLISECOND, Integer.parseInt(fraction.toString(), 10));

        return ps.setValue(c.getTime());
    }
}
|
src/foam/lib/json/DateParser.java
|
/**
* @license
* Copyright 2017 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.lib.json;
import foam.lib.parse.*;
import foam.util.SafetyUtil;
import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
public class DateParser
    extends ProxyParser
{
    // Per-thread scratch buffer; get() resets the length so every parse call
    // starts with an empty builder without allocating a new one.
    protected static ThreadLocal<StringBuilder> sb = new ThreadLocal<StringBuilder>() {
        @Override
        protected StringBuilder initialValue() {
            return new StringBuilder();
        }

        @Override
        public StringBuilder get() {
            StringBuilder b = super.get();
            b.setLength(0);
            return b;
        }
    };

    /**
     * Accepts JSON null, a bare long (epoch milliseconds), or a quoted
     * UTC timestamp of the form "yyyy-MM-ddTHH:mm:ss.<digits>Z".
     */
    public DateParser() {
        super(new Alt(
            new NullParser(),
            new LongParser(),
            new Seq(
                new Literal("\""),
                new IntParser(),
                new Literal("-"),
                new IntParser(),
                new Literal("-"),
                new IntParser(),
                new Literal("T"),
                new IntParser(),
                new Literal(":"),
                new IntParser(),
                new Literal(":"),
                new IntParser(),
                new Literal("."),
                new Repeat(new Chars("0123456789")),
                new Literal("Z"),
                new Literal("\""))));
    }

    /**
     * Converts the parsed value into a {@link Date}: null stays null, a Long
     * is treated as epoch milliseconds, and the Seq result is assembled into
     * a UTC {@link Calendar}.
     */
    public PStream parse(PStream ps, ParserContext x) {
        ps = super.parse(ps, x);
        if (ps == null) {
            return null;
        }

        if (ps.value() == null) {
            return ps.setValue(null);
        }

        // Checks if Long Date (Timestamp from epoch)
        if (ps.value() instanceof Long) {
            return ps.setValue(new Date((Long) ps.value()));
        }

        Object[] result = (Object[]) ps.value();

        // TODO: Handle sub-millisecond accuracy, either with java 8 java.time package or some custom type
        // to support java 7
        Calendar c = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        c.clear();

        // Seq result indices: 1=year, 3=month, 5=day, 7=hour, 9=minute,
        // 11=second, 13=fractional-second digit array.
        c.set(
            (Integer) result[1],
            (Integer) result[3] - 1, // Java calendar uses zero-indexed months
            (Integer) result[5],
            (Integer) result[7],
            (Integer) result[9],
            (Integer) result[11]);

        boolean zeroPrefixed = true;

        StringBuilder milliseconds = sb.get();

        // NOTE(review): stripping leading zeros treats the fraction as an
        // integer millisecond count, so ".05" parses as 5 ms rather than
        // 50 ms — confirm whether producers always emit exactly 3 digits.
        Object[] millis = (Object[]) result[13];

        for ( int i = 0 ; i < millis.length ; i++ ) {
            // do not prefix with zeros
            if ( zeroPrefixed && '0' == (char) millis[i] ) {
                continue;
            }

            // append millisecond
            if ( zeroPrefixed ) zeroPrefixed = false;
            milliseconds.append((char) millis[i]);
        }

        // try to parse milliseconds, default to 0
        c.add(Calendar.MILLISECOND, ! SafetyUtil.isEmpty(milliseconds.toString()) ?
            Integer.parseInt(milliseconds.toString(), 10) : 0);

        return ps.setValue(c.getTime());
    }
}
|
Fixed styling
|
src/foam/lib/json/DateParser.java
|
Fixed styling
|
|
Java
|
apache-2.0
|
1bee9243f90717c93e0012a720c767e0bd5ee39e
| 0
|
dkpro/dkpro-lab
|
/*******************************************************************************
* Copyright 2016
* Ubiquitous Knowledge Processing (UKP) Lab
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.dkpro.lab.conversion;
import static org.junit.Assert.*;
import java.io.File;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.dkpro.lab.Lab;
import org.dkpro.lab.engine.TaskContext;
import org.dkpro.lab.task.Dimension;
import org.dkpro.lab.task.Discriminator;
import org.dkpro.lab.task.ParameterSpace;
import org.dkpro.lab.task.Task;
import org.dkpro.lab.task.impl.DefaultBatchTask;
import org.dkpro.lab.task.impl.ExecutableTaskBase;
import org.junit.Before;
import org.junit.Test;
public class ConversionServiceTest
{
    /** Discriminator key consumed by the test task. */
    final String KEY = "abcdef";

    /** Captured discriminator value; written by the consumer task at run time. */
    static String discriminatorText = null;

    Task consumer;

    /**
     * Points DKPRO_HOME at a per-test-class repository folder, wipes any
     * previous contents and (re)creates the consumer task.
     */
    @Before
    public void setup()
    {
        File path = new File("target/repository/" + getClass().getSimpleName() + "/");
        System.setProperty("DKPRO_HOME", path.getAbsolutePath());
        FileUtils.deleteQuietly(path);

        initConsumer();
    }

    /**
     * Creates a task that records the string form of the discriminator
     * registered under {@link #KEY} into {@link #discriminatorText}.
     */
    private void initConsumer()
    {
        consumer = new ExecutableTaskBase()
        {
            @Discriminator(name = KEY)
            protected Integer x;

            @Override
            public void execute(TaskContext aContext)
                throws Exception
            {
                // Iterate entries directly instead of keySet() + get().
                for (Map.Entry<String, String> e : getDescriminators().entrySet()) {
                    if (e.getKey().contains(KEY)) {
                        discriminatorText = e.getValue();
                        break;
                    }
                }
            }
        };
    }

    @Test
    public void testDiscriminationWithoutConversionServiceOverride()
        throws Exception
    {
        // Integer.valueOf avoids the deprecated Integer(int) constructor.
        Integer integer = Integer.valueOf(3);
        ParameterSpace ps = new ParameterSpace(Dimension.create(KEY, integer));

        DefaultBatchTask batch = new DefaultBatchTask();
        batch.setParameterSpace(ps);
        batch.addTask(consumer);

        // A fresh Lab per test keeps conversion-service registrations from
        // leaking between test methods.
        Lab instance = Lab.newInstance(Lab.DEFAULT_CONTEXT);
        instance.run(batch);

        // Without an override the numeric value is rendered as plain text.
        assertEquals("3", discriminatorText);
    }

    @Test
    public void testDiscriminationWithConversionServiceOverride()
        throws Exception
    {
        Integer integer = Integer.valueOf(3);
        ParameterSpace ps = new ParameterSpace(Dimension.create(KEY, integer));

        DefaultBatchTask batch = new DefaultBatchTask();
        batch.setParameterSpace(ps);
        batch.addTask(consumer);

        Lab instance = Lab.newInstance(Lab.DEFAULT_CONTEXT);

        // we register an alternative text for the integer value which should be used instead of the
        // default of converting the numerical value to string
        instance.getConversionService().registerDiscriminable(integer, "three");
        instance.run(batch);

        assertEquals("three", discriminatorText);
    }

    @Test
    public void testConversionService(){
        Lab instance = Lab.newInstance(Lab.DEFAULT_CONTEXT);
        ConversionService conversionService = instance.getConversionService();
        assertNotNull(conversionService);

        String key = "hello";
        conversionService.registerDiscriminable(key, "a Text");

        assertTrue(conversionService.isRegistered(key));
        assertEquals("a Text", conversionService.getDiscriminableValue(key));
    }
}
|
dkpro-lab-core/src/test/java/org/dkpro/lab/conversion/ConversionServiceTest.java
|
/*******************************************************************************
* Copyright 2016
* Ubiquitous Knowledge Processing (UKP) Lab
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.dkpro.lab.conversion;
import static org.junit.Assert.*;
import java.io.File;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.dkpro.lab.Lab;
import org.dkpro.lab.engine.TaskContext;
import org.dkpro.lab.task.Dimension;
import org.dkpro.lab.task.Discriminator;
import org.dkpro.lab.task.ParameterSpace;
import org.dkpro.lab.task.Task;
import org.dkpro.lab.task.impl.DefaultBatchTask;
import org.dkpro.lab.task.impl.ExecutableTaskBase;
import org.junit.Before;
import org.junit.Test;
public class ConversionServiceTest
{
    /** Discriminator key consumed by the test task. */
    final String KEY = "abcdef";

    /** Captured discriminator value; written by the consumer task at run time. */
    static String discriminatorText = null;

    Task consumer;

    /**
     * Points DKPRO_HOME at a per-test-class repository folder, wipes any
     * previous contents and (re)creates the consumer task.
     */
    @Before
    public void setup()
    {
        File path = new File("target/repository/" + getClass().getSimpleName() + "/");
        System.setProperty("DKPRO_HOME", path.getAbsolutePath());
        FileUtils.deleteQuietly(path);

        initConsumer();
    }

    /**
     * Creates a task that records the string form of the discriminator
     * registered under {@link #KEY} into {@link #discriminatorText}.
     */
    private void initConsumer()
    {
        consumer = new ExecutableTaskBase()
        {
            @Discriminator(name = KEY)
            protected Integer x;

            @Override
            public void execute(TaskContext aContext)
                throws Exception
            {
                Map<String, String> descriminators = getDescriminators();
                for (String k : descriminators.keySet()) {
                    if (k.contains(KEY)) {
                        discriminatorText = descriminators.get(k);
                        break;
                    }
                }
            }
        };
    }

    @Test
    public void testDiscriminationWithoutConversionServiceOverride()
        throws Exception
    {
        Integer integer = new Integer(3);
        ParameterSpace ps = new ParameterSpace(Dimension.create(KEY, integer));

        DefaultBatchTask batch = new DefaultBatchTask();
        batch.setParameterSpace(ps);
        batch.addTask(consumer);

        // NOTE(review): Lab.getInstance() appears to be a shared singleton, so
        // registrations made by another test method can leak into this one —
        // the variant of this file using Lab.newInstance(Lab.DEFAULT_CONTEXT)
        // avoids that; confirm against the Lab API.
        Lab instance = Lab.getInstance();
        instance.run(batch);

        // Without an override the numeric value is rendered as plain text.
        assertEquals("3", discriminatorText);
    }

    @Test
    public void testDiscriminationWithConversionServiceOverride()
        throws Exception
    {
        Integer integer = new Integer(3);
        ParameterSpace ps = new ParameterSpace(Dimension.create(KEY, integer));

        DefaultBatchTask batch = new DefaultBatchTask();
        batch.setParameterSpace(ps);
        batch.addTask(consumer);

        Lab instance = Lab.getInstance();

        // we register an alternative text for the integer value which should be used instead of the
        // default of converting the numerical value to string
        instance.getConversionService().registerDiscriminable(integer, "three");
        instance.run(batch);

        assertEquals("three", discriminatorText);
    }

    @Test
    public void testConversionService(){
        Lab instance = Lab.getInstance();
        ConversionService conversionService = instance.getConversionService();
        assertNotNull(conversionService);

        String key = "hello";
        conversionService.registerDiscriminable(key, "a Text");

        assertTrue(conversionService.isRegistered(key));
        assertEquals("a Text", conversionService.getDiscriminableValue(key));
    }
}
|
#106 - Spurious test failure in ConversionServiceTest.testDiscriminationWithoutConversionServiceOverride
- Ensure each test runs with its own copy of the conversion service.
|
dkpro-lab-core/src/test/java/org/dkpro/lab/conversion/ConversionServiceTest.java
|
#106 - Spurious test failure in ConversionServiceTest.testDiscriminationWithoutConversionServiceOverride
|
|
Java
|
apache-2.0
|
9941b8e605aab76eafb66ab063092af6ff944221
| 0
|
tkpanther/ignite,BiryukovVA/ignite,ilantukh/ignite,apacheignite/ignite,psadusumilli/ignite,svladykin/ignite,f7753/ignite,vsuslov/incubator-ignite,shurun19851206/ignite,shroman/ignite,samaitra/ignite,leveyj/ignite,vldpyatkov/ignite,dream-x/ignite,NSAmelchev/ignite,dlnufox/ignite,SharplEr/ignite,ryanzz/ignite,NSAmelchev/ignite,abhishek-ch/incubator-ignite,andrey-kuznetsov/ignite,vadopolski/ignite,vsuslov/incubator-ignite,ptupitsyn/ignite,arijitt/incubator-ignite,apacheignite/ignite,irudyak/ignite,gargvish/ignite,dream-x/ignite,kromulan/ignite,avinogradovgg/ignite,wmz7year/ignite,WilliamDo/ignite,zzcclp/ignite,SomeFire/ignite,alexzaitzev/ignite,nizhikov/ignite,akuznetsov-gridgain/ignite,avinogradovgg/ignite,samaitra/ignite,endian675/ignite,agoncharuk/ignite,agura/incubator-ignite,StalkXT/ignite,sk0x50/ignite,alexzaitzev/ignite,afinka77/ignite,ntikhonov/ignite,vsisko/incubator-ignite,NSAmelchev/ignite,kidaa/incubator-ignite,a1vanov/ignite,agura/incubator-ignite,a1vanov/ignite,vladisav/ignite,shurun19851206/ignite,louishust/incubator-ignite,kidaa/incubator-ignite,ntikhonov/ignite,psadusumilli/ignite,BiryukovVA/ignite,louishust/incubator-ignite,ptupitsyn/ignite,dlnufox/ignite,arijitt/incubator-ignite,gargvish/ignite,NSAmelchev/ignite,ashutakGG/incubator-ignite,sk0x50/ignite,vsuslov/incubator-ignite,sk0x50/ignite,adeelmahmood/ignite,vadopolski/ignite,alexzaitzev/ignite,wmz7year/ignite,arijitt/incubator-ignite,wmz7year/ignite,apache/ignite,shurun19851206/ignite,endian675/ignite,ascherbakoff/ignite,DoudTechData/ignite,sylentprayer/ignite,ryanzz/ignite,sylentprayer/ignite,xtern/ignite,akuznetsov-gridgain/ignite,murador/ignite,andrey-kuznetsov/ignite,rfqu/ignite,chandresh-pancholi/ignite,gargvish/ignite,nivanov/ignite,gargvish/ignite,akuznetsov-gridgain/ignite,WilliamDo/ignite,vadopolski/ignite,mcherkasov/ignite,amirakhmedov/ignite,shurun19851206/ignite,voipp/ignite,svladykin/ignite,svladykin/ignite,wmz7year/ignite,NSAmelchev/ignite,adeelmahmood/ignite,agoncharuk/ignite,rfqu/i
gnite,vsisko/incubator-ignite,arijitt/incubator-ignite,murador/ignite,daradurvs/ignite,VladimirErshov/ignite,vldpyatkov/ignite,murador/ignite,VladimirErshov/ignite,ntikhonov/ignite,voipp/ignite,ntikhonov/ignite,apache/ignite,StalkXT/ignite,dlnufox/ignite,zzcclp/ignite,kromulan/ignite,apache/ignite,zzcclp/ignite,vladisav/ignite,vsuslov/incubator-ignite,endian675/ignite,agura/incubator-ignite,vldpyatkov/ignite,ilantukh/ignite,voipp/ignite,dmagda/incubator-ignite,samaitra/ignite,gargvish/ignite,amirakhmedov/ignite,samaitra/ignite,avinogradovgg/ignite,agoncharuk/ignite,ilantukh/ignite,arijitt/incubator-ignite,sylentprayer/ignite,leveyj/ignite,endian675/ignite,vsisko/incubator-ignite,kromulan/ignite,vsuslov/incubator-ignite,ashutakGG/incubator-ignite,amirakhmedov/ignite,gridgain/apache-ignite,dmagda/incubator-ignite,dlnufox/ignite,chandresh-pancholi/ignite,endian675/ignite,SomeFire/ignite,f7753/ignite,sk0x50/ignite,ryanzz/ignite,leveyj/ignite,nivanov/ignite,zzcclp/ignite,ilantukh/ignite,SharplEr/ignite,amirakhmedov/ignite,ptupitsyn/ignite,samaitra/ignite,SomeFire/ignite,alexzaitzev/ignite,endian675/ignite,vsisko/incubator-ignite,sylentprayer/ignite,ascherbakoff/ignite,andrey-kuznetsov/ignite,gargvish/ignite,nivanov/ignite,wmz7year/ignite,apacheignite/ignite,StalkXT/ignite,wmz7year/ignite,irudyak/ignite,murador/ignite,andrey-kuznetsov/ignite,thuTom/ignite,samaitra/ignite,svladykin/ignite,vldpyatkov/ignite,SharplEr/ignite,SomeFire/ignite,vladisav/ignite,leveyj/ignite,irudyak/ignite,leveyj/ignite,tkpanther/ignite,VladimirErshov/ignite,shroman/ignite,ascherbakoff/ignite,pperalta/ignite,nivanov/ignite,f7753/ignite,svladykin/ignite,agoncharuk/ignite,alexzaitzev/ignite,WilliamDo/ignite,shurun19851206/ignite,ptupitsyn/ignite,abhishek-ch/incubator-ignite,DoudTechData/ignite,dream-x/ignite,kromulan/ignite,amirakhmedov/ignite,mcherkasov/ignite,agoncharuk/ignite,endian675/ignite,gridgain/apache-ignite,vsisko/incubator-ignite,tkpanther/ignite,apacheignite/ignite,psadusumilli/ignite,m
cherkasov/ignite,dmagda/incubator-ignite,tkpanther/ignite,StalkXT/ignite,andrey-kuznetsov/ignite,apache/ignite,mcherkasov/ignite,ptupitsyn/ignite,rfqu/ignite,ilantukh/ignite,BiryukovVA/ignite,dmagda/incubator-ignite,vsisko/incubator-ignite,psadusumilli/ignite,andrey-kuznetsov/ignite,BiryukovVA/ignite,amirakhmedov/ignite,ascherbakoff/ignite,vladisav/ignite,irudyak/ignite,voipp/ignite,psadusumilli/ignite,chandresh-pancholi/ignite,f7753/ignite,VladimirErshov/ignite,nivanov/ignite,wmz7year/ignite,tkpanther/ignite,agura/incubator-ignite,ptupitsyn/ignite,a1vanov/ignite,agura/incubator-ignite,f7753/ignite,xtern/ignite,agura/incubator-ignite,sk0x50/ignite,afinka77/ignite,kidaa/incubator-ignite,avinogradovgg/ignite,gridgain/apache-ignite,BiryukovVA/ignite,WilliamDo/ignite,vsuslov/incubator-ignite,irudyak/ignite,dream-x/ignite,thuTom/ignite,vadopolski/ignite,zzcclp/ignite,andrey-kuznetsov/ignite,ptupitsyn/ignite,iveselovskiy/ignite,WilliamDo/ignite,pperalta/ignite,rfqu/ignite,dmagda/incubator-ignite,chandresh-pancholi/ignite,pperalta/ignite,nizhikov/ignite,irudyak/ignite,vadopolski/ignite,daradurvs/ignite,xtern/ignite,pperalta/ignite,dream-x/ignite,thuTom/ignite,shroman/ignite,vsisko/incubator-ignite,gargvish/ignite,leveyj/ignite,murador/ignite,a1vanov/ignite,avinogradovgg/ignite,SomeFire/ignite,daradurvs/ignite,vldpyatkov/ignite,xtern/ignite,thuTom/ignite,DoudTechData/ignite,samaitra/ignite,nivanov/ignite,xtern/ignite,rfqu/ignite,xtern/ignite,StalkXT/ignite,mcherkasov/ignite,rfqu/ignite,VladimirErshov/ignite,iveselovskiy/ignite,gridgain/apache-ignite,ilantukh/ignite,iveselovskiy/ignite,ntikhonov/ignite,andrey-kuznetsov/ignite,ascherbakoff/ignite,thuTom/ignite,vadopolski/ignite,chandresh-pancholi/ignite,afinka77/ignite,agoncharuk/ignite,svladykin/ignite,pperalta/ignite,amirakhmedov/ignite,shroman/ignite,voipp/ignite,nizhikov/ignite,ryanzz/ignite,adeelmahmood/ignite,akuznetsov-gridgain/ignite,leveyj/ignite,afinka77/ignite,xtern/ignite,a1vanov/ignite,chandresh-pancholi/ignite,S
omeFire/ignite,kromulan/ignite,sylentprayer/ignite,BiryukovVA/ignite,vldpyatkov/ignite,SomeFire/ignite,ntikhonov/ignite,ntikhonov/ignite,a1vanov/ignite,gridgain/apache-ignite,nivanov/ignite,vladisav/ignite,gargvish/ignite,rfqu/ignite,thuTom/ignite,thuTom/ignite,nizhikov/ignite,dream-x/ignite,avinogradovgg/ignite,chandresh-pancholi/ignite,sk0x50/ignite,pperalta/ignite,wmz7year/ignite,samaitra/ignite,vladisav/ignite,shroman/ignite,endian675/ignite,sylentprayer/ignite,kromulan/ignite,shroman/ignite,mcherkasov/ignite,f7753/ignite,WilliamDo/ignite,daradurvs/ignite,f7753/ignite,louishust/incubator-ignite,alexzaitzev/ignite,BiryukovVA/ignite,samaitra/ignite,shurun19851206/ignite,dlnufox/ignite,BiryukovVA/ignite,adeelmahmood/ignite,kromulan/ignite,amirakhmedov/ignite,xtern/ignite,vldpyatkov/ignite,SomeFire/ignite,voipp/ignite,sk0x50/ignite,agura/incubator-ignite,daradurvs/ignite,nizhikov/ignite,apache/ignite,akuznetsov-gridgain/ignite,SharplEr/ignite,NSAmelchev/ignite,apache/ignite,pperalta/ignite,zzcclp/ignite,ashutakGG/incubator-ignite,apache/ignite,chandresh-pancholi/ignite,ptupitsyn/ignite,tkpanther/ignite,psadusumilli/ignite,ptupitsyn/ignite,leveyj/ignite,andrey-kuznetsov/ignite,DoudTechData/ignite,nizhikov/ignite,vladisav/ignite,dream-x/ignite,SharplEr/ignite,samaitra/ignite,shroman/ignite,daradurvs/ignite,VladimirErshov/ignite,WilliamDo/ignite,voipp/ignite,mcherkasov/ignite,ntikhonov/ignite,SharplEr/ignite,sk0x50/ignite,DoudTechData/ignite,alexzaitzev/ignite,ascherbakoff/ignite,afinka77/ignite,murador/ignite,apache/ignite,abhishek-ch/incubator-ignite,ryanzz/ignite,louishust/incubator-ignite,ryanzz/ignite,dlnufox/ignite,apache/ignite,ashutakGG/incubator-ignite,shurun19851206/ignite,akuznetsov-gridgain/ignite,apacheignite/ignite,shroman/ignite,vadopolski/ignite,f7753/ignite,daradurvs/ignite,dmagda/incubator-ignite,SharplEr/ignite,ilantukh/ignite,ascherbakoff/ignite,a1vanov/ignite,ashutakGG/incubator-ignite,kidaa/incubator-ignite,chandresh-pancholi/ignite,BiryukovVA/ign
ite,adeelmahmood/ignite,irudyak/ignite,adeelmahmood/ignite,vsisko/incubator-ignite,SharplEr/ignite,daradurvs/ignite,SharplEr/ignite,DoudTechData/ignite,DoudTechData/ignite,ascherbakoff/ignite,dream-x/ignite,arijitt/incubator-ignite,mcherkasov/ignite,dlnufox/ignite,ascherbakoff/ignite,shroman/ignite,thuTom/ignite,apacheignite/ignite,a1vanov/ignite,afinka77/ignite,dmagda/incubator-ignite,shroman/ignite,StalkXT/ignite,SomeFire/ignite,pperalta/ignite,vldpyatkov/ignite,afinka77/ignite,zzcclp/ignite,shurun19851206/ignite,abhishek-ch/incubator-ignite,tkpanther/ignite,agoncharuk/ignite,svladykin/ignite,VladimirErshov/ignite,xtern/ignite,louishust/incubator-ignite,andrey-kuznetsov/ignite,agura/incubator-ignite,StalkXT/ignite,NSAmelchev/ignite,vladisav/ignite,iveselovskiy/ignite,ptupitsyn/ignite,nizhikov/ignite,sylentprayer/ignite,voipp/ignite,murador/ignite,StalkXT/ignite,adeelmahmood/ignite,NSAmelchev/ignite,kidaa/incubator-ignite,dmagda/incubator-ignite,nizhikov/ignite,iveselovskiy/ignite,nivanov/ignite,vadopolski/ignite,dlnufox/ignite,kromulan/ignite,daradurvs/ignite,louishust/incubator-ignite,nizhikov/ignite,irudyak/ignite,zzcclp/ignite,ilantukh/ignite,sylentprayer/ignite,afinka77/ignite,rfqu/ignite,abhishek-ch/incubator-ignite,alexzaitzev/ignite,avinogradovgg/ignite,ashutakGG/incubator-ignite,BiryukovVA/ignite,abhishek-ch/incubator-ignite,voipp/ignite,iveselovskiy/ignite,ilantukh/ignite,apacheignite/ignite,ryanzz/ignite,murador/ignite,sk0x50/ignite,adeelmahmood/ignite,alexzaitzev/ignite,psadusumilli/ignite,SomeFire/ignite,StalkXT/ignite,gridgain/apache-ignite,DoudTechData/ignite,NSAmelchev/ignite,gridgain/apache-ignite,ryanzz/ignite,tkpanther/ignite,irudyak/ignite,VladimirErshov/ignite,amirakhmedov/ignite,psadusumilli/ignite,WilliamDo/ignite,ilantukh/ignite,kidaa/incubator-ignite,apacheignite/ignite,agoncharuk/ignite,daradurvs/ignite
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.schema.ui;
import javafx.application.*;
import javafx.beans.value.*;
import javafx.collections.*;
import javafx.concurrent.*;
import javafx.event.*;
import javafx.geometry.*;
import javafx.scene.*;
import javafx.scene.control.*;
import javafx.scene.layout.*;
import javafx.stage.*;
import javafx.util.*;
import org.apache.ignite.cache.query.*;
import org.apache.ignite.schema.generator.*;
import org.apache.ignite.schema.model.*;
import org.apache.ignite.schema.parser.*;
import java.io.*;
import java.net.*;
import java.sql.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.prefs.*;
import static javafx.embed.swing.SwingFXUtils.*;
import static org.apache.ignite.schema.ui.Controls.*;
/**
* Schema load application.
*/
@SuppressWarnings("UnnecessaryFullyQualifiedName")
public class SchemaLoadApp extends Application {
/** */
private Stage owner;
/** */
private BorderPane rootPane;
/** */
private Label titleLb;
/** */
private Button prevBtn;
/** */
private Button nextBtn;
/** */
private TextField jdbcDrvJarTf;
/** */
private TextField jdbcDrvClsTf;
/** */
private TextField jdbcUrlTf;
/** */
private TextField userTf;
/** */
private PasswordField pwdTf;
/** */
private GridPaneEx connPnl;
/** */
private StackPane connLayerPnl;
/** */
private TableView<PojoDescriptor> pojosTbl;
/** */
private TextField outFolderTf;
/** */
private TextField pkgTf;
/** */
private CheckBox pojoConstructorCh;
/** */
private CheckBox pojoIncludeKeysCh;
/** */
private CheckBox xmlSingleFileCh;
/** */
private CheckBox openFolderCh;
/** */
private TextField regexTf;
/** */
private TextField replaceTf;
/** */
private GridPaneEx genPnl;
/** */
private StackPane genLayerPnl;
/** */
private ProgressIndicator pi;
/** List with POJOs descriptors. */
private ObservableList<PojoDescriptor> pojos = FXCollections.emptyObservableList();
/** Currently selected POJO. */
private PojoDescriptor curPojo;
/** */
private final Map<String, Driver> drivers = new HashMap<>();
/** */
private static final ObservableList<PojoField> NO_FIELDS = FXCollections.emptyObservableList();
/** */
private final ExecutorService exec = Executors.newSingleThreadExecutor(new ThreadFactory() {
@Override public Thread newThread(Runnable r) {
Thread t = new Thread(r, "schema-load-worker");
t.setDaemon(true);
return t;
}
});
/**
* Lock UI before start long task.
*
* @param layer Stack pane to add progress indicator.
* @param controls Controls to disable.
*/
private void lockUI(StackPane layer, Node... controls) {
for (Node control : controls)
control.setDisable(true);
layer.getChildren().add(pi);
}
/**
* Unlock UI after long task finished.
*
* @param layer Stack pane to remove progress indicator.
* @param controls Controls to enable.
*/
private void unlockUI(StackPane layer, Node... controls) {
for (Node control : controls)
control.setDisable(false);
layer.getChildren().remove(pi);
}
/**
* Perceptual delay to avoid UI flickering.
*
* @param started Time when background progress started.
*/
private void perceptualDelay(long started) {
long delta = System.currentTimeMillis() - started;
if (delta < 500)
try {
Thread.sleep(500 - delta);
}
catch (InterruptedException ignored) {
Thread.currentThread().interrupt();
}
}
/**
* Fill tree with database metadata.
*/
private void fill() {
lockUI(connLayerPnl, connPnl, nextBtn);
Runnable task = new Task<Void>() {
/** {@inheritDoc} */
@Override protected Void call() throws Exception {
long started = System.currentTimeMillis();
try (Connection conn = connect()) {
pojos = DatabaseMetadataParser.parse(conn);
}
perceptualDelay(started);
return null;
}
/** {@inheritDoc} */
@Override protected void succeeded() {
super.succeeded();
pojosTbl.setItems(pojos);
if (!pojos.isEmpty())
pojosTbl.getSelectionModel().select(pojos.get(0));
unlockUI(connLayerPnl, connPnl, nextBtn);
titleLb.setText("Generate XML And POJOs");
titleLb.setGraphic(imageView("text_tree", 48));
rootPane.setCenter(genLayerPnl);
prevBtn.setDisable(false);
nextBtn.setText("Generate");
tooltip(nextBtn, "Generate XML and POJO files");
}
/** {@inheritDoc} */
@Override protected void cancelled() {
super.cancelled();
unlockUI(connLayerPnl, connPnl, nextBtn);
}
/** {@inheritDoc} */
@Override protected void failed() {
super.succeeded();
unlockUI(connLayerPnl, connPnl, nextBtn);
MessageBox.errorDialog(owner, "Failed to get tables list from database.", getException());
}
};
exec.submit(task);
}
/**
* Generate XML and POJOs.
*/
private void generate() {
final Collection<PojoDescriptor> selPojos = selectedItems();
if (selPojos.isEmpty()) {
MessageBox.warningDialog(owner, "Please select tables to generate XML and POJOs files!");
return;
}
lockUI(genLayerPnl, genPnl, prevBtn, nextBtn);
final String outFolder = outFolderTf.getText();
final String pkg = pkgTf.getText();
final File destFolder = new File(outFolder);
Runnable task = new Task<Void>() {
private void checkEmpty(final PojoDescriptor pojo, Collection<CacheQueryTypeDescriptor> descs,
String msg) {
if (descs.isEmpty()) {
Platform.runLater(new Runnable() {
@Override public void run() {
pojosTbl.getSelectionModel().select(pojo);
}
});
throw new IllegalStateException(msg + pojo.table());
}
}
/** {@inheritDoc} */
@Override protected Void call() throws Exception {
long started = System.currentTimeMillis();
if (!destFolder.exists() && !destFolder.mkdirs())
throw new IOException("Failed to create output folder: " + destFolder);
Collection<CacheQueryTypeMetadata> all = new ArrayList<>();
boolean constructor = pojoConstructorCh.isSelected();
boolean includeKeys = pojoIncludeKeysCh.isSelected();
boolean singleXml = xmlSingleFileCh.isSelected();
ConfirmCallable askOverwrite = new ConfirmCallable(owner, "File already exists: %s\nOverwrite?");
// Generate XML and POJO.
for (PojoDescriptor pojo : selPojos) {
if (pojo.selected()) {
CacheQueryTypeMetadata meta = pojo.metadata(includeKeys);
checkEmpty(pojo, meta.getKeyDescriptors(), "No key fields specified for type: ");
checkEmpty(pojo, meta.getValueDescriptors(), "No value fields specified for type: ");
all.add(meta);
if (!singleXml)
XmlGenerator.generate(pkg, meta, new File(destFolder, meta.getType() + ".xml"),
askOverwrite);
PojoGenerator.generate(pojo, outFolder, pkg, constructor, includeKeys, askOverwrite);
}
}
if (singleXml)
XmlGenerator.generate(pkg, all, new File(outFolder, "Ignite.xml"), askOverwrite);
perceptualDelay(started);
return null;
}
/** {@inheritDoc} */
@Override protected void succeeded() {
super.succeeded();
unlockUI(genLayerPnl, genPnl, prevBtn, nextBtn);
MessageBox.informationDialog(owner, "Generation complete!");
if (openFolderCh.isSelected())
try {
java.awt.Desktop.getDesktop().open(destFolder);
}
catch (IOException e) {
MessageBox.errorDialog(owner, "Failed to open folder with results.", e);
}
}
/** {@inheritDoc} */
@Override protected void cancelled() {
super.cancelled();
unlockUI(genLayerPnl, genPnl, prevBtn, nextBtn);
MessageBox.warningDialog(owner, "Generation canceled.");
}
/** {@inheritDoc} */
@Override protected void failed() {
super.succeeded();
unlockUI(genLayerPnl, genPnl, prevBtn, nextBtn);
MessageBox.errorDialog(owner, "Generation failed.", getException());
}
};
exec.submit(task);
}
/**
* @return Header pane with title label.
*/
private Pane createHeaderPane() {
titleLb = label("");
titleLb.setId("banner");
BorderPane bp = borderPane(null, hBox(10, true, titleLb), null, null, hBox(0, true, imageView("ignite", 48)));
bp.setId("banner");
return bp;
}
/**
* @return Panel with control buttons.
*/
private Pane createButtonsPane() {
prevBtn = button("Prev", "Go to \"Database connection\" page", new EventHandler<ActionEvent>() {
@Override public void handle(ActionEvent evt) {
prev();
}
});
nextBtn = button("Next", "Go to \"POJO and XML generation\" page", new EventHandler<ActionEvent>() {
@Override public void handle(ActionEvent evt) {
next();
}
});
return buttonsPane(Pos.BOTTOM_RIGHT, true, prevBtn, nextBtn);
}
/**
* @return {@code true} if some changes were made to fields metadata.
*/
private boolean changed() {
for (PojoDescriptor pojo : pojos)
if (pojo.changed())
return true;
return false;
}
/**
* Go to "Connect To Database" panel.
*/
private void prev() {
if (changed() && !MessageBox.confirmDialog(owner, "Are you sure you want to return to previous page?\n" +
"This will discard all your changes."))
return;
titleLb.setText("Connect To Database");
titleLb.setGraphic(imageView("data_connection", 48));
rootPane.setCenter(connLayerPnl);
prevBtn.setDisable(true);
nextBtn.setText("Next");
tooltip(nextBtn, "Go to \"XML and POJO generation\" page");
}
/**
* Check that text field is non empty.
*
* @param tf Text field to check.
* @param trim If {@code true} then
* @param msg Warning message.
* @return {@code true} If text field is empty.
*/
private boolean checkInput(TextField tf, boolean trim, String msg) {
String s = tf.getText();
s = trim ? s.trim() : s;
if (s.isEmpty()) {
tf.requestFocus();
MessageBox.warningDialog(owner, msg);
return true;
}
return false;
}
/**
* Go to "Generate XML And POJOs" panel or generate XML and POJOs.
*/
private void next() {
if (rootPane.getCenter() == connLayerPnl) {
if (checkInput(jdbcDrvJarTf, true, "Path to JDBC driver is not specified!") ||
checkInput(jdbcDrvClsTf, true, "JDBC driver class name is not specified!") ||
checkInput(jdbcUrlTf, true, "JDBC URL connection string is not specified!") ||
checkInput(userTf, true, "User name is not specified!"))
return;
fill();
}
else
generate();
}
/**
* Connect to database.
*
* @return Connection to database.
* @throws SQLException if connection failed.
*/
private Connection connect() throws SQLException {
String drvCls = jdbcDrvClsTf.getText();
Driver drv = drivers.get(drvCls);
if (drv == null) {
String path = jdbcDrvJarTf.getText().trim();
if (path.isEmpty())
throw new IllegalStateException("Driver jar file name is not specified");
File drvJar = new File(jdbcDrvJarTf.getText());
if (!drvJar.exists())
throw new IllegalStateException("Driver jar file is not found");
try {
URL u = new URL("jar:" + drvJar.toURI() + "!/");
URLClassLoader ucl = URLClassLoader.newInstance(new URL[] {u});
drv = (Driver)Class.forName(drvCls, true, ucl).newInstance();
drivers.put(drvCls, drv);
}
catch (Throwable e) {
throw new IllegalStateException(e);
}
}
String user = userTf.getText().trim();
String pwd = pwdTf.getText().trim();
Properties info = new Properties();
if (!user.isEmpty())
info.put("user", user);
if (!pwd.isEmpty())
info.put("password", pwd);
return drv.connect(jdbcUrlTf.getText(), info);
}
/**
 * Create connection pane with controls.
 *
 * @return Layered pane with connection controls.
 */
private Pane createConnectionPane() {
    connPnl = paneEx(10, 10, 0, 10);

    // Label column, stretching input column, fixed-width browse-button column.
    connPnl.addColumn();
    connPnl.addColumn(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    connPnl.addColumn(35, 35, 35, Priority.NEVER);

    // Fix: "POGO" typo in user-visible text ("POJO classes").
    connPnl.add(text("This utility is designed to automatically generate configuration XML files and" +
        " POJO classes from database schema information.", 550), 3);

    connPnl.wrap();

    jdbcDrvJarTf = connPnl.addLabeled("Driver JAR:", textField("Path to driver jar"));

    connPnl.add(button("...", "Select JDBC driver jar or zip", new EventHandler<ActionEvent>() {
        /** {@inheritDoc} */
        @Override public void handle(ActionEvent evt) {
            FileChooser fc = new FileChooser();

            fc.getExtensionFilters().addAll(
                new FileChooser.ExtensionFilter("JDBC Drivers (*.jar)", "*.jar"),
                new FileChooser.ExtensionFilter("ZIP archives (*.zip)", "*.zip"));

            File drvJar = fc.showOpenDialog(owner);

            if (drvJar != null)
                jdbcDrvJarTf.setText(drvJar.getAbsolutePath());
        }
    }));

    jdbcDrvClsTf = connPnl.addLabeled("JDBC Driver:", textField("Class name for JDBC driver"), 2);
    jdbcUrlTf = connPnl.addLabeled("JDBC URL:", textField("JDBC URL of the database connection string"), 2);
    userTf = connPnl.addLabeled("User:", textField("User name"), 2);
    pwdTf = connPnl.addLabeled("Password:", passwordField("User password"), 2);

    connLayerPnl = stackPane(connPnl);

    return connLayerPnl;
}
/**
 * Check that the proposed class name does not clash with the key or value
 * class name of any other POJO descriptor.
 *
 * @param pojo Current edited POJO.
 * @param newVal New value for class name.
 * @param key {@code true} if key class name is checked.
 * @return {@code true} if class name is valid.
 */
private boolean checkClassNameUnique(PojoDescriptor pojo, String newVal, boolean key) {
    for (PojoDescriptor other : pojos) {
        if (other == pojo)
            continue;

        // Clash against either role of the other descriptor is forbidden.
        if (newVal.equals(other.keyClassName()) || newVal.equals(other.valueClassName())) {
            MessageBox.warningDialog(owner, (key ? "Key" : "Value") + " class name must be unique!");

            return false;
        }
    }

    return true;
}
/**
 * Check if new class name is valid: it must differ from the descriptor's
 * other class name and be unique across all descriptors.
 *
 * @param pojo Current edited POJO.
 * @param newVal New value for class name.
 * @param key {@code true} if key class name is checked.
 * @return {@code true} if class name is valid.
 */
private boolean checkClassName(PojoDescriptor pojo, String newVal, boolean key) {
    // The key and value class names of a single descriptor must differ.
    String opposite = key ? pojo.valueClassName() : pojo.keyClassName();

    if (newVal.equals(opposite)) {
        MessageBox.warningDialog(owner, key
            ? "Key class name must be different from value class name!"
            : "Value class name must be different from key class name!");

        return false;
    }

    return checkClassNameUnique(pojo, newVal, key);
}
/**
 * Create generate pane with controls: tables/fields tables, output options,
 * and the regexp-based bulk rename panel.
 */
private void createGeneratePane() {
    genPnl = paneEx(10, 10, 0, 10);
    genPnl.addColumn();
    genPnl.addColumn(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    genPnl.addColumn(35, 35, 35, Priority.NEVER);
    genPnl.addRow(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    genPnl.addRows(8);
    TableColumn<PojoDescriptor, Boolean> useCol = customColumn("Schema / Table", "use",
        "If checked then this table will be used for XML and POJOs generation", PojoDescriptorCell.cellFactory());
    TableColumn<PojoDescriptor, String> keyClsCol = textColumn("Key Class Name", "keyClassName", "Key class name",
        new TextColumnValidator<PojoDescriptor>() {
            @Override public boolean valid(PojoDescriptor rowVal, String newVal) {
                return checkClassName(rowVal, newVal, true);
            }
        });
    TableColumn<PojoDescriptor, String> valClsCol = textColumn("Value Class Name", "valueClassName", "Value class name",
        new TextColumnValidator<PojoDescriptor>() {
            @Override public boolean valid(PojoDescriptor rowVal, String newVal) {
                return checkClassName(rowVal, newVal, false);
            }
        });
    pojosTbl = tableView("Tables not found in database", useCol, keyClsCol, valClsCol);
    TableColumn<PojoField, Boolean> keyCol = booleanColumn("Key", "key",
        "If checked then this field will be part of key object");
    TableColumn<PojoField, String> dbNameCol = tableColumn("DB Name", "dbName", "Field name in database");
    TableColumn<PojoField, String> dbTypeNameCol = tableColumn("DB Type", "dbTypeName", "Field type in database");
    TableColumn<PojoField, String> javaNameCol = textColumn("Java Name", "javaName", "Field name in POJO class",
        new TextColumnValidator<PojoField>() {
            @Override public boolean valid(PojoField rowVal, String newVal) {
                // Java names must be unique within the currently selected table.
                for (PojoField field : curPojo.fields())
                    if (rowVal != field && newVal.equals(field.javaName())) {
                        MessageBox.warningDialog(owner, "Java name must be unique!");
                        return false;
                    }
                return true;
            }
        });
    TableColumn<PojoField, String> javaTypeNameCol = customColumn("Java Type", "javaTypeName",
        "Field java type in POJO class", JavaTypeCell.cellFactory());
    final TableView<PojoField> fieldsTbl = tableView("Select table to see table columns",
        keyCol, dbNameCol, dbTypeNameCol, javaNameCol, javaTypeNameCol);
    genPnl.add(splitPane(pojosTbl, fieldsTbl, 0.6), 3);
    final GridPaneEx keyValPnl = paneEx(0, 0, 0, 0);
    keyValPnl.addColumn(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    keyValPnl.addColumn();
    keyValPnl.addColumn(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    keyValPnl.addColumn();
    pkgTf = genPnl.addLabeled("Package:", textField("Package that will be used for POJOs generation"), 2);
    outFolderTf = genPnl.addLabeled("Output Folder:", textField("Output folder for XML and POJOs files"));
    genPnl.add(button("...", "Select output folder", new EventHandler<ActionEvent>() {
        @Override public void handle(ActionEvent evt) {
            DirectoryChooser dc = new DirectoryChooser();
            File folder = dc.showDialog(owner);
            if (folder != null)
                outFolderTf.setText(folder.getAbsolutePath());
        }
    }));
    pojoIncludeKeysCh = genPnl.add(checkBox("Include key fields into value POJOs",
        "If selected then include key fields into value object", true), 3);
    pojoConstructorCh = genPnl.add(checkBox("Generate constructors for POJOs",
        "If selected then generate empty and full constructors for POJOs", false), 3);
    xmlSingleFileCh = genPnl.add(checkBox("Write all configurations to a single XML file",
        "If selected then all configurations will be saved into the file 'Ignite.xml'", true), 3);
    openFolderCh = genPnl.add(checkBox("Reveal output folder",
        "Open output folder in system file manager after generation complete", true), 3);
    genPnl.add(new Separator(), 3);
    GridPaneEx regexPnl = paneEx(0, 0, 0, 0);
    regexPnl.addColumn();
    regexPnl.addColumn(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    regexPnl.addColumn();
    regexPnl.addColumn(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    regexPnl.add(label("Replace \"Key class name\", \"Value class name\" or \"Java name\" for selected tables:"), 4);
    regexTf = regexPnl.addLabeled("  Regexp:", textField("Regular expression. For example: (\\w+)"));
    replaceTf = regexPnl.addLabeled("  Replace with:", textField("Replace text. For example: $1_SomeText"));
    final ComboBox<String> replaceCb = regexPnl.addLabeled("  Replace:", comboBox("Replacement target",
        "Key class names", "Value class names", "Java names"));
    regexPnl.add(buttonsPane(Pos.CENTER_LEFT, false,
        button("Rename Selected", "Replaces each substring of this string that matches the given regular expression" +
            " with the given replacement.",
            new EventHandler<ActionEvent>() {
                @Override public void handle(ActionEvent evt) {
                    if (checkInput(regexTf, false, "Regular expression should not be empty!"))
                        return;
                    String sel = replaceCb.getSelectionModel().getSelectedItem();
                    boolean renFields = "Java names".equals(sel);
                    String src = (renFields ? "fields" : "tables");
                    String target = "\"" + sel +"\"";
                    Collection<PojoDescriptor> selItems = selectedItems();
                    if (selItems.isEmpty()) {
                        MessageBox.warningDialog(owner, "Please select " + src + " to rename " + target + "!");
                        return;
                    }
                    if (!MessageBox.confirmDialog(owner, "Are you sure you want to rename " + target +
                        " for all selected " + src + "?"))
                        return;
                    String regex = regexTf.getText();
                    String replace = replaceTf.getText();
                    try {
                        // NOTE(review): this always renames field java names regardless of the
                        // selected target ("Key class names"/"Value class names" are never
                        // renamed) - confirm whether class-name renaming is intended here.
                        for (PojoDescriptor pojo : selItems)
                            for (PojoField field : pojo.fields())
                                field.javaName(field.javaName().replaceAll(regex, replace));
                    }
                    catch (Exception e) {
                        MessageBox.errorDialog(owner, "Failed to rename " + target + "!", e);
                    }
                }
            }),
        button("Reset Selected", "Revert changes for selected items to initial values", new EventHandler<ActionEvent>() {
            @Override public void handle(ActionEvent evt) {
                Collection<PojoDescriptor> selItems = selectedItems();
                String sel = replaceCb.getSelectionModel().getSelectedItem();
                boolean renFields = "Java names".equals(sel);
                String src = (renFields ? "fields" : "tables");
                String target = "\"" + sel +"\"";
                if (selItems.isEmpty()) {
                    // Fix: missing space before "to revert" in the warning message.
                    MessageBox.warningDialog(owner, "Please select " + src + " to revert " + target + "!");
                    return;
                }
                if (!MessageBox.confirmDialog(owner,
                    "Are you sure you want to revert " + target + " for all selected " + src + "?"))
                    return;
                for (PojoDescriptor pojo : selItems)
                    pojo.revertJavaNames();
            }
        })
    ), 2).setPadding(new Insets(0, 0, 0, 10));
    pojosTbl.getSelectionModel().selectedItemProperty().addListener(new ChangeListener<PojoDescriptor>() {
        @Override public void changed(ObservableValue<? extends PojoDescriptor> val,
            PojoDescriptor oldVal, PojoDescriptor newItem) {
            // Only table rows (parent != null) expose editable fields; schema
            // rows clear the fields table and disable key/value controls.
            if (newItem != null && newItem.parent() != null) {
                curPojo = newItem;
                fieldsTbl.setItems(curPojo.fields());
                fieldsTbl.getSelectionModel().select(0);
                keyValPnl.setDisable(false);
            }
            else {
                curPojo = null;
                fieldsTbl.setItems(NO_FIELDS);
                keyValPnl.setDisable(true);
            }
        }
    });
    genPnl.add(regexPnl, 3);
    genLayerPnl = stackPane(genPnl);
}
/**
 * Collect all POJO descriptors currently marked as selected.
 *
 * @return Selected tree view items (possibly empty, never {@code null}).
 */
private Collection<PojoDescriptor> selectedItems() {
    Collection<PojoDescriptor> sel = new ArrayList<>();

    for (PojoDescriptor desc : pojos) {
        if (desc.selected())
            sel.add(desc);
    }

    return sel;
}
/** {@inheritDoc} */
@Override public void start(Stage primaryStage) {
    owner = primaryStage;
    primaryStage.setTitle("Apache Ignite Auto Schema Load Utility");
    // Provide several icon sizes so the OS can pick the best fit.
    primaryStage.getIcons().addAll(
        image("ignite", 16),
        image("ignite", 24),
        image("ignite", 32),
        image("ignite", 48),
        image("ignite", 64),
        image("ignite", 128));
    pi = progressIndicator(50);
    createGeneratePane();
    rootPane = borderPane(createHeaderPane(), createConnectionPane(), createButtonsPane(), null, null);
    primaryStage.setScene(scene(rootPane));
    primaryStage.setWidth(600);
    primaryStage.setMinWidth(600);
    primaryStage.setHeight(650);
    primaryStage.setMinHeight(650);
    // Start on the "Connect To Database" page.
    prev();
    Preferences userPrefs = Preferences.userNodeForPackage(getClass());
    // Restore window pos and size.
    if (userPrefs.get("window.x", null) != null) {
        double x = userPrefs.getDouble("window.x", 100);
        double y = userPrefs.getDouble("window.y", 100);
        double w = userPrefs.getDouble("window.width", 650);
        double h = userPrefs.getDouble("window.height", 650);
        // Ensure that window fit any available screen.
        if (!Screen.getScreensForRectangle(x, y, w, h).isEmpty()) {
            if (x > 0)
                primaryStage.setX(x);
            if (y > 0)
                primaryStage.setY(y);
            primaryStage.setWidth(w);
            primaryStage.setHeight(h);
        }
    }
    else
        primaryStage.centerOnScreen();
    // Normalize separators so the defaults below work on Windows too.
    String userHome = System.getProperty("user.home").replace('\\', '/');
    // Restore connection pane settings (H2 defaults on first run).
    jdbcDrvJarTf.setText(userPrefs.get("jdbc.driver.jar", "h2.jar"));
    jdbcDrvClsTf.setText(userPrefs.get("jdbc.driver.class", "org.h2.Driver"));
    jdbcUrlTf.setText(userPrefs.get("jdbc.url", "jdbc:h2:" + userHome + "/schema-load/db"));
    userTf.setText(userPrefs.get("jdbc.user", "sa"))
;
    // Restore generation pane settings.
    outFolderTf.setText(userPrefs.get("out.folder", userHome + "/schema-load/out"));
    openFolderCh.setSelected(userPrefs.getBoolean("out.folder.open", true));
    pkgTf.setText(userPrefs.get("pojo.package", "org.apache.ignite"));
    pojoIncludeKeysCh.setSelected(userPrefs.getBoolean("pojo.include", true));
    pojoConstructorCh.setSelected(userPrefs.getBoolean("pojo.constructor", false));
    xmlSingleFileCh.setSelected(userPrefs.getBoolean("xml.single", true));
    regexTf.setText(userPrefs.get("naming.pattern", ""));
    replaceTf.setText(userPrefs.get("naming.replace", ""));
    primaryStage.show();
}
/** {@inheritDoc} */
@Override public void stop() throws Exception {
    // Persist UI state so the next launch restores it in start().
    // Note: the password is deliberately not saved.
    Preferences userPrefs = Preferences.userNodeForPackage(getClass());
    // Save window pos and size.
    userPrefs.putDouble("window.x", owner.getX());
    userPrefs.putDouble("window.y", owner.getY());
    userPrefs.putDouble("window.width", owner.getWidth());
    userPrefs.putDouble("window.height", owner.getHeight());
    // Save connection pane settings.
    userPrefs.put("jdbc.driver.jar", jdbcDrvJarTf.getText());
    userPrefs.put("jdbc.driver.class", jdbcDrvClsTf.getText());
    userPrefs.put("jdbc.url", jdbcUrlTf.getText());
    userPrefs.put("jdbc.user", userTf.getText());
    // Save generation pane settings.
    userPrefs.put("out.folder", outFolderTf.getText());
    userPrefs.putBoolean("out.folder.open", openFolderCh.isSelected());
    userPrefs.put("pojo.package", pkgTf.getText());
    userPrefs.putBoolean("pojo.include", pojoIncludeKeysCh.isSelected());
    userPrefs.putBoolean("pojo.constructor", pojoConstructorCh.isSelected());
    userPrefs.putBoolean("xml.single", xmlSingleFileCh.isSelected());
    userPrefs.put("naming.pattern", regexTf.getText());
    userPrefs.put("naming.replace", replaceTf.getText());
}
/**
 * Schema load utility launcher.
 *
 * @param args Command line arguments passed to the application.
 */
public static void main(String[] args) {
    // Workaround for JavaFX ugly text AA.
    System.setProperty("prism.lcdtext", "false");
    System.setProperty("prism.text", "t2k");
    // Workaround for AWT + JavaFX: we should initialize AWT before JavaFX.
    java.awt.Toolkit.getDefaultToolkit();
    // Workaround for JavaFX + Mac OS dock icon: set it reflectively via the
    // Apple-specific com.apple.eawt.Application class (absent on other JDKs).
    if (System.getProperty("os.name").toLowerCase().contains("mac os")) {
        System.setProperty("javafx.macosx.embedded", "true");
        try {
            Class<?> appCls = Class.forName("com.apple.eawt.Application");
            Object osxApp = appCls.getDeclaredMethod("getApplication").invoke(null);
            appCls.getDeclaredMethod("setDockIconImage", java.awt.Image.class)
                .invoke(osxApp, fromFXImage(image("ignite", 128), null));
        }
        catch (Throwable ignore) {
            // No-op: the dock icon is cosmetic; never fail startup over it.
        }
    }
    launch(args);
}
/**
 * Special table cell to select possible java type conversions.
 *
 * Shows the current java type as plain text and switches to a combo box of
 * the row's allowed conversions while the cell is being edited.
 */
private static class JavaTypeCell extends TableCell<PojoField, String> {
    /** Combo box. */
    private final ComboBox<String> comboBox;
    /** Creates a ComboBox cell factory for use in TableColumn controls. */
    public static Callback<TableColumn<PojoField, String>, TableCell<PojoField, String>> cellFactory() {
        return new Callback<TableColumn<PojoField, String>, TableCell<PojoField, String>>() {
            @Override public TableCell<PojoField, String> call(TableColumn<PojoField, String> col) {
                return new JavaTypeCell();
            }
        };
    }
    /**
     * Default constructor.
     */
    private JavaTypeCell() {
        comboBox = new ComboBox<>(FXCollections.<String>emptyObservableList());
        comboBox.valueProperty().addListener(new ChangeListener<String>() {
            @Override public void changed(ObservableValue<? extends String> val, String oldVal, String newVal) {
                // Commit only while editing, so programmatic selection changes
                // (e.g. in updateItem) do not trigger spurious commits.
                if (isEditing())
                    commitEdit(newVal);
            }
        });
        getStyleClass().add("combo-box-table-cell");
    }
    /** {@inheritDoc} */
    @Override public void startEdit() {
        // Editing makes sense only when there is more than one conversion.
        if (comboBox.getItems().size() > 1) {
            comboBox.getSelectionModel().select(getItem());
            super.startEdit();
            setText(null);
            setGraphic(comboBox);
        }
    }
    /** {@inheritDoc} */
    @Override public void cancelEdit() {
        super.cancelEdit();
        // Restore plain-text rendering of the current value.
        setText(getItem());
        setGraphic(null);
    }
    /** {@inheritDoc} */
    @Override public void updateItem(String item, boolean empty) {
        super.updateItem(item, empty);
        setGraphic(null);
        if (!empty) {
            setText(item);
            TableRow row = getTableRow();
            if (row != null) {
                PojoField pojo = (PojoField)row.getItem();
                if (pojo != null) {
                    // Refresh the combo with this row's allowed conversions.
                    comboBox.setItems(pojo.conversions());
                    comboBox.getSelectionModel().select(pojo.javaTypeName());
                }
            }
        }
    }
}
/**
 * Special table cell to select schema or table.
 *
 * Renders a check box bound bidirectionally to the descriptor's "use" and
 * "indeterminate" properties; table rows are indented relative to schemas.
 */
private static class PojoDescriptorCell extends TableCell<PojoDescriptor, Boolean> {
    /** Creates a ComboBox cell factory for use in TableColumn controls. */
    public static Callback<TableColumn<PojoDescriptor, Boolean>, TableCell<PojoDescriptor, Boolean>> cellFactory() {
        return new Callback<TableColumn<PojoDescriptor, Boolean>, TableCell<PojoDescriptor, Boolean>>() {
            @Override public TableCell<PojoDescriptor, Boolean> call(TableColumn<PojoDescriptor, Boolean> col) {
                return new PojoDescriptorCell();
            }
        };
    }
    /** Previous POJO bound to cell. */
    private PojoDescriptor prevPojo;
    /** {@inheritDoc} */
    @Override public void updateItem(Boolean item, boolean empty) {
        super.updateItem(item, empty);
        if (!empty) {
            TableRow row = getTableRow();
            if (row != null) {
                final PojoDescriptor pojo = (PojoDescriptor)row.getItem();
                // Rebuild the graphic only when the cell is re-bound to a
                // different descriptor, to avoid re-binding on every refresh.
                if (pojo != prevPojo) {
                    prevPojo = pojo;
                    // Descriptors with a parent are tables; roots are schemas.
                    boolean isTbl = pojo.parent() != null;
                    CheckBox ch = new CheckBox(isTbl ? pojo.table() : pojo.schema());
                    ch.setAllowIndeterminate(false);
                    ch.setMnemonicParsing(false);
                    ch.indeterminateProperty().bindBidirectional(pojo.indeterminate());
                    ch.selectedProperty().bindBidirectional(pojo.useProperty());
                    Pane pnl = new HBox();
                    // Indent table rows under their schema.
                    pnl.setPadding(new Insets(0, 0, 0, isTbl ? 25 : 5));
                    pnl.getChildren().add(ch);
                    setGraphic(pnl);
                }
            }
        }
    }
}
}
|
modules/schema-load/src/main/java/org/apache/ignite/schema/ui/SchemaLoadApp.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.schema.ui;
import javafx.application.*;
import javafx.beans.value.*;
import javafx.collections.*;
import javafx.concurrent.*;
import javafx.event.*;
import javafx.geometry.*;
import javafx.scene.*;
import javafx.scene.control.*;
import javafx.scene.layout.*;
import javafx.stage.*;
import javafx.util.*;
import org.apache.ignite.cache.query.*;
import org.apache.ignite.schema.generator.*;
import org.apache.ignite.schema.model.*;
import org.apache.ignite.schema.parser.*;
import java.io.*;
import java.net.*;
import java.sql.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.prefs.*;
import static javafx.embed.swing.SwingFXUtils.*;
import static org.apache.ignite.schema.ui.Controls.*;
/**
* Schema load application.
*/
@SuppressWarnings("UnnecessaryFullyQualifiedName")
public class SchemaLoadApp extends Application {
/** */
private Stage owner;
/** */
private BorderPane rootPane;
/** */
private Label titleLb;
/** */
private Button prevBtn;
/** */
private Button nextBtn;
/** */
private TextField jdbcDrvJarTf;
/** */
private TextField jdbcDrvClsTf;
/** */
private TextField jdbcUrlTf;
/** */
private TextField userTf;
/** */
private PasswordField pwdTf;
/** */
private GridPaneEx connPnl;
/** */
private StackPane connLayerPnl;
/** */
private TableView<PojoDescriptor> pojosTbl;
/** */
private TextField outFolderTf;
/** */
private TextField pkgTf;
/** */
private CheckBox pojoConstructorCh;
/** */
private CheckBox pojoIncludeKeysCh;
/** */
private CheckBox xmlSingleFileCh;
/** */
private CheckBox openFolderCh;
/** */
private TextField regexTf;
/** */
private TextField replaceTf;
/** */
private GridPaneEx genPnl;
/** */
private StackPane genLayerPnl;
/** */
private ProgressIndicator pi;
/** List with POJOs descriptors. */
private ObservableList<PojoDescriptor> pojos = FXCollections.emptyObservableList();
/** Currently selected POJO. */
private PojoDescriptor curPojo;
/** */
private final Map<String, Driver> drivers = new HashMap<>();
/** */
private static final ObservableList<PojoField> NO_FIELDS = FXCollections.emptyObservableList();
/** */
private final ExecutorService exec = Executors.newSingleThreadExecutor(new ThreadFactory() {
@Override public Thread newThread(Runnable r) {
Thread t = new Thread(r, "schema-load-worker");
t.setDaemon(true);
return t;
}
});
/**
 * Lock UI before start long task: disable the given controls and show the
 * shared progress indicator on top of the layer.
 *
 * @param layer Stack pane to add progress indicator.
 * @param controls Controls to disable.
 */
private void lockUI(StackPane layer, Node... controls) {
    for (Node ctrl : controls) {
        ctrl.setDisable(true);
    }

    layer.getChildren().add(pi);
}
/**
 * Unlock UI after long task finished: re-enable the given controls and
 * remove the shared progress indicator from the layer.
 *
 * @param layer Stack pane to remove progress indicator.
 * @param controls Controls to enable.
 */
private void unlockUI(StackPane layer, Node... controls) {
    for (Node ctrl : controls) {
        ctrl.setDisable(false);
    }

    layer.getChildren().remove(pi);
}
/**
 * Perceptual delay to avoid UI flickering: pad short background tasks so
 * the progress indicator stays visible for at least ~500 ms.
 *
 * @param started Time when background progress started.
 */
private void perceptualDelay(long started) {
    long remaining = 500 - (System.currentTimeMillis() - started);

    if (remaining > 0) {
        try {
            Thread.sleep(remaining);
        }
        catch (InterruptedException ignored) {
            // Restore the interrupt flag for callers that care.
            Thread.currentThread().interrupt();
        }
    }
}
/**
 * Fill tree with database metadata.
 *
 * Parses tables on a background thread; the {@link Task} callbacks update
 * the UI on the JavaFX application thread.
 */
private void fill() {
    lockUI(connLayerPnl, connPnl, nextBtn);

    Runnable task = new Task<Void>() {
        /** {@inheritDoc} */
        @Override protected Void call() throws Exception {
            long started = System.currentTimeMillis();

            try (Connection conn = connect()) {
                pojos = DatabaseMetadataParser.parse(conn);
            }

            // Keep the progress indicator visible long enough to avoid flicker.
            perceptualDelay(started);

            return null;
        }

        /** {@inheritDoc} */
        @Override protected void succeeded() {
            super.succeeded();

            pojosTbl.setItems(pojos);

            if (!pojos.isEmpty())
                pojosTbl.getSelectionModel().select(pojos.get(0));

            unlockUI(connLayerPnl, connPnl, nextBtn);

            // Switch to the generation page.
            titleLb.setText("Generate XML And POJOs");
            titleLb.setGraphic(imageView("text_tree", 48));

            rootPane.setCenter(genLayerPnl);

            prevBtn.setDisable(false);
            nextBtn.setText("Generate");
            tooltip(nextBtn, "Generate XML and POJO files");
        }

        /** {@inheritDoc} */
        @Override protected void cancelled() {
            super.cancelled();

            unlockUI(connLayerPnl, connPnl, nextBtn);
        }

        /** {@inheritDoc} */
        @Override protected void failed() {
            // Fix: the original mistakenly called super.succeeded() here.
            super.failed();

            unlockUI(connLayerPnl, connPnl, nextBtn);

            MessageBox.errorDialog(owner, "Failed to get tables list from database.", getException());
        }
    };

    exec.submit(task);
}
/**
 * Generate XML and POJOs for all selected tables on a background thread.
 */
private void generate() {
    final Collection<PojoDescriptor> selPojos = selectedItems();

    if (selPojos.isEmpty()) {
        MessageBox.warningDialog(owner, "Please select tables to generate XML and POJOs files!");

        return;
    }

    lockUI(genLayerPnl, genPnl, prevBtn, nextBtn);

    final String outFolder = outFolderTf.getText();
    final String pkg = pkgTf.getText();
    final File destFolder = new File(outFolder);

    Runnable task = new Task<Void>() {
        /**
         * Abort generation (and select the offending table in the UI) when a
         * table has no key or value fields configured.
         */
        private void checkEmpty(final PojoDescriptor pojo, Collection<CacheQueryTypeDescriptor> descs,
            String msg) {
            if (descs.isEmpty()) {
                Platform.runLater(new Runnable() {
                    @Override public void run() {
                        pojosTbl.getSelectionModel().select(pojo);
                    }
                });

                throw new IllegalStateException(msg + pojo.table());
            }
        }

        /** {@inheritDoc} */
        @Override protected Void call() throws Exception {
            long started = System.currentTimeMillis();

            if (!destFolder.exists() && !destFolder.mkdirs())
                throw new IOException("Failed to create output folder: " + destFolder);

            Collection<CacheQueryTypeMetadata> all = new ArrayList<>();

            boolean constructor = pojoConstructorCh.isSelected();
            boolean includeKeys = pojoIncludeKeysCh.isSelected();
            boolean singleXml = xmlSingleFileCh.isSelected();

            ConfirmCallable askOverwrite = new ConfirmCallable(owner, "File already exists: %s\nOverwrite?");

            // Generate XML and POJO.
            for (PojoDescriptor pojo : selPojos) {
                if (pojo.selected()) {
                    CacheQueryTypeMetadata meta = pojo.metadata(includeKeys);

                    checkEmpty(pojo, meta.getKeyDescriptors(), "No key fields specified for type: ");
                    checkEmpty(pojo, meta.getValueDescriptors(), "No value fields specified for type: ");

                    all.add(meta);

                    if (!singleXml)
                        XmlGenerator.generate(pkg, meta, new File(destFolder, meta.getType() + ".xml"),
                            askOverwrite);

                    PojoGenerator.generate(pojo, outFolder, pkg, constructor, includeKeys, askOverwrite);
                }
            }

            if (singleXml)
                XmlGenerator.generate(pkg, all, new File(outFolder, "Ignite.xml"), askOverwrite);

            // Keep the progress indicator visible long enough to avoid flicker.
            perceptualDelay(started);

            return null;
        }

        /** {@inheritDoc} */
        @Override protected void succeeded() {
            super.succeeded();

            unlockUI(genLayerPnl, genPnl, prevBtn, nextBtn);

            MessageBox.informationDialog(owner, "Generation complete!");

            if (openFolderCh.isSelected())
                try {
                    java.awt.Desktop.getDesktop().open(destFolder);
                }
                catch (IOException e) {
                    MessageBox.errorDialog(owner, "Failed to open folder with results.", e);
                }
        }

        /** {@inheritDoc} */
        @Override protected void cancelled() {
            super.cancelled();

            unlockUI(genLayerPnl, genPnl, prevBtn, nextBtn);

            MessageBox.warningDialog(owner, "Generation canceled.");
        }

        /** {@inheritDoc} */
        @Override protected void failed() {
            // Fix: the original mistakenly called super.succeeded() here.
            super.failed();

            unlockUI(genLayerPnl, genPnl, prevBtn, nextBtn);

            MessageBox.errorDialog(owner, "Generation failed.", getException());
        }
    };

    exec.submit(task);
}
/**
 * Build the banner header: title label on the left, Ignite logo on the right.
 *
 * @return Header pane with title label.
 */
private Pane createHeaderPane() {
    titleLb = label("");
    titleLb.setId("banner");

    Pane logoBox = hBox(0, true, imageView("ignite", 48));

    BorderPane hdr = borderPane(null, hBox(10, true, titleLb), null, null, logoBox);
    hdr.setId("banner");

    return hdr;
}
/**
 * Build the bottom navigation bar with the "Prev"/"Next" wizard buttons.
 *
 * @return Panel with control buttons.
 */
private Pane createButtonsPane() {
    EventHandler<ActionEvent> onPrev = new EventHandler<ActionEvent>() {
        @Override public void handle(ActionEvent evt) {
            prev();
        }
    };

    EventHandler<ActionEvent> onNext = new EventHandler<ActionEvent>() {
        @Override public void handle(ActionEvent evt) {
            next();
        }
    };

    prevBtn = button("Prev", "Go to \"Database connection\" page", onPrev);
    nextBtn = button("Next", "Go to \"POJO and XML generation\" page", onNext);

    return buttonsPane(Pos.BOTTOM_RIGHT, true, prevBtn, nextBtn);
}
/**
 * @return {@code true} if some changes were made to fields metadata.
 */
private boolean changed() {
    for (PojoDescriptor desc : pojos) {
        if (desc.changed())
            return true;
    }

    return false;
}
/**
 * Go to "Connect To Database" panel.
 */
private void prev() {
    // Warn before leaving the generation page: going back discards edits.
    if (changed() && !MessageBox.confirmDialog(owner, "Are you sure you want to return to previous page?\n" +
        "This will discard all your changes."))
        return;
    titleLb.setText("Connect To Database");
    titleLb.setGraphic(imageView("data_connection", 48));
    rootPane.setCenter(connLayerPnl);
    // Nothing precedes the connection page.
    prevBtn.setDisable(true);
    nextBtn.setText("Next");
    tooltip(nextBtn, "Go to \"XML and POJO generation\" page");
}
/**
 * Check that text field is non empty; warn the user and focus the field if it is.
 *
 * @param tf Text field to check.
 * @param trim If {@code true} then field text is trimmed before the emptiness check.
 * @param msg Warning message.
 * @return {@code true} If text field is empty.
 */
private boolean checkInput(TextField tf, boolean trim, String msg) {
    String s = tf.getText();
    s = trim ? s.trim() : s;
    if (s.isEmpty()) {
        // Focus the offending field so the user can correct it right away.
        tf.requestFocus();
        MessageBox.warningDialog(owner, msg);
        return true;
    }
    return false;
}
/**
 * Go to "Generate XML And POJOs" panel or generate XML and POJOs.
 */
private void next() {
    // "Next" is context-sensitive: on the connection page it validates input
    // and loads metadata; on the generation page it runs file generation.
    if (rootPane.getCenter() == connLayerPnl) {
        if (checkInput(jdbcDrvJarTf, true, "Path to JDBC driver is not specified!") ||
            checkInput(jdbcDrvClsTf, true, "JDBC driver class name is not specified!") ||
            checkInput(jdbcUrlTf, true, "JDBC URL connection string is not specified!") ||
            checkInput(userTf, true, "User name is not specified!"))
            return;
        fill();
    }
    else
        generate();
}
/**
 * Connect to database using the driver jar and class specified in the UI.
 *
 * Loaded drivers are cached by class name so repeated connections do not
 * reload the jar; each driver is loaded via an isolated URLClassLoader so
 * it does not need to be on the application class path.
 *
 * @return Connection to database.
 * @throws SQLException if connection failed.
 */
private Connection connect() throws SQLException {
    String drvCls = jdbcDrvClsTf.getText();

    Driver drv = drivers.get(drvCls);

    if (drv == null) {
        String path = jdbcDrvJarTf.getText().trim();

        if (path.isEmpty())
            throw new IllegalStateException("Driver jar file name is not specified");

        // Fix: use the already-trimmed path for the existence check
        // (the original re-read the raw, untrimmed field text here).
        File drvJar = new File(path);

        if (!drvJar.exists())
            throw new IllegalStateException("Driver jar file is not found");

        try {
            URL u = new URL("jar:" + drvJar.toURI() + "!/");

            URLClassLoader ucl = URLClassLoader.newInstance(new URL[] {u});

            drv = (Driver)Class.forName(drvCls, true, ucl).newInstance();

            drivers.put(drvCls, drv);
        }
        catch (Throwable e) {
            // Wrap any loading failure (class not found, bad jar, ...).
            throw new IllegalStateException(e);
        }
    }

    String user = userTf.getText().trim();
    String pwd = pwdTf.getText().trim();

    Properties info = new Properties();

    // JDBC drivers reject empty user/password properties; only set when present.
    if (!user.isEmpty())
        info.put("user", user);

    if (!pwd.isEmpty())
        info.put("password", pwd);

    return drv.connect(jdbcUrlTf.getText(), info);
}
/**
 * Create connection pane with controls.
 *
 * @return Layered pane with connection controls.
 */
private Pane createConnectionPane() {
    connPnl = paneEx(10, 10, 0, 10);

    // Label column, stretching input column, fixed-width browse-button column.
    connPnl.addColumn();
    connPnl.addColumn(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    connPnl.addColumn(35, 35, 35, Priority.NEVER);

    // Fix: "POGO" typo in user-visible text ("POJO classes").
    connPnl.add(text("This utility is designed to automatically generate configuration XML files and" +
        " POJO classes from database schema information.", 550), 3);

    connPnl.wrap();

    jdbcDrvJarTf = connPnl.addLabeled("Driver JAR:", textField("Path to driver jar"));

    connPnl.add(button("...", "Select JDBC driver jar or zip", new EventHandler<ActionEvent>() {
        /** {@inheritDoc} */
        @Override public void handle(ActionEvent evt) {
            FileChooser fc = new FileChooser();

            fc.getExtensionFilters().addAll(
                new FileChooser.ExtensionFilter("JDBC Drivers (*.jar)", "*.jar"),
                new FileChooser.ExtensionFilter("ZIP archives (*.zip)", "*.zip"));

            File drvJar = fc.showOpenDialog(owner);

            if (drvJar != null)
                jdbcDrvJarTf.setText(drvJar.getAbsolutePath());
        }
    }));

    jdbcDrvClsTf = connPnl.addLabeled("JDBC Driver:", textField("Class name for JDBC driver"), 2);
    jdbcUrlTf = connPnl.addLabeled("JDBC URL:", textField("JDBC URL of the database connection string"), 2);
    userTf = connPnl.addLabeled("User:", textField("User name"), 2);
    pwdTf = connPnl.addLabeled("Password:", passwordField("User password"), 2);

    connLayerPnl = stackPane(connPnl);

    return connLayerPnl;
}
/**
 * Check that the proposed class name does not clash with the key or value
 * class name of any other POJO descriptor.
 *
 * @param pojo Current edited POJO.
 * @param newVal New value for class name.
 * @param key {@code true} if key class name is checked.
 * @return {@code true} if class name is valid.
 */
private boolean checkClassNameUnique(PojoDescriptor pojo, String newVal, boolean key) {
    for (PojoDescriptor other : pojos) {
        if (other == pojo)
            continue;

        // Clash against either role of the other descriptor is forbidden.
        if (newVal.equals(other.keyClassName()) || newVal.equals(other.valueClassName())) {
            MessageBox.warningDialog(owner, (key ? "Key" : "Value") + " class name must be unique!");

            return false;
        }
    }

    return true;
}
/**
 * Check if new class name is valid: it must differ from the descriptor's
 * other class name and be unique across all descriptors.
 *
 * @param pojo Current edited POJO.
 * @param newVal New value for class name.
 * @param key {@code true} if key class name is checked.
 * @return {@code true} if class name is valid.
 */
private boolean checkClassName(PojoDescriptor pojo, String newVal, boolean key) {
    // The key and value class names of a single descriptor must differ.
    String opposite = key ? pojo.valueClassName() : pojo.keyClassName();

    if (newVal.equals(opposite)) {
        MessageBox.warningDialog(owner, key
            ? "Key class name must be different from value class name!"
            : "Value class name must be different from key class name!");

        return false;
    }

    return checkClassNameUnique(pojo, newVal, key);
}
/**
* Create generate pane with controls.
*/
private void createGeneratePane() {
    genPnl = paneEx(10, 10, 0, 10);

    genPnl.addColumn();
    genPnl.addColumn(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    genPnl.addColumn(35, 35, 35, Priority.NEVER);

    genPnl.addRow(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    genPnl.addRows(7);

    // Left table: schemas/tables with editable key and value class name columns.
    TableColumn<PojoDescriptor, Boolean> useCol = customColumn("Schema / Table", "use",
        "If checked then this table will be used for XML and POJOs generation", PojoDescriptorCell.cellFactory());

    TableColumn<PojoDescriptor, String> keyClsCol = textColumn("Key Class Name", "keyClassName", "Key class name",
        new TextColumnValidator<PojoDescriptor>() {
            @Override public boolean valid(PojoDescriptor rowVal, String newVal) {
                return checkClassName(rowVal, newVal, true);
            }
        });

    TableColumn<PojoDescriptor, String> valClsCol = textColumn("Value Class Name", "valueClassName", "Value class name",
        new TextColumnValidator<PojoDescriptor>() {
            @Override public boolean valid(PojoDescriptor rowVal, String newVal) {
                return checkClassName(rowVal, newVal, false);
            }
        });

    pojosTbl = tableView("Tables not found in database", useCol, keyClsCol, valClsCol);

    // Right table: columns of the currently selected table.
    TableColumn<PojoField, Boolean> keyCol = booleanColumn("Key", "key",
        "If checked then this field will be part of key object");

    TableColumn<PojoField, String> dbNameCol = tableColumn("DB Name", "dbName", "Field name in database");

    TableColumn<PojoField, String> dbTypeNameCol = tableColumn("DB Type", "dbTypeName", "Field type in database");

    TableColumn<PojoField, String> javaNameCol = textColumn("Java Name", "javaName", "Field name in POJO class",
        new TextColumnValidator<PojoField>() {
            @Override public boolean valid(PojoField rowVal, String newVal) {
                // Reject java names that collide with another field of the same POJO.
                for (PojoField field : curPojo.fields())
                    if (rowVal != field && newVal.equals(field.javaName())) {
                        MessageBox.warningDialog(owner, "Java name must be unique!");

                        return false;
                    }

                return true;
            }
        });

    TableColumn<PojoField, String> javaTypeNameCol = customColumn("Java Type", "javaTypeName",
        "Field java type in POJO class", JavaTypeCell.cellFactory());

    final TableView<PojoField> fieldsTbl = tableView("Select table to see table columns",
        keyCol, dbNameCol, dbTypeNameCol, javaNameCol, javaTypeNameCol);

    genPnl.add(splitPane(pojosTbl, fieldsTbl, 0.6), 3);

    final GridPaneEx keyValPnl = paneEx(0, 0, 0, 0);

    keyValPnl.addColumn(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    keyValPnl.addColumn();
    keyValPnl.addColumn(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    keyValPnl.addColumn();

    pkgTf = genPnl.addLabeled("Package:", textField("Package that will be used for POJOs generation"), 2);

    outFolderTf = genPnl.addLabeled("Output Folder:", textField("Output folder for XML and POJOs files"));

    genPnl.add(button("...", "Select output folder", new EventHandler<ActionEvent>() {
        @Override public void handle(ActionEvent evt) {
            DirectoryChooser dc = new DirectoryChooser();

            File folder = dc.showDialog(owner);

            if (folder != null)
                outFolderTf.setText(folder.getAbsolutePath());
        }
    }));

    pojoIncludeKeysCh = genPnl.add(checkBox("Include key fields into value POJOs",
        "If selected then include key fields into value object", true), 3);

    pojoConstructorCh = genPnl.add(checkBox("Generate constructors for POJOs",
        "If selected then generate empty and full constructors for POJOs", false), 3);

    xmlSingleFileCh = genPnl.add(checkBox("Write all configurations to a single XML file",
        "If selected then all configurations will be saved into the file 'Ignite.xml'", true), 3);

    openFolderCh = genPnl.add(checkBox("Reveal output folder",
        "Open output folder in system file manager after generation complete", true), 3);

    // Bulk-rename panel: regexp-based renaming for the selected tables/fields.
    GridPaneEx regexPnl = paneEx(0, 0, 0, 0);

    regexPnl.addColumn();
    regexPnl.addColumn(100, 100, Double.MAX_VALUE, Priority.ALWAYS);
    regexPnl.addColumn();
    regexPnl.addColumn(100, 100, Double.MAX_VALUE, Priority.ALWAYS);

    regexPnl.add(label("Replace \"Key class name\", \"Value class name\" or \"Java name\" for selected tables:"), 4);

    regexTf = regexPnl.addLabeled(" Regexp:", textField("Regular expression. For example: (\\w+)"));

    replaceTf = regexPnl.addLabeled(" Replace with:", textField("Replace text. For example: $1_SomeText"));

    final ComboBox<String> replaceCb = regexPnl.addLabeled(" Replace:", comboBox("Replacement target",
        "Key class names", "Value class names", "Java names"));

    regexPnl.add(buttonsPane(Pos.BOTTOM_RIGHT, false,
        button("Rename Selected", "Replaces each substring of this string that matches the given regular expression" +
            " with the given replacement.",
            new EventHandler<ActionEvent>() {
                @Override public void handle(ActionEvent evt) {
                    // checkInput() shows its own warning dialog and returns true for invalid input.
                    if (checkInput(regexTf, false, "Regular expression should not be empty!"))
                        return;

                    String sel = replaceCb.getSelectionModel().getSelectedItem();

                    boolean renFields = "Java names".equals(sel);

                    String src = (renFields ? "fields" : "tables");
                    String target = "\"" + sel + "\"";

                    Collection<PojoDescriptor> selItems = selectedItems();

                    if (selItems.isEmpty()) {
                        MessageBox.warningDialog(owner, "Please select " + src + " to rename " + target + "!");

                        return;
                    }

                    if (!MessageBox.confirmDialog(owner, "Are you sure you want to rename " + target +
                        " for all selected " + src + "?"))
                        return;

                    String regex = regexTf.getText();
                    String replace = replaceTf.getText();

                    try {
                        // TODO(review): only field java names are renamed here, even when the
                        // "Key class names" / "Value class names" target is selected in replaceCb.
                        // Confirm whether class-name renaming is intentionally not implemented yet.
                        for (PojoDescriptor pojo : selItems)
                            for (PojoField field : pojo.fields())
                                field.javaName(field.javaName().replaceAll(regex, replace));
                    }
                    catch (Exception e) {
                        MessageBox.errorDialog(owner, "Failed to rename " + target + "!", e);
                    }
                }
            }),
        button("Reset Selected", "Revert changes for selected items to initial values", new EventHandler<ActionEvent>() {
            @Override public void handle(ActionEvent evt) {
                Collection<PojoDescriptor> selItems = selectedItems();

                String sel = replaceCb.getSelectionModel().getSelectedItem();

                boolean renFields = "Java names".equals(sel);

                String src = (renFields ? "fields" : "tables");
                String target = "\"" + sel + "\"";

                if (selItems.isEmpty()) {
                    // Fix: the original message concatenated src + "to revert" without a space.
                    MessageBox.warningDialog(owner, "Please select " + src + " to revert " + target + "!");

                    return;
                }

                if (!MessageBox.confirmDialog(owner,
                    "Are you sure you want to revert " + target + " for all selected " + src + "?"))
                    return;

                // TODO(review): reverts java names only - mirrors the limitation of the rename handler.
                for (PojoDescriptor pojo : selItems)
                    pojo.revertJavaNames();
            }
        })
    ), 2);

    // Show fields of the selected table row; schema-level rows clear and disable editing.
    pojosTbl.getSelectionModel().selectedItemProperty().addListener(new ChangeListener<PojoDescriptor>() {
        @Override public void changed(ObservableValue<? extends PojoDescriptor> val,
            PojoDescriptor oldVal, PojoDescriptor newItem) {
            if (newItem != null && newItem.parent() != null) {
                curPojo = newItem;

                fieldsTbl.setItems(curPojo.fields());
                fieldsTbl.getSelectionModel().select(0);

                keyValPnl.setDisable(false);
            }
            else {
                curPojo = null;

                fieldsTbl.setItems(NO_FIELDS);

                keyValPnl.setDisable(true);
            }
        }
    });

    genPnl.add(regexPnl, 3);

    genLayerPnl = stackPane(genPnl);
}
/**
* @return Selected tree view items.
*/
private Collection<PojoDescriptor> selectedItems() {
    // Walk every known descriptor and keep only those the user ticked.
    Collection<PojoDescriptor> selected = new ArrayList<>();

    for (PojoDescriptor desc : pojos) {
        if (!desc.selected())
            continue;

        selected.add(desc);
    }

    return selected;
}
/** {@inheritDoc} */
@Override public void start(Stage primaryStage) {
    owner = primaryStage;

    primaryStage.setTitle("Apache Ignite Auto Schema Load Utility");

    // Provide several icon sizes so the OS can pick the best match for dock/taskbar/title bar.
    primaryStage.getIcons().addAll(
        image("ignite", 16),
        image("ignite", 24),
        image("ignite", 32),
        image("ignite", 48),
        image("ignite", 64),
        image("ignite", 128));

    pi = progressIndicator(50);

    createGeneratePane();

    // Root layout: header on top, connection form in the center, wizard buttons below.
    rootPane = borderPane(createHeaderPane(), createConnectionPane(), createButtonsPane(), null, null);

    primaryStage.setScene(scene(rootPane));

    primaryStage.setWidth(600);
    primaryStage.setMinWidth(600);

    primaryStage.setHeight(650);
    primaryStage.setMinHeight(650);

    prev();

    // Restore everything persisted by stop() on the previous run.
    Preferences userPrefs = Preferences.userNodeForPackage(getClass());

    // Restore window pos and size.
    if (userPrefs.get("window.x", null) != null) {
        double x = userPrefs.getDouble("window.x", 100);
        double y = userPrefs.getDouble("window.y", 100);

        double w = userPrefs.getDouble("window.width", 650);
        double h = userPrefs.getDouble("window.height", 650);

        // Ensure that window fit any available screen.
        if (!Screen.getScreensForRectangle(x, y, w, h).isEmpty()) {
            if (x > 0)
                primaryStage.setX(x);

            if (y > 0)
                primaryStage.setY(y);

            primaryStage.setWidth(w);
            primaryStage.setHeight(h);
        }
    }
    else
        primaryStage.centerOnScreen();

    // Normalize path separators so the defaults below are valid on Windows too.
    String userHome = System.getProperty("user.home").replace('\\', '/');

    // Restore connection pane settings.
    jdbcDrvJarTf.setText(userPrefs.get("jdbc.driver.jar", "h2.jar"));
    jdbcDrvClsTf.setText(userPrefs.get("jdbc.driver.class", "org.h2.Driver"));
    jdbcUrlTf.setText(userPrefs.get("jdbc.url", "jdbc:h2:" + userHome + "/schema-load/db"));
    userTf.setText(userPrefs.get("jdbc.user", "sa"));

    // Restore generation pane settings.
    outFolderTf.setText(userPrefs.get("out.folder", userHome + "/schema-load/out"));
    openFolderCh.setSelected(userPrefs.getBoolean("out.folder.open", true));

    pkgTf.setText(userPrefs.get("pojo.package", "org.apache.ignite"));
    pojoIncludeKeysCh.setSelected(userPrefs.getBoolean("pojo.include", true));
    pojoConstructorCh.setSelected(userPrefs.getBoolean("pojo.constructor", false));

    xmlSingleFileCh.setSelected(userPrefs.getBoolean("xml.single", true));

    regexTf.setText(userPrefs.get("naming.pattern", ""));
    replaceTf.setText(userPrefs.get("naming.replace", ""));

    primaryStage.show();
}
/** {@inheritDoc} */
@Override public void stop() throws Exception {
    // Persist UI state so start() can restore it on the next launch.
    Preferences prefs = Preferences.userNodeForPackage(getClass());

    // Window geometry.
    prefs.putDouble("window.x", owner.getX());
    prefs.putDouble("window.y", owner.getY());
    prefs.putDouble("window.width", owner.getWidth());
    prefs.putDouble("window.height", owner.getHeight());

    // Connection pane settings.
    prefs.put("jdbc.driver.jar", jdbcDrvJarTf.getText());
    prefs.put("jdbc.driver.class", jdbcDrvClsTf.getText());
    prefs.put("jdbc.url", jdbcUrlTf.getText());
    prefs.put("jdbc.user", userTf.getText());

    // Generation pane settings.
    prefs.put("out.folder", outFolderTf.getText());
    prefs.putBoolean("out.folder.open", openFolderCh.isSelected());
    prefs.put("pojo.package", pkgTf.getText());
    prefs.putBoolean("pojo.include", pojoIncludeKeysCh.isSelected());
    prefs.putBoolean("pojo.constructor", pojoConstructorCh.isSelected());
    prefs.putBoolean("xml.single", xmlSingleFileCh.isSelected());
    prefs.put("naming.pattern", regexTf.getText());
    prefs.put("naming.replace", replaceTf.getText());
}
/**
* Schema load utility launcher.
*
* @param args Command line arguments passed to the application.
*/
public static void main(String[] args) {
    // Disable LCD sub-pixel AA - JavaFX text rendering looks bad with it.
    System.setProperty("prism.lcdtext", "false");
    System.setProperty("prism.text", "t2k");

    // AWT must be initialized before JavaFX starts.
    java.awt.Toolkit.getDefaultToolkit();

    boolean macOs = System.getProperty("os.name").toLowerCase().contains("mac os");

    if (macOs) {
        System.setProperty("javafx.macosx.embedded", "true");

        try {
            // Set the dock icon reflectively so this class still loads on non-Apple JVMs.
            Class<?> macAppCls = Class.forName("com.apple.eawt.Application");

            Object macApp = macAppCls.getDeclaredMethod("getApplication").invoke(null);

            macAppCls.getDeclaredMethod("setDockIconImage", java.awt.Image.class)
                .invoke(macApp, fromFXImage(image("ignite", 128), null));
        }
        catch (Throwable ignore) {
            // Apple classes unavailable - skip dock icon setup silently.
        }
    }

    launch(args);
}
/**
* Special table cell to select possible java type conversions.
*/
private static class JavaTypeCell extends TableCell<PojoField, String> {
    /** Combo box showing the possible java type conversions for the row's field. */
    private final ComboBox<String> comboBox;

    /** Creates a ComboBox cell factory for use in TableColumn controls. */
    public static Callback<TableColumn<PojoField, String>, TableCell<PojoField, String>> cellFactory() {
        return new Callback<TableColumn<PojoField, String>, TableCell<PojoField, String>>() {
            @Override public TableCell<PojoField, String> call(TableColumn<PojoField, String> col) {
                return new JavaTypeCell();
            }
        };
    }

    /**
     * Default constructor.
     */
    private JavaTypeCell() {
        // Items are replaced per row in updateItem() with the field's allowed conversions.
        comboBox = new ComboBox<>(FXCollections.<String>emptyObservableList());

        comboBox.valueProperty().addListener(new ChangeListener<String>() {
            @Override public void changed(ObservableValue<? extends String> val, String oldVal, String newVal) {
                // Commit the picked value back into the table model, but only while editing -
                // programmatic selection in updateItem() must not trigger a commit.
                if (isEditing())
                    commitEdit(newVal);
            }
        });

        getStyleClass().add("combo-box-table-cell");
    }

    /** {@inheritDoc} */
    @Override public void startEdit() {
        // Editing makes sense only when more than one java type conversion exists.
        if (comboBox.getItems().size() > 1) {
            comboBox.getSelectionModel().select(getItem());

            super.startEdit();

            // Swap plain text for the combo box.
            setText(null);
            setGraphic(comboBox);
        }
    }

    /** {@inheritDoc} */
    @Override public void cancelEdit() {
        super.cancelEdit();

        // Restore plain-text rendering of the current value.
        setText(getItem());
        setGraphic(null);
    }

    /** {@inheritDoc} */
    @Override public void updateItem(String item, boolean empty) {
        super.updateItem(item, empty);

        setGraphic(null);

        if (!empty) {
            setText(item);

            TableRow row = getTableRow();

            if (row != null) {
                PojoField pojo = (PojoField)row.getItem();

                if (pojo != null) {
                    // Repopulate the combo with this field's possible conversions and
                    // preselect its current java type.
                    comboBox.setItems(pojo.conversions());
                    comboBox.getSelectionModel().select(pojo.javaTypeName());
                }
            }
        }
    }
}
/**
* Special table cell to select schema or table.
*/
private static class PojoDescriptorCell extends TableCell<PojoDescriptor, Boolean> {
    /** Creates a check box cell factory for use in TableColumn controls. */
    public static Callback<TableColumn<PojoDescriptor, Boolean>, TableCell<PojoDescriptor, Boolean>> cellFactory() {
        return new Callback<TableColumn<PojoDescriptor, Boolean>, TableCell<PojoDescriptor, Boolean>>() {
            @Override public TableCell<PojoDescriptor, Boolean> call(TableColumn<PojoDescriptor, Boolean> col) {
                return new PojoDescriptorCell();
            }
        };
    }

    /** Previous POJO bound to cell. */
    private PojoDescriptor prevPojo;

    /** {@inheritDoc} */
    @Override public void updateItem(Boolean item, boolean empty) {
        super.updateItem(item, empty);

        if (!empty) {
            TableRow row = getTableRow();

            if (row != null) {
                final PojoDescriptor pojo = (PojoDescriptor)row.getItem();

                // Rebuild the check box only when the cell is re-bound to a different row item.
                if (pojo != prevPojo) {
                    prevPojo = pojo;

                    // Descriptors with a parent are table rows; parentless ones are schema rows.
                    boolean isTbl = pojo.parent() != null;

                    CheckBox ch = new CheckBox(isTbl ? pojo.table() : pojo.schema());

                    ch.setAllowIndeterminate(false);
                    ch.setMnemonicParsing(false);

                    // Keep check box state in sync with the descriptor's observable properties.
                    ch.indeterminateProperty().bindBidirectional(pojo.indeterminate());
                    ch.selectedProperty().bindBidirectional(pojo.useProperty());

                    Pane pnl = new HBox();

                    // Indent table rows deeper than schema rows to visualize the hierarchy.
                    pnl.setPadding(new Insets(0, 0, 0, isTbl ? 25 : 5));
                    pnl.getChildren().add(ch);

                    setGraphic(pnl);
                }
            }
        }
    }
}
}
|
# IGNITE-32: Minor UI tweaks.
|
modules/schema-load/src/main/java/org/apache/ignite/schema/ui/SchemaLoadApp.java
|
# IGNITE-32: Minor UI tweaks.
|
|
Java
|
apache-2.0
|
de1c4cdbecd062ffbcd12b2a88584b4998556453
| 0
|
nikolaylagutko/spock-modelcitizen
|
/**
* Copyright 2015 Nikolay Lagutko <nikolay.lagutko@mail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gerzog.spock.modelcitizen;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.gerzog.spock.modelcitizen.api.Model;
import org.gerzog.spock.modelcitizen.api.UseBlueprints;
import org.spockframework.runtime.InvalidSpecException;
import org.spockframework.runtime.extension.AbstractAnnotationDrivenExtension;
import org.spockframework.runtime.model.FieldInfo;
import org.spockframework.runtime.model.SpecInfo;
import com.tobedevoured.modelcitizen.ModelFactory;
import com.tobedevoured.modelcitizen.RegisterBlueprintException;
/**
* @author Nikolay Lagutko (nikolay.lagutko@mail.com)
*
*/
public class ModelCitizenExtension extends AbstractAnnotationDrivenExtension<UseBlueprints> {
@Override
public void visitSpecAnnotation(final UseBlueprints annotation, final SpecInfo spec) {
ModelFactory factory = new ModelFactory();
initializeBlueprints(factory, annotation, spec);
registerInterceptor(factory, spec);
}
private void registerInterceptor(final ModelFactory factory, final SpecInfo spec) {
List<FieldInfo> modelFields = getModelFields(spec);
validateModelFields(modelFields);
spec.getSetupMethod().addInterceptor(new ModelCitizenMethodInterceptor(factory, modelFields));
}
private void validateModelFields(final List<FieldInfo> modelFields) {
modelFields.forEach(this::validateModelField);
}
private void validateModelField(final FieldInfo modelField) {
if (modelField.getType().equals(Object.class)) {
throw new InvalidSpecException("Object class was detected as @Model source. Please check you didn't use 'def' keyword to define @Model field");
}
}
private List<FieldInfo> getModelFields(final SpecInfo spec) {
return spec.getAllFields().stream().filter(field -> field.isAnnotationPresent(Model.class)).collect(Collectors.toList());
}
private void initializeBlueprints(final ModelFactory factory, final UseBlueprints annotation, final SpecInfo spec) {
try {
// register blueprints from classes
factory.setRegisterBlueprints(Arrays.asList(annotation.classes()));
// register blueprints from packages
for (String packageName : annotation.packagesToScan()) {
factory.setRegisterBlueprintsByPackage(packageName);
}
} catch (RegisterBlueprintException e) {
throw new InvalidSpecException("An error occured during ModelCitizen initialization. Please check your @UseBlueprints configuration for " + spec.getName() + " spec", e);
}
}
}
|
src/main/java/org/gerzog/spock/modelcitizen/ModelCitizenExtension.java
|
/**
* Copyright 2015 Nikolay Lagutko <nikolay.lagutko@mail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gerzog.spock.modelcitizen;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.gerzog.spock.modelcitizen.api.Model;
import org.gerzog.spock.modelcitizen.api.UseBlueprints;
import org.spockframework.runtime.InvalidSpecException;
import org.spockframework.runtime.extension.AbstractAnnotationDrivenExtension;
import org.spockframework.runtime.model.FieldInfo;
import org.spockframework.runtime.model.SpecInfo;
import com.tobedevoured.modelcitizen.ModelFactory;
import com.tobedevoured.modelcitizen.RegisterBlueprintException;
/**
* @author Nikolay Lagutko (nikolay.lagutko@mail.com)
*
*/
public class ModelCitizenExtension extends AbstractAnnotationDrivenExtension<UseBlueprints> {

    /**
     * Called once per spec annotated with {@code @UseBlueprints}: builds a ModelCitizen
     * {@link ModelFactory}, registers the configured blueprints on it and installs the
     * interceptor that populates {@code @Model} fields.
     */
    @Override
    public void visitSpecAnnotation(final UseBlueprints annotation, final SpecInfo spec) {
        ModelFactory factory = new ModelFactory();

        initializeBlueprints(factory, annotation, spec);

        registerInterceptor(factory, spec);
    }

    // Validates the @Model fields and registers the interceptor on the spec itself
    // (not on its setup method).
    private void registerInterceptor(final ModelFactory factory, final SpecInfo spec) {
        List<FieldInfo> modelFields = getModelFields(spec);

        validateModelFields(modelFields);

        spec.addInterceptor(new ModelCitizenMethodInterceptor(factory, modelFields));
    }

    // Applies per-field validation to every @Model field.
    private void validateModelFields(final List<FieldInfo> modelFields) {
        modelFields.forEach(this::validateModelField);
    }

    // A field declared with 'def' is typed as Object, so no blueprint could be
    // matched for it - reject such fields with a clear message.
    private void validateModelField(final FieldInfo modelField) {
        if (modelField.getType().equals(Object.class)) {
            throw new InvalidSpecException("Object class was detected as @Model source. Please check you didn't use 'def' keyword to define @Model field");
        }
    }

    // Collects all fields of the spec annotated with @Model.
    private List<FieldInfo> getModelFields(final SpecInfo spec) {
        return spec.getAllFields().stream().filter(field -> field.isAnnotationPresent(Model.class)).collect(Collectors.toList());
    }

    // Registers blueprints from explicitly listed classes and from scanned packages.
    private void initializeBlueprints(final ModelFactory factory, final UseBlueprints annotation, final SpecInfo spec) {
        try {
            // register blueprints from classes
            factory.setRegisterBlueprints(Arrays.asList(annotation.classes()));

            // register blueprints from packages
            for (String packageName : annotation.packagesToScan()) {
                factory.setRegisterBlueprintsByPackage(packageName);
            }
        } catch (RegisterBlueprintException e) {
            throw new InvalidSpecException("An error occured during ModelCitizen initialization. Please check your @UseBlueprints configuration for " + spec.getName() + " spec", e);
        }
    }
}
|
register interceptor for setup method
|
src/main/java/org/gerzog/spock/modelcitizen/ModelCitizenExtension.java
|
register interceptor for setup method
|
|
Java
|
apache-2.0
|
282c39c03f8d5227191e5a4d989be5925023ba54
| 0
|
brandt/GridSphere,brandt/GridSphere
|
/*
* @author <a href="mailto:michael.russell@aei.mpg.de">Michael Russell</a>
* @version $Id$
*/
package org.gridlab.gridsphere.portlets.core.layout;
import org.gridlab.gridsphere.layout.PortletPage;
import org.gridlab.gridsphere.layout.PortletTab;
import org.gridlab.gridsphere.layout.PortletTabbedPane;
import org.gridlab.gridsphere.portlet.*;
import org.gridlab.gridsphere.portlet.impl.SportletUser;
import org.gridlab.gridsphere.portlet.service.PortletServiceException;
import org.gridlab.gridsphere.provider.event.FormEvent;
import org.gridlab.gridsphere.provider.portlet.ActionPortlet;
import org.gridlab.gridsphere.provider.portletui.beans.*;
import org.gridlab.gridsphere.services.core.layout.LayoutManagerService;
import org.gridlab.gridsphere.services.core.user.UserManagerService;
import javax.servlet.UnavailableException;
import java.io.IOException;
import java.util.*;
public class UserLayoutPortlet extends ActionPortlet {
    /** JSP page used to render this portlet. */
    public static final String VIEW_JSP = "layout/view.jsp";

    // Portlet services, resolved in init().
    private LayoutManagerService layoutMgr = null;
    private UserManagerService userManagerService = null;

    /**
     * Resolves the layout and user manager services and sets the default pages.
     *
     * @param config Portlet configuration.
     * @throws UnavailableException If initialization fails.
     */
    public void init(PortletConfig config) throws UnavailableException {
        super.init(config);
        try {
            this.layoutMgr = (LayoutManagerService) config.getContext().getService(LayoutManagerService.class);
            this.userManagerService = (UserManagerService) config.getContext().getService(UserManagerService.class);
        } catch (PortletServiceException e) {
            log.error("Unable to initialize services!", e);
        }
        DEFAULT_VIEW_PAGE = "doRender";
        DEFAULT_HELP_PAGE = "layout/help.jsp";
    }

    public void initConcrete(PortletSettings settings) throws UnavailableException {
        super.initConcrete(settings);
    }

    /**
     * Creates a new user tab with the requested name and column count, rejecting
     * empty or duplicate tab names.
     *
     * @param event Form event carrying the tab name and column count.
     */
    public void createNewTab(FormEvent event) throws PortletException, IOException {
        String tabName = event.getTextFieldBean("userTabTF").getValue();

        // Validate the tab name before parsing the column count, so a missing name
        // produces a user-visible error instead of a parse exception.
        if (tabName == null) {
            createErrorMessage(event, this.getLocalizedText(event.getPortletRequest(), "LAYOUT_NOTAB_ERROR"));
            return;
        }

        RadioButtonBean rb = event.getRadioButtonBean("colsRB");
        String rbtype = rb.getSelectedValue();
        int cols = Integer.parseInt(rbtype);

        log.debug("creating tab " + tabName + " cols= " + cols);

        // Reject a tab whose label already exists in the user's pane.
        PortletTabbedPane pane = layoutMgr.getUserTabbedPane(event.getPortletRequest());
        if (pane != null) {
            Iterator it = pane.getPortletTabs().iterator();
            while (it.hasNext()) {
                PortletTab tab = (PortletTab) it.next();
                if (tab.getLabel().equals(tabName + "Tab")) {
                    createErrorMessage(event, this.getLocalizedText(event.getPortletRequest(), "LAYOUT_SAMETAB_ERROR"));
                    return;
                }
            }
        }

        pane = layoutMgr.createUserTabbedPane(event.getPortletRequest(), cols, tabName);
        PortletTab tab = pane.getLastPortletTab();
        pane.save();

        // Mirror the new tab into the currently rendered page and keep the tabs sorted.
        PortletPage page = layoutMgr.getPortletPage(event.getPortletRequest());
        PortletTabbedPane mypane = page.getPortletTabbedPane();
        List tabs = mypane.getPortletTabs();
        tabs.add(tab);
        Collections.sort(tabs, new PortletTab());
        layoutMgr.reloadPage(event.getPortletRequest());
    }

    /**
     * Deletes the tab identified by the "tabid" action parameter from both the
     * persisted user pane and the currently rendered page.
     */
    public void deleteTab(FormEvent event) throws IOException {
        String label = event.getAction().getParameter("tabid");

        // Remove from the persisted user pane.
        PortletTabbedPane pane = layoutMgr.getUserTabbedPane(event.getPortletRequest());
        List tabs = pane.getPortletTabs();
        Iterator it = tabs.iterator();
        while (it.hasNext()) {
            PortletTab tab = (PortletTab) it.next();
            if (tab.getLabel().equals(label)) {
                it.remove();
            }
        }
        pane.save();

        // Remove from the rendered page as well.
        PortletPage page = layoutMgr.getPortletPage(event.getPortletRequest());
        PortletTabbedPane mypane = page.getPortletTabbedPane();
        it = mypane.getPortletTabs().iterator();
        while (it.hasNext()) {
            PortletTab tab = (PortletTab) it.next();
            if (tab.getLabel().equals(label)) {
                it.remove();
            }
        }
        layoutMgr.reloadPage(event.getPortletRequest());
    }

    /**
     * Renames the tab identified by "tabid" using the "myTF" request parameter,
     * updating both the persisted user pane and the rendered page.
     */
    public void saveTab(FormEvent event) throws IOException {
        String lang = event.getPortletRequest().getLocale().getLanguage();
        String name = event.getPortletRequest().getParameter("myTF");
        String label = event.getAction().getParameter("tabid");

        // Rename in the persisted user pane.
        PortletTabbedPane pane = layoutMgr.getUserTabbedPane(event.getPortletRequest());
        List tabs = pane.getPortletTabs();
        Iterator it = tabs.iterator();
        while (it.hasNext()) {
            PortletTab tab = (PortletTab) it.next();
            if (tab.getLabel().equals(label)) {
                tab.setTitle(lang, name);
            }
        }
        pane.save();

        // Rename in the rendered page as well.
        PortletPage page = layoutMgr.getPortletPage(event.getPortletRequest());
        PortletTabbedPane mypane = page.getPortletTabbedPane();
        it = mypane.getPortletTabs().iterator();
        while (it.hasNext()) {
            PortletTab tab = (PortletTab) it.next();
            if (tab.getLabel().equals(label)) {
                tab.setTitle(lang, name);
            }
        }
        layoutMgr.reloadPage(event.getPortletRequest());
    }

    /**
     * Default render state: fills the theme list box and the user's tab list
     * consumed by view.jsp.
     */
    public void doRender(FormEvent event) {
        PortletRequest req = event.getPortletRequest();

        req.setAttribute("lang", req.getLocale().getLanguage());

        // Populate the theme list from the comma-separated portlet setting,
        // preselecting the page's current theme.
        ListBoxBean themeLB = event.getListBoxBean("themeLB");
        PortletPage page = layoutMgr.getPortletPage(req);
        String theme = page.getTheme();
        themeLB.clear();
        String themes = getPortletSettings().getAttribute("supported-themes");
        StringTokenizer st = new StringTokenizer(themes, ",");
        while (st.hasMoreTokens()) {
            ListBoxItemBean lb = new ListBoxItemBean();
            String val = st.nextToken();
            lb.setValue(val.trim());
            if (val.trim().equalsIgnoreCase(theme)) lb.setSelected(true);
            themeLB.addBean(lb);
        }

        // The user may not have a personal tabbed pane yet.
        PortletTabbedPane pane = layoutMgr.getUserTabbedPane(req);
        List tabs;
        if (pane != null) {
            tabs = pane.getPortletTabs();
        } else {
            tabs = new ArrayList();
        }
        req.setAttribute("tabs", tabs);
        setNextState(req, VIEW_JSP);
    }

    /**
     * Persists the selected theme on the user account (when logged in) and
     * applies it to the current page.
     */
    public void saveTheme(FormEvent event) {
        PortletRequest req = event.getPortletRequest();
        ListBoxBean themeLB = event.getListBoxBean("themeLB");
        String theme = themeLB.getSelectedValue();

        User user = req.getUser();
        // Fix: check the user before requesting an editable copy - previously
        // editUser(user) was invoked even when user was null.
        if (user != null) {
            SportletUser acctReq = userManagerService.editUser(user);
            acctReq.setAttribute(User.THEME, theme);
            userManagerService.saveUser(acctReq);
        }

        PortletPage page = layoutMgr.getPortletPage(req);
        page.setTheme(theme);
        layoutMgr.reloadPage(req);
    }

    /** Shows {@code msg} in the "msg" message box styled as an error. */
    private void createErrorMessage(FormEvent event, String msg) {
        MessageBoxBean msgBox = event.getMessageBoxBean("msg");
        msgBox.setMessageType(MessageStyle.MSG_ERROR);
        msgBox.setValue(msg);
    }
}
|
src/org/gridlab/gridsphere/portlets/core/layout/UserLayoutPortlet.java
|
/*
* @author <a href="mailto:michael.russell@aei.mpg.de">Michael Russell</a>
* @version $Id$
*/
package org.gridlab.gridsphere.portlets.core.layout;
import org.gridlab.gridsphere.layout.PortletPage;
import org.gridlab.gridsphere.layout.PortletTab;
import org.gridlab.gridsphere.layout.PortletTabbedPane;
import org.gridlab.gridsphere.portlet.*;
import org.gridlab.gridsphere.portlet.impl.SportletUser;
import org.gridlab.gridsphere.portlet.service.PortletServiceException;
import org.gridlab.gridsphere.provider.event.FormEvent;
import org.gridlab.gridsphere.provider.portlet.ActionPortlet;
import org.gridlab.gridsphere.provider.portletui.beans.*;
import org.gridlab.gridsphere.services.core.layout.LayoutManagerService;
import org.gridlab.gridsphere.services.core.user.UserManagerService;
import javax.servlet.UnavailableException;
import java.io.IOException;
import java.util.*;
public class UserLayoutPortlet extends ActionPortlet {
// JSP pages used by this portlet
public static final String VIEW_JSP = "layout/view.jsp";
// Portlet services
private LayoutManagerService layoutMgr = null;
private UserManagerService userManagerService = null;
public void init(PortletConfig config) throws UnavailableException {
super.init(config);
try {
this.layoutMgr = (LayoutManagerService) config.getContext().getService(LayoutManagerService.class);
this.userManagerService = (UserManagerService) config.getContext().getService(UserManagerService.class);
} catch (PortletServiceException e) {
log.error("Unable to initialize services!", e);
}
DEFAULT_VIEW_PAGE = "doRender";
DEFAULT_HELP_PAGE = "layout/help.jsp";
}
public void initConcrete(PortletSettings settings) throws UnavailableException {
super.initConcrete(settings);
}
public void createNewTab(FormEvent event) throws PortletException, IOException {
String tabName = event.getTextFieldBean("userTabTF").getValue();
//RadioButtonBean rb = event.getRadioButtonBean("colsRB");
//String rbtype = rb.getSelectedValue();
String rbtype = event.getPortletRequest().getParameter("colsRB");
int cols = Integer.valueOf(rbtype).intValue();
if (tabName == null) {
createErrorMessage(event, this.getLocalizedText(event.getPortletRequest(), "LAYOUT_NOTAB_ERROR"));
return;
}
log.debug("creating tab " + tabName + " cols= " + cols);
PortletTabbedPane pane = layoutMgr.getUserTabbedPane(event.getPortletRequest());
if (pane != null) {
Iterator it = pane.getPortletTabs().iterator();
while (it.hasNext()) {
PortletTab tab = (PortletTab)it.next();
if (tab.getLabel().equals(tabName + "Tab")) {
createErrorMessage(event, this.getLocalizedText(event.getPortletRequest(), "LAYOUT_SAMETAB_ERROR"));
return;
}
}
}
pane = layoutMgr.createUserTabbedPane(event.getPortletRequest(), cols, tabName);
PortletTab tab = pane.getLastPortletTab();
pane.save();
PortletPage page = layoutMgr.getPortletPage(event.getPortletRequest());
PortletTabbedPane mypane = page.getPortletTabbedPane();
List tabs = mypane.getPortletTabs();
tabs.add(tab);
Collections.sort(tabs, new PortletTab());
layoutMgr.reloadPage(event.getPortletRequest());
}
public void deleteTab(FormEvent event) throws IOException {
String label = event.getAction().getParameter("tabid");
PortletTabbedPane pane = layoutMgr.getUserTabbedPane(event.getPortletRequest());
List tabs = pane.getPortletTabs();
Iterator it = tabs.iterator();
while (it.hasNext()) {
PortletTab tab = (PortletTab) it.next();
if (tab.getLabel().equals(label)) {
it.remove();
}
}
pane.save();
PortletPage page = layoutMgr.getPortletPage(event.getPortletRequest());
PortletTabbedPane mypane = page.getPortletTabbedPane();
it = mypane.getPortletTabs().iterator();
while (it.hasNext()) {
PortletTab tab = (PortletTab) it.next();
if (tab.getLabel().equals(label)) {
it.remove();
}
}
layoutMgr.reloadPage(event.getPortletRequest());
}
public void saveTab(FormEvent event) throws IOException {
String lang = event.getPortletRequest().getLocale().getLanguage();
String name = event.getPortletRequest().getParameter("myTF");
String label = event.getAction().getParameter("tabid");
PortletTabbedPane pane = layoutMgr.getUserTabbedPane(event.getPortletRequest());
List tabs = pane.getPortletTabs();
Iterator it = tabs.iterator();
while (it.hasNext()) {
PortletTab tab = (PortletTab) it.next();
if (tab.getLabel().equals(label)) {
tab.setTitle(lang, name);
}
}
pane.save();
PortletPage page = layoutMgr.getPortletPage(event.getPortletRequest());
PortletTabbedPane mypane = page.getPortletTabbedPane();
it = mypane.getPortletTabs().iterator();
while (it.hasNext()) {
PortletTab tab = (PortletTab) it.next();
if (tab.getLabel().equals(label)) {
tab.setTitle(lang, name);
}
}
layoutMgr.reloadPage(event.getPortletRequest());
}
public void doRender(FormEvent event) {
PortletRequest req = event.getPortletRequest();
PortletResponse res = event.getPortletResponse();
req.setAttribute("lang", req.getLocale().getLanguage());
ListBoxBean themeLB = event.getListBoxBean("themeLB");
PortletPage page = layoutMgr.getPortletPage(req);
String theme = page.getTheme();
themeLB.clear();
String themes = getPortletSettings().getAttribute("supported-themes");
StringTokenizer st = new StringTokenizer(themes, ",");
while (st.hasMoreTokens()) {
ListBoxItemBean lb = new ListBoxItemBean();
String val = (String) st.nextElement();
lb.setValue(val.trim());
if (val.trim().equalsIgnoreCase(theme)) lb.setSelected(true);
themeLB.addBean(lb);
}
PortletTabbedPane pane = layoutMgr.getUserTabbedPane(req);
List tabs;
if (pane != null) {
tabs = pane.getPortletTabs();
} else {
tabs = new ArrayList();
}
req.setAttribute("tabs", tabs);
setNextState(req, VIEW_JSP);
}
public void saveTheme(FormEvent event) {
PortletRequest req = event.getPortletRequest();
ListBoxBean themeLB = event.getListBoxBean("themeLB");
String theme = themeLB.getSelectedValue();
User user = req.getUser();
SportletUser acctReq = userManagerService.editUser(user);
if (user != null) {
acctReq.setAttribute(User.THEME, theme);
userManagerService.saveUser(acctReq);
}
PortletPage page = layoutMgr.getPortletPage(req);
page.setTheme(theme);
layoutMgr.reloadPage(req);
}
private void createErrorMessage(FormEvent event, String msg) {
MessageBoxBean msgBox = event.getMessageBoxBean("msg");
msgBox.setMessageType(MessageStyle.MSG_ERROR);
msgBox.setValue(msg);
}
}
|
use radiobutton as bean
git-svn-id: 616481d960d639df1c769687dde8737486ca2a9a@3557 9c99c85f-4d0c-0410-8460-a9a1c48a3a7f
|
src/org/gridlab/gridsphere/portlets/core/layout/UserLayoutPortlet.java
|
use radiobutton as bean
|
|
Java
|
apache-2.0
|
d90e19183779b109db8591982092c053d19ca361
| 0
|
apache/cordova-android,apache/cordova-android,apache/cordova-android
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.cordova.engine;
import java.util.Arrays;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.net.Uri;
import android.os.Build;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.webkit.ConsoleMessage;
import android.webkit.GeolocationPermissions.Callback;
import android.webkit.JsPromptResult;
import android.webkit.JsResult;
import android.webkit.ValueCallback;
import android.webkit.WebChromeClient;
import android.webkit.WebStorage;
import android.webkit.WebView;
import android.webkit.PermissionRequest;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import org.apache.cordova.CordovaDialogsHelper;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.LOG;
/**
* This class is the WebChromeClient that implements callbacks for our web view.
* The kind of callbacks that happen here are on the chrome outside the document,
* such as onCreateWindow(), onConsoleMessage(), onProgressChanged(), etc. Related
* to but different than CordovaWebViewClient.
*/
public class SystemWebChromeClient extends WebChromeClient {

    private static final int FILECHOOSER_RESULTCODE = 5173;
    private static final String LOG_TAG = "SystemWebChromeClient";

    // Storage quota (in bytes) granted when a page exceeds its database quota.
    private long MAX_QUOTA = 100 * 1024 * 1024;

    protected final SystemWebViewEngine parentEngine;

    // Lazily created progress view returned by getVideoLoadingProgressView().
    private View mVideoProgressView;

    private CordovaDialogsHelper dialogsHelper;
    private Context appContext;
    // NOTE: the unused mCustomViewCallback/mCustomView fields were removed;
    // custom-view handling is fully delegated to the CordovaWebView.

    public SystemWebChromeClient(SystemWebViewEngine parentEngine) {
        this.parentEngine = parentEngine;
        appContext = parentEngine.webView.getContext();
        dialogsHelper = new CordovaDialogsHelper(appContext);
    }

    /**
     * Tell the client to display a javascript alert dialog.
     */
    @Override
    public boolean onJsAlert(WebView view, String url, String message, final JsResult result) {
        dialogsHelper.showAlert(message, new CordovaDialogsHelper.Result() {
            @Override public void gotResult(boolean success, String value) {
                if (success) {
                    result.confirm();
                } else {
                    result.cancel();
                }
            }
        });
        return true;
    }

    /**
     * Tell the client to display a confirm dialog to the user.
     */
    @Override
    public boolean onJsConfirm(WebView view, String url, String message, final JsResult result) {
        dialogsHelper.showConfirm(message, new CordovaDialogsHelper.Result() {
            @Override
            public void gotResult(boolean success, String value) {
                if (success) {
                    result.confirm();
                } else {
                    result.cancel();
                }
            }
        });
        return true;
    }

    /**
     * Tell the client to display a prompt dialog to the user.
     * If the client returns true, WebView will assume that the client will
     * handle the prompt dialog and call the appropriate JsPromptResult method.
     *
     * Since we are hacking prompts for our own purposes, we should not be using them for
     * this purpose, perhaps we should hack console.log to do this instead!
     */
    @Override
    public boolean onJsPrompt(WebView view, String origin, String message, String defaultValue, final JsPromptResult result) {
        // Unlike the @JavascriptInterface bridge, this method is always called on the UI thread.
        // The Cordova JS->native bridge gets first refusal on every prompt.
        String handledRet = parentEngine.bridge.promptOnJsPrompt(origin, message, defaultValue);
        if (handledRet != null) {
            result.confirm(handledRet);
        } else {
            dialogsHelper.showPrompt(message, defaultValue, new CordovaDialogsHelper.Result() {
                @Override
                public void gotResult(boolean success, String value) {
                    if (success) {
                        result.confirm(value);
                    } else {
                        result.cancel();
                    }
                }
            });
        }
        return true;
    }

    /**
     * Handle database quota exceeded notification by always granting up to MAX_QUOTA.
     */
    @Override
    @SuppressWarnings("deprecation")
    public void onExceededDatabaseQuota(String url, String databaseIdentifier, long currentQuota, long estimatedSize,
                                        long totalUsedQuota, WebStorage.QuotaUpdater quotaUpdater)
    {
        LOG.d(LOG_TAG, "onExceededDatabaseQuota estimatedSize: %d currentQuota: %d totalUsedQuota: %d", estimatedSize, currentQuota, totalUsedQuota);
        quotaUpdater.updateQuota(MAX_QUOTA);
    }

    /**
     * Instructs the client to show a prompt to ask the user to set the Geolocation permission state for the specified origin.
     *
     * This also checks for the Geolocation Plugin and requests permission from the application to use Geolocation.
     *
     * @param origin
     * @param callback
     */
    @Override
    public void onGeolocationPermissionsShowPrompt(String origin, Callback callback) {
        super.onGeolocationPermissionsShowPrompt(origin, callback);
        callback.invoke(origin, true, false);
        //Get the plugin, it should be loaded
        CordovaPlugin geolocation = parentEngine.pluginManager.getPlugin("Geolocation");
        if(geolocation != null && !geolocation.hasPermisssion())
        {
            geolocation.requestPermissions(0);
        }
    }

    // API level 7 is required for this, see if we could lower this using something else
    @Override
    @SuppressWarnings("deprecation")
    public void onShowCustomView(View view, WebChromeClient.CustomViewCallback callback) {
        parentEngine.getCordovaWebView().showCustomView(view, callback);
    }

    @Override
    @SuppressWarnings("deprecation")
    public void onHideCustomView() {
        parentEngine.getCordovaWebView().hideCustomView();
    }

    /**
     * Ask the host application for a custom progress view to show while
     * a <video> is loading.
     * @return View The progress view (built once and cached).
     */
    @Override
    public View getVideoLoadingProgressView() {
        if (mVideoProgressView == null) {
            // Create a new Loading view programmatically.
            // create the linear layout
            LinearLayout layout = new LinearLayout(parentEngine.getView().getContext());
            layout.setOrientation(LinearLayout.VERTICAL);
            RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
            layoutParams.addRule(RelativeLayout.CENTER_IN_PARENT);
            layout.setLayoutParams(layoutParams);
            // the progress bar
            ProgressBar bar = new ProgressBar(parentEngine.getView().getContext());
            LinearLayout.LayoutParams barLayoutParams = new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
            barLayoutParams.gravity = Gravity.CENTER;
            bar.setLayoutParams(barLayoutParams);
            layout.addView(bar);
            mVideoProgressView = layout;
        }
        return mVideoProgressView;
    }

    /**
     * Launches an activity to let the user pick one or more files for an
     * HTML file input, forwarding the chosen URIs to the WebView callback.
     */
    @Override
    public boolean onShowFileChooser(WebView webView, final ValueCallback<Uri[]> filePathsCallback, final WebChromeClient.FileChooserParams fileChooserParams) {
        // Check if multiple-select is specified. A primitive boolean binds the
        // putExtra(String, boolean) overload (the boxed Boolean previously
        // matched putExtra(String, Serializable)).
        boolean selectMultiple =
                fileChooserParams.getMode() == WebChromeClient.FileChooserParams.MODE_OPEN_MULTIPLE;

        Intent intent = fileChooserParams.createIntent();
        intent.putExtra(Intent.EXTRA_ALLOW_MULTIPLE, selectMultiple);

        // Uses Intent.EXTRA_MIME_TYPES to pass multiple mime types.
        String[] acceptTypes = fileChooserParams.getAcceptTypes();
        if (acceptTypes.length > 1) {
            intent.setType("*/*"); // Accept all, filter mime types by Intent.EXTRA_MIME_TYPES.
            intent.putExtra(Intent.EXTRA_MIME_TYPES, acceptTypes);
        }

        try {
            parentEngine.cordova.startActivityForResult(new CordovaPlugin() {
                @Override
                public void onActivityResult(int requestCode, int resultCode, Intent intent) {
                    Uri[] result = null;
                    if (resultCode == Activity.RESULT_OK && intent != null) {
                        if (intent.getClipData() != null) {
                            // handle multiple-selected files
                            final int numSelectedFiles = intent.getClipData().getItemCount();
                            result = new Uri[numSelectedFiles];
                            for (int i = 0; i < numSelectedFiles; i++) {
                                result[i] = intent.getClipData().getItemAt(i).getUri();
                                LOG.d(LOG_TAG, "Receive file chooser URL: " + result[i]);
                            }
                        }
                        else if (intent.getData() != null) {
                            // handle single-selected file
                            result = WebChromeClient.FileChooserParams.parseResult(resultCode, intent);
                            LOG.d(LOG_TAG, "Receive file chooser URL: " + result);
                        }
                    }
                    filePathsCallback.onReceiveValue(result);
                }
            }, intent, FILECHOOSER_RESULTCODE);
        } catch (ActivityNotFoundException e) {
            // Fix: supply the log tag; previously the message string was
            // passed where LOG.w expects the tag.
            LOG.w(LOG_TAG, "No activity found to handle file chooser intent.", e);
            filePathsCallback.onReceiveValue(null);
        }
        return true;
    }

    /** Grants every resource the page requests (camera, microphone, ...). */
    @Override
    public void onPermissionRequest(final PermissionRequest request) {
        LOG.d(LOG_TAG, "onPermissionRequest: " + Arrays.toString(request.getResources()));
        request.grant(request.getResources());
    }

    /** Dismisses any dialog currently shown by the dialogs helper. */
    public void destroyLastDialog(){
        dialogsHelper.destroyLastDialog();
    }
}
|
framework/src/org/apache/cordova/engine/SystemWebChromeClient.java
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.cordova.engine;
import java.util.Arrays;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.net.Uri;
import android.os.Build;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.webkit.ConsoleMessage;
import android.webkit.GeolocationPermissions.Callback;
import android.webkit.JsPromptResult;
import android.webkit.JsResult;
import android.webkit.ValueCallback;
import android.webkit.WebChromeClient;
import android.webkit.WebStorage;
import android.webkit.WebView;
import android.webkit.PermissionRequest;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import org.apache.cordova.CordovaDialogsHelper;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.LOG;
/**
* This class is the WebChromeClient that implements callbacks for our web view.
* The kind of callbacks that happen here are on the chrome outside the document,
* such as onCreateWindow(), onConsoleMessage(), onProgressChanged(), etc. Related
* to but different than CordovaWebViewClient.
*/
public class SystemWebChromeClient extends WebChromeClient {

    private static final int FILECHOOSER_RESULTCODE = 5173;
    private static final String LOG_TAG = "SystemWebChromeClient";

    // Storage quota (in bytes) granted when a page exceeds its database quota.
    private long MAX_QUOTA = 100 * 1024 * 1024;

    protected final SystemWebViewEngine parentEngine;

    // Lazily created progress view returned by getVideoLoadingProgressView().
    private View mVideoProgressView;

    private CordovaDialogsHelper dialogsHelper;
    private Context appContext;
    // NOTE: the unused mCustomViewCallback/mCustomView fields were removed;
    // custom-view handling is fully delegated to the CordovaWebView.

    public SystemWebChromeClient(SystemWebViewEngine parentEngine) {
        this.parentEngine = parentEngine;
        appContext = parentEngine.webView.getContext();
        dialogsHelper = new CordovaDialogsHelper(appContext);
    }

    /**
     * Tell the client to display a javascript alert dialog.
     */
    @Override
    public boolean onJsAlert(WebView view, String url, String message, final JsResult result) {
        dialogsHelper.showAlert(message, new CordovaDialogsHelper.Result() {
            @Override public void gotResult(boolean success, String value) {
                if (success) {
                    result.confirm();
                } else {
                    result.cancel();
                }
            }
        });
        return true;
    }

    /**
     * Tell the client to display a confirm dialog to the user.
     */
    @Override
    public boolean onJsConfirm(WebView view, String url, String message, final JsResult result) {
        dialogsHelper.showConfirm(message, new CordovaDialogsHelper.Result() {
            @Override
            public void gotResult(boolean success, String value) {
                if (success) {
                    result.confirm();
                } else {
                    result.cancel();
                }
            }
        });
        return true;
    }

    /**
     * Tell the client to display a prompt dialog to the user.
     * If the client returns true, WebView will assume that the client will
     * handle the prompt dialog and call the appropriate JsPromptResult method.
     *
     * Since we are hacking prompts for our own purposes, we should not be using them for
     * this purpose, perhaps we should hack console.log to do this instead!
     */
    @Override
    public boolean onJsPrompt(WebView view, String origin, String message, String defaultValue, final JsPromptResult result) {
        // Unlike the @JavascriptInterface bridge, this method is always called on the UI thread.
        // The Cordova JS->native bridge gets first refusal on every prompt.
        String handledRet = parentEngine.bridge.promptOnJsPrompt(origin, message, defaultValue);
        if (handledRet != null) {
            result.confirm(handledRet);
        } else {
            dialogsHelper.showPrompt(message, defaultValue, new CordovaDialogsHelper.Result() {
                @Override
                public void gotResult(boolean success, String value) {
                    if (success) {
                        result.confirm(value);
                    } else {
                        result.cancel();
                    }
                }
            });
        }
        return true;
    }

    /**
     * Handle database quota exceeded notification by always granting up to MAX_QUOTA.
     */
    @Override
    @SuppressWarnings("deprecation")
    public void onExceededDatabaseQuota(String url, String databaseIdentifier, long currentQuota, long estimatedSize,
                                        long totalUsedQuota, WebStorage.QuotaUpdater quotaUpdater)
    {
        LOG.d(LOG_TAG, "onExceededDatabaseQuota estimatedSize: %d currentQuota: %d totalUsedQuota: %d", estimatedSize, currentQuota, totalUsedQuota);
        quotaUpdater.updateQuota(MAX_QUOTA);
    }

    /**
     * Instructs the client to show a prompt to ask the user to set the Geolocation permission state for the specified origin.
     *
     * This also checks for the Geolocation Plugin and requests permission from the application to use Geolocation.
     *
     * @param origin
     * @param callback
     */
    @Override
    public void onGeolocationPermissionsShowPrompt(String origin, Callback callback) {
        super.onGeolocationPermissionsShowPrompt(origin, callback);
        callback.invoke(origin, true, false);
        //Get the plugin, it should be loaded
        CordovaPlugin geolocation = parentEngine.pluginManager.getPlugin("Geolocation");
        if(geolocation != null && !geolocation.hasPermisssion())
        {
            geolocation.requestPermissions(0);
        }
    }

    // API level 7 is required for this, see if we could lower this using something else
    @Override
    @SuppressWarnings("deprecation")
    public void onShowCustomView(View view, WebChromeClient.CustomViewCallback callback) {
        parentEngine.getCordovaWebView().showCustomView(view, callback);
    }

    @Override
    @SuppressWarnings("deprecation")
    public void onHideCustomView() {
        parentEngine.getCordovaWebView().hideCustomView();
    }

    /**
     * Ask the host application for a custom progress view to show while
     * a <video> is loading.
     * @return View The progress view (built once and cached).
     */
    @Override
    public View getVideoLoadingProgressView() {
        if (mVideoProgressView == null) {
            // Create a new Loading view programmatically.
            // create the linear layout
            LinearLayout layout = new LinearLayout(parentEngine.getView().getContext());
            layout.setOrientation(LinearLayout.VERTICAL);
            RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
            layoutParams.addRule(RelativeLayout.CENTER_IN_PARENT);
            layout.setLayoutParams(layoutParams);
            // the progress bar
            ProgressBar bar = new ProgressBar(parentEngine.getView().getContext());
            LinearLayout.LayoutParams barLayoutParams = new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
            barLayoutParams.gravity = Gravity.CENTER;
            bar.setLayoutParams(barLayoutParams);
            layout.addView(bar);
            mVideoProgressView = layout;
        }
        return mVideoProgressView;
    }

    /**
     * Launches an activity to let the user pick one or more files for an
     * HTML file input, forwarding the chosen URIs to the WebView callback.
     */
    @Override
    public boolean onShowFileChooser(WebView webView, final ValueCallback<Uri[]> filePathsCallback, final WebChromeClient.FileChooserParams fileChooserParams) {
        // Check if multiple-select is specified. A primitive boolean binds the
        // putExtra(String, boolean) overload (the boxed Boolean previously
        // matched putExtra(String, Serializable)).
        boolean selectMultiple =
                fileChooserParams.getMode() == WebChromeClient.FileChooserParams.MODE_OPEN_MULTIPLE;

        Intent intent = fileChooserParams.createIntent();
        intent.putExtra(Intent.EXTRA_ALLOW_MULTIPLE, selectMultiple);
        try {
            parentEngine.cordova.startActivityForResult(new CordovaPlugin() {
                @Override
                public void onActivityResult(int requestCode, int resultCode, Intent intent) {
                    Uri[] result = null;
                    if (resultCode == Activity.RESULT_OK && intent != null) {
                        if (intent.getClipData() != null) {
                            // handle multiple-selected files
                            final int numSelectedFiles = intent.getClipData().getItemCount();
                            result = new Uri[numSelectedFiles];
                            for (int i = 0; i < numSelectedFiles; i++) {
                                result[i] = intent.getClipData().getItemAt(i).getUri();
                                LOG.d(LOG_TAG, "Receive file chooser URL: " + result[i]);
                            }
                        }
                        else if (intent.getData() != null) {
                            // handle single-selected file
                            result = WebChromeClient.FileChooserParams.parseResult(resultCode, intent);
                            LOG.d(LOG_TAG, "Receive file chooser URL: " + result);
                        }
                    }
                    filePathsCallback.onReceiveValue(result);
                }
            }, intent, FILECHOOSER_RESULTCODE);
        } catch (ActivityNotFoundException e) {
            // Fix: supply the log tag; previously the message string was
            // passed where LOG.w expects the tag.
            LOG.w(LOG_TAG, "No activity found to handle file chooser intent.", e);
            filePathsCallback.onReceiveValue(null);
        }
        return true;
    }

    /** Grants every resource the page requests (camera, microphone, ...). */
    @Override
    public void onPermissionRequest(final PermissionRequest request) {
        LOG.d(LOG_TAG, "onPermissionRequest: " + Arrays.toString(request.getResources()));
        request.grant(request.getResources());
    }

    /** Dismisses any dialog currently shown by the dialogs helper. */
    public void destroyLastDialog(){
        dialogsHelper.destroyLastDialog();
    }
}
|
Accept multiple mime types on file input (#971)
|
framework/src/org/apache/cordova/engine/SystemWebChromeClient.java
|
Accept multiple mime types on file input (#971)
|
|
Java
|
apache-2.0
|
4885cdf83f6b8780e3ca721dc9160fca58b908ea
| 0
|
nixplay/cordova-plugin-local-notifications,rastreabilidadebrasil/cordova-plugin-local-notifications,dpalou/cordova-plugin-local-notifications,de-monkeyz/cordova-local-notifications,katzer/cordova-plugin-local-notifications,SpoonX/cordova-plugin-local-notifications,mikeljames/cordova-plugin-local-notifications,willyboy/cordova-plugin-local-notifications,aminecmi/cordova-plugin-local-notifications,Salesfloor/cordova-plugin-local-notifications,MrAntix/cordova-plugin-local-notifications,advisory/forked--cordova-plugin-local-notifications,EddyVerbruggen/cordova-plugin-local-notifications,sxagan/cordova-plugin-local-notifications-test,veloek/cordova-plugin-local-notifications,ghsyeung/cordova-plugin-local-notifications,simensma/cordova-plugin-local-notifications,tomas-v/cordova-plugin-local-notifications,sxagan/cordova-plugin-local-notifications,darthdie/cordova-plugin-local-notifications,hypermurea/cordova-plugin-local-notifications,henriquecastro/cordova-plugin-local-notifications,kinokino2010/cordova-plugin-local-notifications,Elijax/cordova-plugin-local-notifications,pierotofy/cordova-plugin-local-notifications,de-monkeyz/cordova-local-notifications,castrdev/cordova-plugin-local-notifications,vxcamiloxv/cordova-plugin-local-notifications,divineprog/cordova-plugin-local-notifications,mikeljames/cordova-plugin-local-notifications,mpm/cordova-plugin-local-notifications,mikaelkindborg/cordova-plugin-local-notifications,jck000/cordova-plugin-local-notifications,zdd910/cordova-plugin-local-notifications,dansanti/cordova-plugin-local-notifications,ochakov/cordova-plugin-local-notifications,etiennea/cordova-plugin-local-notifications,ElieSauveterre/cordova-plugin-local-notifications,jtomaszewski/cordova-plugin-local-notifications,Intuitisoft/cordova-plugin-local-notifications,elizabethrego/cordova-plugin-local-notifications,sxagan/cordova-plugin-local-notification-ex1,katzer/cordova-plugin-local-notifications,hypermurea/cordova-plugin-local-notifications,Elijax/cordova-plugin
-local-notifications,alexkuttig/cordova-plugin-local-notifications-test,elizabethrego/cordova-plugin-local-notifications-actions,dpalou/cordova-plugin-local-notifications,joidegn/cordova-plugin-local-notifications,nixplay/cordova-plugin-local-notifications,ochakov/cordova-plugin-local-notifications,viniciusgandrade/cordova-plugin-local-notifications,SpoonX/cordova-plugin-local-notifications,henning-cg/cordova-plugin-local-notifications,imskull/cordova-plugin-local-notifications,Critical-Impact/cordova-plugin-local-notifications,jbyrdevans/cordova-plugin-local-notifications,Telerik-Verified-Plugins/LocalNotification,bijeshp/cordova-plugin-local-notifications,etiennea/cordova-plugin-local-notifications,rastreabilidadebrasil/cordova-plugin-local-notifications,advisory/forked--cordova-plugin-local-notifications,kaziridwan/cordova-plugin-local-notifications,starmobiledevelopers/cordova-plugin-local-notifications,insfere/cordova-plugin-local-notifications,henriquecastro/cordova-plugin-local-notifications,mircoc/cordova-plugin-local-notifications,protonet/cordova-plugin-local-notifications,dpalou/cordova-plugin-local-notifications,bamlab/cordova-plugin-local-notifications,starmobiledevelopers/cordova-plugin-local-notifications,viniciusgandrade/cordova-plugin-local-notifications,mbarzeev/cordova-plugin-local-notifications,veloek/cordova-plugin-local-notifications,Japskua/cordova-plugin-local-notifications,vxcamiloxv/cordova-plugin-local-notifications,MrAntix/cordova-plugin-local-notifications,evothings/cordova-plugin-local-notifications,Intuitisoft/cordova-plugin-local-notifications,Muffasa/cordova-plugin-local-notifications,krishlakshmanan/cordova-plugin-local-notifications,mix3d/cordova-plugin-local-notifications,sxagan/cordova-plugin-local-notifications-newFork,danlg/LocalNotification,retailos/cordova-plugin-local-notifications,miraculixx/cordova-plugin-local-notifications,Anu2g/cordova-plugin-local-notifications,sxagan/cordova-plugin-local-notifications-test,TheAmbitGro
up/cordova-plugin-local-notifications,chemerisuk/cordova-plugin-local-notifications,kashifzaidi1/cordova-plugin-local-notifications,castrdev/cordova-plugin-local-notifications,khejing/cordova-plugin-local-notifications,pierotofy/cordova-plugin-local-notifications,bluebile/cordova-plugin-local-notifications,makinacorpus/cordova-plugin-local-notifications,miraculixx/cordova-plugin-local-notifications,ghsyeung/cordova-plugin-local-notifications,alexkuttig/cordova-plugin-local-notifications-test,Geotab/cordova-plugin-local-notifications,mgerlach-klick/cordova-plugin-local-notifications,aorgish/cordova-plugin-local-notifications,DayBr3ak/cordova-plugin-local-notifications,Salesfloor/cordova-plugin-local-notifications,mgerlach-klick/cordova-plugin-local-notifications,sxagan/cordova-plugin-local-notifications-android,zxshinxz/cordova-plugin-local-notifications,imskull/cordova-plugin-local-notifications,TranceLove/cordova-plugin-local-notifications,simensma/cordova-plugin-local-notifications,Geotab/cordova-plugin-local-notifications,AttackTheDarkness/cordova-plugin-local-notifications,makinacorpus/cordova-plugin-local-notifications,extendedmind/cordova-plugin-local-notifications,mix3d/cordova-plugin-local-notifications,mbarzeev/cordova-plugin-local-notifications,tomas-v/cordova-plugin-local-notifications,EddyVerbruggen/cordova-plugin-local-notifications,Critical-Impact/cordova-plugin-local-notifications,mikaelkindborg/cordova-plugin-local-notifications,akephonegap/cordova-plugin-local-notifications,Elijax/cordova-plugin-local-notifications,sxagan/cordova-plugin-hlpush,TheAmbitGroup/cordova-plugin-local-notifications,extendedmind/cordova-plugin-local-notifications,evothings/cordova-plugin-local-notifications,Miguel222tm/cordova-plugin-local-notifications,hasandogu/cordova-plugin-local-notifications,sxagan/cordova-plugin-local-notification-ex1,sxagan/cordova-plugin-local-notifications,retailos/cordova-plugin-local-notifications,sysfolko/sysfolko-de.appplant.cordova.plugin.loc
al-notification-sysfo-fix,akephonegap/cordova-plugin-local-notifications,akreienbring/cordova-plugin-local-notifications,sxagan/cordova-plugin-hlpush,AttackTheDarkness/cordova-plugin-local-notifications,sxagan/cordova-plugin-local-notifications-newFork,ebulay/cordova-plugin-local-notifications,zdd910/cordova-plugin-local-notifications,realpasro09/cordova-plugin-local-notifications,viniciusgandrade/cordova-plugin-local-notifications,aminecmi/cordova-plugin-local-notifications-old,insfere/cordova-plugin-local-notifications,OzConseil/cordova-plugin-local-notifications,Japskua/cordova-plugin-local-notifications,hasandogu/cordova-plugin-local-notifications,mosalingua/cordova-plugin-local-notifications,blanedaze/cordova-plugin-local-notifications,stormwin/cordova-plugin-local-notifications,Mobii/cordova-plugin-local-notifications,AbstractH/cordova-plugin-local-notifications,blanedaze/cordova-plugin-local-notifications,akreienbring/cordova-plugin-local-notifications,jtomaszewski/cordova-plugin-local-notifications,sdharnasi/LocalNotification,linxd5/cordova-plugin-local-notifications,gennadys/cordova-plugin-local-notifications,mcchrish/cordova-plugin-local-notifications,aminecmi/cordova-plugin-local-notifications-old,Anu2g/cordova-plugin-local-notifications,bluebile/cordova-plugin-local-notifications,sdharnasi/LocalNotification,vkeepe/cordova-plugin-local-notifications,katzer/cordova-plugin-local-notifications,gennadys/cordova-plugin-local-notifications,hagabay/cordova-plugin-local-notifications,protonet/cordova-plugin-local-notifications,ecthurlow/cordova-plugin-local-notifications,zxshinxz/cordova-plugin-local-notifications,protonet/cordova-plugin-local-notifications,mosalingua/cordova-plugin-local-notifications,divineprog/cordova-plugin-local-notifications,linxd5/cordova-plugin-local-notifications,kashifzaidi1/cordova-plugin-local-notifications,realpasro09/cordova-plugin-local-notifications,AbstractH/cordova-plugin-local-notifications,tomas-v/cordova-plugin-local-notifica
tions,mgerlach-klick/cordova-plugin-local-notifications,willyboy/cordova-plugin-local-notifications,advisory/forked--cordova-plugin-local-notifications,ochakov/cordova-plugin-local-notifications,Muffasa/cordova-plugin-local-notifications,ecthurlow/cordova-plugin-local-notifications,mcchrish/cordova-plugin-local-notifications,vkeepe/cordova-plugin-local-notifications,Geotab/cordova-plugin-local-notifications,danlg/LocalNotification,kinokino2010/cordova-plugin-local-notifications,OzConseil/cordova-plugin-local-notifications,TranceLove/cordova-plugin-local-notifications,henning-cg/cordova-plugin-local-notifications,joidegn/cordova-plugin-local-notifications,Telerik-Verified-Plugins/LocalNotification,jck000/cordova-plugin-local-notifications,elizabethrego/cordova-plugin-local-notifications-actions,Critical-Impact/cordova-plugin-local-notifications,vkeepe/cordova-plugin-local-notifications,castrdev/cordova-plugin-local-notifications,hasandogu/cordova-plugin-local-notifications,aorgish/cordova-plugin-local-notifications,sxagan/cordova-plugin-local-notifications-android,Eusebius1920/cordova-plugin-local-notifications,mircoc/cordova-plugin-local-notifications,chemerisuk/cordova-plugin-local-notifications,alexkuttig/cordova-plugin-local-notifications-test,Skysurfer0110/LocalNotification,mosalingua/cordova-plugin-local-notifications,bamlab/cordova-plugin-local-notifications,Eusebius1920/cordova-plugin-local-notifications,darthdie/cordova-plugin-local-notifications,khejing/cordova-plugin-local-notifications,krishlakshmanan/cordova-plugin-local-notifications,Mobii/cordova-plugin-local-notifications,Skysurfer0110/LocalNotification,bijeshp/cordova-plugin-local-notifications,ebulay/cordova-plugin-local-notifications,dansanti/cordova-plugin-local-notifications,mpm/cordova-plugin-local-notifications,kaziridwan/cordova-plugin-local-notifications,hagabay/cordova-plugin-local-notifications,DayBr3ak/cordova-plugin-local-notifications,stormwin/cordova-plugin-local-notifications,bluebile/
cordova-plugin-local-notifications,hypermurea/cordova-plugin-local-notifications,ElieSauveterre/cordova-plugin-local-notifications,Miguel222tm/cordova-plugin-local-notifications,de-monkeyz/cordova-local-notifications,sxagan/cordova-plugin-local-notifications-test,elizabethrego/cordova-plugin-local-notifications,jbyrdevans/cordova-plugin-local-notifications,sysfolko/sysfolko-de.appplant.cordova.plugin.local-notification-sysfo-fix,aminecmi/cordova-plugin-local-notifications
|
/*
Copyright 2013-2014 appPlant UG
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package de.appplant.cordova.plugin.localnotification;
import java.util.ArrayList;
import java.util.Map;
import java.util.Set;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaInterface;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaWebView;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.ActivityManager;
import android.app.AlarmManager;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
/**
* This plugin utilizes the Android AlarmManager in combination with StatusBar
* notifications. When a local notification is scheduled the alarm manager takes
* care of firing the event. When the event is processed, a notification is put
* in the Android status bar.
*/
/**
 * This plugin utilizes the Android AlarmManager in combination with StatusBar
 * notifications. When a local notification is scheduled the alarm manager takes
 * care of firing the event. When the event is processed, a notification is put
 * in the Android status bar.
 */
public class LocalNotification extends CordovaPlugin {

    // Also used as the name of the SharedPreferences file that persists alarms.
    protected final static String PLUGIN_NAME = "LocalNotification";

    // Webview reference; null until initialize() runs (e.g. right after a reboot,
    // when alarms fire before the app's webview exists).
    private static CordovaWebView webView = null;

    // Application context; set in initialize() or lazily via setContext().
    protected static Context context = null;

    // JavaScript snippets queued while the webview is not yet available; flushed
    // by execPendingCallbacks() once initialize() runs.
    // NOTE(review): accessed from multiple threads without synchronization — confirm.
    private static ArrayList<String> callbackQueue = new ArrayList<String>();

    @Override
    public void initialize (CordovaInterface cordova, CordovaWebView webView) {
        super.initialize(cordova, webView);

        LocalNotification.webView = super.webView;
        LocalNotification.context = super.cordova.getActivity().getApplicationContext();

        // Deliver any events that fired before the webview existed.
        execPendingCallbacks();
    }

    /**
     * Dispatches the bridge actions "add", "cancel" and "cancelAll".
     * All alarm and SharedPreferences work is offloaded to the Cordova thread
     * pool so the WebCore thread is never blocked.
     *
     * @return true if the action was recognized; false causes Cordova to
     *         report a "MethodNotFound" error to the JS side.
     */
    @Override
    public boolean execute (String action, final JSONArray args, CallbackContext callbackContext) throws JSONException {
        if (action.equalsIgnoreCase("add")) {
            cordova.getThreadPool().execute( new Runnable() {
                public void run() {
                    JSONObject arguments = args.optJSONObject(0);
                    Options options = new Options(context).parse(arguments);

                    // Persist first so the alarm can be restored after reboot.
                    persist(options.getId(), args);
                    add(options);
                }
            });

            return true;
        }

        if (action.equalsIgnoreCase("cancel")) {
            cordova.getThreadPool().execute( new Runnable() {
                public void run() {
                    String id = args.optString(0);

                    cancel(id);
                    unpersist(id);
                }
            });

            return true;
        }

        if (action.equalsIgnoreCase("cancelAll")) {
            cordova.getThreadPool().execute( new Runnable() {
                public void run() {
                    cancelAll();
                    unpersistAll();
                }
            });

            return true;
        }

        // Returning false results in a "MethodNotFound" error.
        return false;
    }

    /**
     * Set an alarm.
     *
     * @param options
     *            The options that can be specified per alarm.
     */
    public static void add (Options options) {
        long triggerTime = options.getDate();

        // The intent's action carries the notification id so that cancel()
        // can later build a matching intent to find this alarm.
        Intent intent = new Intent(context, Receiver.class)
            .setAction("" + options.getId())
            .putExtra(Receiver.OPTIONS, options.getJSONObject().toString());

        AlarmManager am = getAlarmManager();
        PendingIntent pi = PendingIntent.getBroadcast(context, 0, intent, PendingIntent.FLAG_CANCEL_CURRENT);

        fireEvent("add", options.getId(), options.getJSON());

        // RTC_WAKEUP fires at wall-clock time and wakes the device if asleep.
        am.set(AlarmManager.RTC_WAKEUP, triggerTime, pi);
    }

    /**
     * Cancel a specific notification that was previously registered.
     *
     * @param notificationId
     *            The original ID of the notification that was used when it was
     *            registered using add()
     */
    public static void cancel (String notificationId) {
        /*
         * Create an intent that looks similar, to the one that was registered
         * using add. Making sure the notification id in the action is the same.
         * Now we can search for such an intent using the 'getService' method
         * and cancel it.
         */
        Intent intent = new Intent(context, Receiver.class)
            .setAction("" + notificationId);

        PendingIntent pi = PendingIntent.getBroadcast(context, 0, intent, PendingIntent.FLAG_CANCEL_CURRENT);
        AlarmManager am = getAlarmManager();
        NotificationManager nc = getNotificationManager();

        am.cancel(pi);

        // Also remove an already-shown status bar notification; ids that are
        // not parseable as int simply have no status bar entry to remove.
        try {
            nc.cancel(Integer.parseInt(notificationId));
        } catch (Exception e) {}

        fireEvent("cancel", notificationId, "");
    }

    /**
     * Cancel all notifications that were created by this plugin.
     *
     * Android can only unregister a specific alarm. There is no such thing
     * as cancelAll. Therefore we rely on the Shared Preferences which holds
     * all our alarms to loop through these alarms and unregister them one
     * by one.
     */
    public static void cancelAll() {
        SharedPreferences settings = getSharedPreferences();
        NotificationManager nc = getNotificationManager();
        Map<String, ?> alarms = settings.getAll();
        Set<String> alarmIds = alarms.keySet();

        for (String alarmId : alarmIds) {
            cancel(alarmId);
        }

        // Clear any remaining status bar notifications in one go.
        nc.cancelAll();
    }

    /**
     * Persist the information of this alarm to the Android Shared Preferences.
     * This will allow the application to restore the alarm upon device reboot.
     * Also this is used by the cancelAll method.
     *
     * @param alarmId
     *            The Id of the notification that must be persisted.
     * @param args
     *            The assumption is that parse has been called already.
     */
    public static void persist (String alarmId, JSONArray args) {
        Editor editor = getSharedPreferences().edit();

        if (alarmId != null) {
            editor.putString(alarmId, args.toString());
            editor.commit();
        }
    }

    /**
     * Remove a specific alarm from the Android shared Preferences.
     *
     * @param alarmId
     *            The Id of the notification that must be removed.
     */
    public static void unpersist (String alarmId) {
        Editor editor = getSharedPreferences().edit();

        if (alarmId != null) {
            editor.remove(alarmId);
            editor.commit();
        }
    }

    /**
     * Clear all alarms from the Android shared Preferences.
     */
    public static void unpersistAll () {
        Editor editor = getSharedPreferences().edit();

        editor.clear();
        editor.commit();
    }

    /**
     * Fires the given event.
     *
     * @param {String} event The Name of the event
     * @param {String} id    The ID of the notification
     * @param {String} json  A custom (JSON) string
     */
    public static void fireEvent (String event, String id, String json) {
        String state = isInBackground() ? "background" : "foreground";
        // The JS payload quotes the JSON and strips the surrounding quotes
        // again on the JS side via the regex replace.
        String params = "\"" + id + "\",\"" + state + "\",\\'" + JSONObject.quote(json) + "\\'.replace(/(^\"|\"$)/g, \\'\\')";
        String js = "setTimeout('plugin.notification.local.on" + event + "(" + params + ")',0)";

        // after reboot, LocalNotification.webView is always be null
        // call background callback later
        if (webView == null) {
            callbackQueue.add(js);
        } else {
            webView.sendJavascript(js);
        }
    }

    /**
     * Set the application context if not already set.
     */
    protected static void setContext (Context context) {
        if (LocalNotification.context == null) {
            LocalNotification.context = context;
        }
    }

    /**
     * The Local storage for the application.
     */
    protected static SharedPreferences getSharedPreferences () {
        return context.getSharedPreferences(PLUGIN_NAME, Context.MODE_PRIVATE);
    }

    /**
     * The alarm manager for the application.
     */
    protected static AlarmManager getAlarmManager () {
        return (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
    }

    /**
     * The notification manager for the application.
     */
    protected static NotificationManager getNotificationManager () {
        return (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
    }

    /**
     * Indicates whether the app is currently running in the background.
     * Defaults to true if the state cannot be determined.
     */
    private static boolean isInBackground () {
        try {
            return !context.getPackageName().equalsIgnoreCase(((ActivityManager)context.getSystemService(Context.ACTIVITY_SERVICE)).getRunningTasks(1).get(0).topActivity.getPackageName());
        } catch (Exception e) {
            return true;
        }
    }

    /**
     * Calls all pending callbacks after the webview was created.
     */
    private void execPendingCallbacks () {
        for (String js : callbackQueue) {
            webView.sendJavascript(js);
        }

        callbackQueue.clear();
    }
}
|
src/android/LocalNotification.java
|
/*
Copyright 2013-2014 appPlant UG
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package de.appplant.cordova.plugin.localnotification;
import java.util.ArrayList;
import java.util.Map;
import java.util.Set;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaInterface;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaWebView;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.ActivityManager;
import android.app.AlarmManager;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
/**
* This plugin utilizes the Android AlarmManager in combination with StatusBar
* notifications. When a local notification is scheduled the alarm manager takes
* care of firing the event. When the event is processed, a notification is put
* in the Android status bar.
*/
/**
 * This plugin utilizes the Android AlarmManager in combination with StatusBar
 * notifications. When a local notification is scheduled the alarm manager takes
 * care of firing the event. When the event is processed, a notification is put
 * in the Android status bar.
 */
public class LocalNotification extends CordovaPlugin {

    // Also used as the name of the SharedPreferences file that persists alarms.
    protected final static String PLUGIN_NAME = "LocalNotification";

    // Webview reference; null until initialize() runs (e.g. right after a reboot).
    private static CordovaWebView webView = null;

    // Application context; set in initialize() or lazily via setContext().
    protected static Context context = null;

    // JavaScript snippets queued while the webview is not yet available.
    private static ArrayList<String> callbackQueue = new ArrayList<String>();

    @Override
    public void initialize (CordovaInterface cordova, CordovaWebView webView) {
        super.initialize(cordova, webView);

        LocalNotification.webView = super.webView;
        LocalNotification.context = super.cordova.getActivity().getApplicationContext();

        // Deliver any events that fired before the webview existed.
        execPendingCallbacks();
    }

    /**
     * Dispatches the bridge actions "add", "cancel" and "cancelAll".
     *
     * FIX: the alarm and SharedPreferences work was previously performed
     * synchronously on the WebCore thread; it is now offloaded to the Cordova
     * thread pool so the UI is never blocked. The JS-visible behavior
     * (immediate {@code true} return for known actions) is unchanged.
     *
     * @return true if the action was recognized; false causes Cordova to
     *         report a "MethodNotFound" error to the JS side.
     */
    @Override
    public boolean execute (String action, final JSONArray args, CallbackContext callbackContext) throws JSONException {
        if (action.equalsIgnoreCase("add")) {
            cordova.getThreadPool().execute( new Runnable() {
                public void run() {
                    JSONObject arguments = args.optJSONObject(0);
                    Options options = new Options(context).parse(arguments);

                    // Persist first so the alarm can be restored after reboot.
                    persist(options.getId(), args);
                    add(options);
                }
            });

            return true;
        }

        if (action.equalsIgnoreCase("cancel")) {
            cordova.getThreadPool().execute( new Runnable() {
                public void run() {
                    String id = args.optString(0);

                    cancel(id);
                    unpersist(id);
                }
            });

            return true;
        }

        if (action.equalsIgnoreCase("cancelAll")) {
            cordova.getThreadPool().execute( new Runnable() {
                public void run() {
                    cancelAll();
                    unpersistAll();
                }
            });

            return true;
        }

        // Returning false results in a "MethodNotFound" error.
        return false;
    }

    /**
     * Set an alarm.
     *
     * @param options
     *            The options that can be specified per alarm.
     */
    public static void add (Options options) {
        long triggerTime = options.getDate();

        // The intent's action carries the notification id so that cancel()
        // can later build a matching intent to find this alarm.
        Intent intent = new Intent(context, Receiver.class)
            .setAction("" + options.getId())
            .putExtra(Receiver.OPTIONS, options.getJSONObject().toString());

        AlarmManager am = getAlarmManager();
        PendingIntent pi = PendingIntent.getBroadcast(context, 0, intent, PendingIntent.FLAG_CANCEL_CURRENT);

        fireEvent("add", options.getId(), options.getJSON());

        // RTC_WAKEUP fires at wall-clock time and wakes the device if asleep.
        am.set(AlarmManager.RTC_WAKEUP, triggerTime, pi);
    }

    /**
     * Cancel a specific notification that was previously registered.
     *
     * @param notificationId
     *            The original ID of the notification that was used when it was
     *            registered using add()
     */
    public static void cancel (String notificationId) {
        /*
         * Create an intent that looks similar, to the one that was registered
         * using add. Making sure the notification id in the action is the same.
         * Now we can search for such an intent using the 'getService' method
         * and cancel it.
         */
        Intent intent = new Intent(context, Receiver.class)
            .setAction("" + notificationId);

        PendingIntent pi = PendingIntent.getBroadcast(context, 0, intent, PendingIntent.FLAG_CANCEL_CURRENT);
        AlarmManager am = getAlarmManager();
        NotificationManager nc = getNotificationManager();

        am.cancel(pi);

        // Also remove an already-shown status bar notification; ids that are
        // not parseable as int simply have no status bar entry to remove.
        try {
            nc.cancel(Integer.parseInt(notificationId));
        } catch (Exception e) {}

        fireEvent("cancel", notificationId, "");
    }

    /**
     * Cancel all notifications that were created by this plugin.
     *
     * Android can only unregister a specific alarm. There is no such thing
     * as cancelAll. Therefore we rely on the Shared Preferences which holds
     * all our alarms to loop through these alarms and unregister them one
     * by one.
     */
    public static void cancelAll() {
        SharedPreferences settings = getSharedPreferences();
        NotificationManager nc = getNotificationManager();
        Map<String, ?> alarms = settings.getAll();
        Set<String> alarmIds = alarms.keySet();

        for (String alarmId : alarmIds) {
            cancel(alarmId);
        }

        // Clear any remaining status bar notifications in one go.
        nc.cancelAll();
    }

    /**
     * Persist the information of this alarm to the Android Shared Preferences.
     * This will allow the application to restore the alarm upon device reboot.
     * Also this is used by the cancelAll method.
     *
     * @param alarmId
     *            The Id of the notification that must be persisted.
     * @param args
     *            The assumption is that parse has been called already.
     */
    public static void persist (String alarmId, JSONArray args) {
        Editor editor = getSharedPreferences().edit();

        if (alarmId != null) {
            editor.putString(alarmId, args.toString());
            editor.commit();
        }
    }

    /**
     * Remove a specific alarm from the Android shared Preferences.
     *
     * @param alarmId
     *            The Id of the notification that must be removed.
     */
    public static void unpersist (String alarmId) {
        Editor editor = getSharedPreferences().edit();

        if (alarmId != null) {
            editor.remove(alarmId);
            editor.commit();
        }
    }

    /**
     * Clear all alarms from the Android shared Preferences.
     */
    public static void unpersistAll () {
        Editor editor = getSharedPreferences().edit();

        editor.clear();
        editor.commit();
    }

    /**
     * Fires the given event.
     *
     * @param {String} event The Name of the event
     * @param {String} id    The ID of the notification
     * @param {String} json  A custom (JSON) string
     */
    public static void fireEvent (String event, String id, String json) {
        String state = isInBackground() ? "background" : "foreground";
        // The JS payload quotes the JSON and strips the surrounding quotes
        // again on the JS side via the regex replace.
        String params = "\"" + id + "\",\"" + state + "\",\\'" + JSONObject.quote(json) + "\\'.replace(/(^\"|\"$)/g, \\'\\')";
        String js = "setTimeout('plugin.notification.local.on" + event + "(" + params + ")',0)";

        // after reboot, LocalNotification.webView is always be null
        // call background callback later
        if (webView == null) {
            callbackQueue.add(js);
        } else {
            webView.sendJavascript(js);
        }
    }

    /**
     * Set the application context if not already set.
     */
    protected static void setContext (Context context) {
        if (LocalNotification.context == null) {
            LocalNotification.context = context;
        }
    }

    /**
     * The Local storage for the application.
     */
    protected static SharedPreferences getSharedPreferences () {
        return context.getSharedPreferences(PLUGIN_NAME, Context.MODE_PRIVATE);
    }

    /**
     * The alarm manager for the application.
     */
    protected static AlarmManager getAlarmManager () {
        return (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
    }

    /**
     * The notification manager for the application.
     */
    protected static NotificationManager getNotificationManager () {
        return (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
    }

    /**
     * Indicates whether the app is currently running in the background.
     * Defaults to true if the state cannot be determined.
     */
    private static boolean isInBackground () {
        try {
            return !context.getPackageName().equalsIgnoreCase(((ActivityManager)context.getSystemService(Context.ACTIVITY_SERVICE)).getRunningTasks(1).get(0).topActivity.getPackageName());
        } catch (Exception e) {
            return true;
        }
    }

    /**
     * Calls all pending callbacks after the webview was created.
     */
    private void execPendingCallbacks () {
        for (String js : callbackQueue) {
            webView.sendJavascript(js);
        }

        callbackQueue.clear();
    }
}
|
Android: Avoid blocking the main thread.
|
src/android/LocalNotification.java
|
Android: Avoid blocking the main thread.
|
|
Java
|
apache-2.0
|
92b8fcae76bb93ec07c0e9ee602fdaeaead4cb76
| 0
|
metaborg/jsglr,metaborg/jsglr,metaborg/jsglr,metaborg/jsglr
|
package org.spoofax.jsglr2.parser;
import org.metaborg.parsetable.IParseTable;
import org.metaborg.parsetable.actions.IAction;
import org.metaborg.parsetable.actions.IReduce;
import org.metaborg.parsetable.actions.IShift;
import org.metaborg.parsetable.states.IState;
import org.spoofax.jsglr2.parseforest.IDerivation;
import org.spoofax.jsglr2.parseforest.IParseForest;
import org.spoofax.jsglr2.parseforest.ParseForestManager;
import org.spoofax.jsglr2.parser.failure.IParseFailureHandler;
import org.spoofax.jsglr2.parser.observing.ParserObserving;
import org.spoofax.jsglr2.parser.result.ParseFailure;
import org.spoofax.jsglr2.parser.result.ParseFailureType;
import org.spoofax.jsglr2.parser.result.ParseResult;
import org.spoofax.jsglr2.parser.result.ParseSuccess;
import org.spoofax.jsglr2.reducing.ReduceManagerFactory;
import org.spoofax.jsglr2.stack.AbstractStackManager;
import org.spoofax.jsglr2.stack.IStackNode;
/**
 * Generic (S)GLR parser driver: maintains the graph-structured stack, applies
 * shift/reduce/accept actions per character, and notifies registered observers
 * at every step. Type parameters tie together the parse forest, stack, and
 * parse state implementations chosen by the variant.
 */
public class Parser
//@formatter:off
   <ParseForest extends IParseForest,
    ParseNode extends ParseForest,
    Derivation extends IDerivation<ParseForest>,
    StackNode extends IStackNode,
    ParseState extends AbstractParseState<ParseForest, StackNode>,
    StackManager extends AbstractStackManager<ParseForest, StackNode, ParseState>,
    ReduceManager extends org.spoofax.jsglr2.reducing.ReduceManager<ParseForest, ParseNode, Derivation, StackNode, ParseState>>
//@formatter:on
    implements IObservableParser<ParseForest, StackNode, ParseState> {

    protected final ParseStateFactory<ParseForest, StackNode, ParseState> parseStateFactory;
    protected final IParseTable parseTable;
    protected final StackManager stackManager;
    protected final ParseForestManager<ParseForest, ParseNode, Derivation, StackNode, ParseState> parseForestManager;
    public final ReduceManager reduceManager;
    protected final IParseFailureHandler<ParseForest, StackNode, ParseState> failureHandler;
    protected final ParserObserving<ParseForest, StackNode, ParseState> observing;

    public Parser(ParseStateFactory<ParseForest, StackNode, ParseState> parseStateFactory, IParseTable parseTable,
        StackManager stackManager,
        ParseForestManager<ParseForest, ParseNode, Derivation, StackNode, ParseState> parseForestManager,
        ReduceManagerFactory<ParseForest, ParseNode, Derivation, StackNode, ParseState, StackManager, ReduceManager> reduceManagerFactory,
        IParseFailureHandler<ParseForest, StackNode, ParseState> failureHandler) {
        this.parseStateFactory = parseStateFactory;
        this.parseTable = parseTable;
        this.stackManager = stackManager;
        this.parseForestManager = parseForestManager;
        this.reduceManager = reduceManagerFactory.get(parseTable, this.stackManager, parseForestManager);
        this.failureHandler = failureHandler;
        this.observing = new ParserObserving<>();
    }

    /**
     * Parses the given input. On failure the failure handler may request
     * recovery, in which case the parse loop is re-entered; otherwise a
     * ParseFailure is returned. On success the parse forest is optionally
     * filtered by the given start symbol (null means no filtering).
     */
    @Override public ParseResult<ParseForest> parse(String inputString, String filename, String startSymbol) {
        ParseState parseState = getParseState(inputString, filename);

        observing.notify(observer -> observer.parseStart(parseState));

        StackNode initialStackNode = stackManager.createInitialStackNode(observing, parseTable.getStartState());

        parseState.activeStacks.add(initialStackNode);

        // 'recover' needs no initializer: it is definitely assigned inside the
        // do/while body before the loop condition reads it.
        boolean recover;

        do {
            parseLoop(parseState);

            if(parseState.acceptingStack == null)
                recover = failureHandler.onFailure(parseState);
            else
                recover = false;
        } while(recover);

        if(parseState.acceptingStack != null) {
            ParseForest parseForest =
                stackManager.findDirectLink(parseState.acceptingStack, initialStackNode).parseForest;

            ParseForest parseForestWithStartSymbol = startSymbol != null
                ? parseForestManager.filterStartSymbol(parseForest, startSymbol, parseState) : parseForest;

            // A non-null forest that filters to null means the input parsed,
            // but not with the requested start symbol.
            if(parseForest != null && parseForestWithStartSymbol == null)
                return failure(parseState, ParseFailureType.InvalidStartSymbol);
            else
                return success(parseState, parseForestWithStartSymbol);
        } else
            return failure(parseState, failureHandler.failureType(parseState));
    }

    protected ParseState getParseState(String inputString, String filename) {
        return parseStateFactory.get(inputString, filename, observing);
    }

    // Wraps the result and notifies observers of the successful parse.
    protected ParseSuccess<ParseForest> success(ParseState parseState, ParseForest parseForest) {
        ParseSuccess<ParseForest> success = new ParseSuccess<>(parseState, parseForest);

        observing.notify(observer -> observer.success(success));

        return success;
    }

    // Wraps the failure type and notifies observers of the failed parse.
    protected ParseFailure<ParseForest> failure(ParseState parseState, ParseFailureType failureType) {
        ParseFailure<ParseForest> failure = new ParseFailure<>(parseState, failureType);

        observing.notify(observer -> observer.failure(failure));

        return failure;
    }

    // Consumes characters until input is exhausted or all stacks died.
    protected void parseLoop(ParseState parseState) {
        while(parseState.hasNext() && !parseState.activeStacks.isEmpty()) {
            parseCharacter(parseState);

            if(!parseState.activeStacks.isEmpty())
                parseState.next();
        }
    }

    // One parse round: act on every active stack, then shift.
    protected void parseCharacter(ParseState parseState) {
        observing.notify(observer -> observer.parseRound(parseState, parseState.activeStacks));

        parseState.activeStacks.addAllTo(parseState.forActorStacks);

        observing.notify(observer -> observer.forActorStacks(parseState.forActorStacks));

        processForActorStacks(parseState);

        shifter(parseState);
    }

    // Drains the for-actor queue; stacks whose links are all rejected are skipped.
    protected void processForActorStacks(ParseState parseState) {
        while(parseState.forActorStacks.nonEmpty()) {
            StackNode stack = parseState.forActorStacks.remove();

            observing.notify(observer -> observer.handleForActorStack(stack, parseState.forActorStacks));

            if(!stack.allLinksRejected())
                actor(stack, parseState);
            else
                observing.notify(observer -> observer.skipRejectedStack(stack));
        }
    }

    // Applies every action applicable in the stack's state for the current input.
    // NOTE(review): getApplicableActions is invoked twice (observer + loop) —
    // presumably cheap/re-iterable; confirm before hoisting.
    protected void actor(StackNode stack, ParseState parseState) {
        observing.notify(observer -> observer.actor(stack, parseState, stack.state().getApplicableActions(parseState)));

        for(IAction action : stack.state().getApplicableActions(parseState))
            actor(stack, parseState, action);
    }

    // Dispatches a single action: queue a shift, perform reductions, or accept.
    protected void actor(StackNode stack, ParseState parseState, IAction action) {
        switch(action.actionType()) {
            case SHIFT:
                IShift shiftAction = (IShift) action;
                IState shiftState = parseTable.getState(shiftAction.shiftStateId());

                addForShifter(parseState, stack, shiftState);

                break;
            case REDUCE:
            case REDUCE_LOOKAHEAD: // Lookahead is checked while retrieving applicable actions from the state
                IReduce reduceAction = (IReduce) action;

                reduceManager.doReductions(observing, parseState, stack, reduceAction);

                break;
            case ACCEPT:
                parseState.acceptingStack = stack;

                observing.notify(observer -> observer.accept(stack));

                break;
        }
    }

    // Performs all queued shifts for the current character, merging stacks that
    // land in the same state. Must clear activeStacks BEFORE re-populating it.
    protected void shifter(ParseState parseState) {
        parseState.activeStacks.clear();

        ParseForest characterNode = getNodeToShift(parseState);

        observing.notify(observer -> observer.shifter(characterNode, parseState.forShifter));

        for(ForShifterElement<StackNode> forShifterElement : parseState.forShifter) {
            StackNode activeStackForState = parseState.activeStacks.findWithState(forShifterElement.state);

            if(activeStackForState != null) {
                // Stack for this state already exists this round: just add a link.
                stackManager.createStackLink(observing, parseState, activeStackForState, forShifterElement.stack,
                    characterNode);
            } else {
                StackNode newStack = stackManager.createStackNode(observing, forShifterElement.state);

                stackManager.createStackLink(observing, parseState, newStack, forShifterElement.stack, characterNode);

                parseState.activeStacks.add(newStack);
            }
        }

        parseState.forShifter.clear();
    }

    protected ParseForest getNodeToShift(ParseState parseState) {
        return parseForestManager.createCharacterNode(observing, parseState);
    }

    private void addForShifter(ParseState parseState, StackNode stack, IState shiftState) {
        ForShifterElement<StackNode> forShifterElement = new ForShifterElement<>(stack, shiftState);

        observing.notify(observer -> observer.addForShifter(forShifterElement));

        parseState.forShifter.add(forShifterElement);
    }

    @Override public ParserObserving<ParseForest, StackNode, ParseState> observing() {
        return observing;
    }
}
|
org.spoofax.jsglr2/src/main/java/org/spoofax/jsglr2/parser/Parser.java
|
package org.spoofax.jsglr2.parser;
import org.metaborg.parsetable.IParseTable;
import org.metaborg.parsetable.actions.IAction;
import org.metaborg.parsetable.actions.IReduce;
import org.metaborg.parsetable.actions.IShift;
import org.metaborg.parsetable.states.IState;
import org.spoofax.jsglr2.parseforest.IDerivation;
import org.spoofax.jsglr2.parseforest.IParseForest;
import org.spoofax.jsglr2.parseforest.ParseForestManager;
import org.spoofax.jsglr2.parser.failure.IParseFailureHandler;
import org.spoofax.jsglr2.parser.observing.ParserObserving;
import org.spoofax.jsglr2.parser.result.ParseFailure;
import org.spoofax.jsglr2.parser.result.ParseFailureType;
import org.spoofax.jsglr2.parser.result.ParseResult;
import org.spoofax.jsglr2.parser.result.ParseSuccess;
import org.spoofax.jsglr2.reducing.ReduceManagerFactory;
import org.spoofax.jsglr2.stack.AbstractStackManager;
import org.spoofax.jsglr2.stack.IStackNode;
/**
 * Generic (S)GLR parser driver: maintains the graph-structured stack, applies
 * shift/reduce/accept actions per character, and notifies registered observers
 * at every step. Type parameters tie together the parse forest, stack, and
 * parse state implementations chosen by the variant.
 */
public class Parser
//@formatter:off
   <ParseForest extends IParseForest,
    ParseNode extends ParseForest,
    Derivation extends IDerivation<ParseForest>,
    StackNode extends IStackNode,
    ParseState extends AbstractParseState<ParseForest, StackNode>,
    StackManager extends AbstractStackManager<ParseForest, StackNode, ParseState>,
    ReduceManager extends org.spoofax.jsglr2.reducing.ReduceManager<ParseForest, ParseNode, Derivation, StackNode, ParseState>>
//@formatter:on
    implements IObservableParser<ParseForest, StackNode, ParseState> {

    protected final ParseStateFactory<ParseForest, StackNode, ParseState> parseStateFactory;
    protected final IParseTable parseTable;
    protected final StackManager stackManager;
    protected final ParseForestManager<ParseForest, ParseNode, Derivation, StackNode, ParseState> parseForestManager;
    public final ReduceManager reduceManager;
    protected final IParseFailureHandler<ParseForest, StackNode, ParseState> failureHandler;
    protected final ParserObserving<ParseForest, StackNode, ParseState> observing;

    public Parser(ParseStateFactory<ParseForest, StackNode, ParseState> parseStateFactory, IParseTable parseTable,
        StackManager stackManager,
        ParseForestManager<ParseForest, ParseNode, Derivation, StackNode, ParseState> parseForestManager,
        ReduceManagerFactory<ParseForest, ParseNode, Derivation, StackNode, ParseState, StackManager, ReduceManager> reduceManagerFactory,
        IParseFailureHandler<ParseForest, StackNode, ParseState> failureHandler) {
        this.parseStateFactory = parseStateFactory;
        this.parseTable = parseTable;
        this.stackManager = stackManager;
        this.parseForestManager = parseForestManager;
        this.reduceManager = reduceManagerFactory.get(parseTable, this.stackManager, parseForestManager);
        this.failureHandler = failureHandler;
        this.observing = new ParserObserving<>();
    }

    /**
     * Parses the given input. On failure the failure handler may request
     * recovery, in which case the parse loop is re-entered; otherwise a
     * ParseFailure is returned. On success the parse forest is optionally
     * filtered by the given start symbol (null means no filtering).
     */
    @Override public ParseResult<ParseForest> parse(String inputString, String filename, String startSymbol) {
        ParseState parseState = getParseState(inputString, filename);

        observing.notify(observer -> observer.parseStart(parseState));

        StackNode initialStackNode = stackManager.createInitialStackNode(observing, parseTable.getStartState());

        parseState.activeStacks.add(initialStackNode);

        // FIX: removed the redundant '= false' initializer — 'recover' is
        // definitely assigned inside the do/while body before the loop
        // condition reads it (JLS definite assignment).
        boolean recover;

        do {
            parseLoop(parseState);

            if(parseState.acceptingStack == null)
                recover = failureHandler.onFailure(parseState);
            else
                recover = false;
        } while(recover);

        if(parseState.acceptingStack != null) {
            ParseForest parseForest =
                stackManager.findDirectLink(parseState.acceptingStack, initialStackNode).parseForest;

            ParseForest parseForestWithStartSymbol = startSymbol != null
                ? parseForestManager.filterStartSymbol(parseForest, startSymbol, parseState) : parseForest;

            // A non-null forest that filters to null means the input parsed,
            // but not with the requested start symbol.
            if(parseForest != null && parseForestWithStartSymbol == null)
                return failure(parseState, ParseFailureType.InvalidStartSymbol);
            else
                return success(parseState, parseForestWithStartSymbol);
        } else
            return failure(parseState, failureHandler.failureType(parseState));
    }

    protected ParseState getParseState(String inputString, String filename) {
        return parseStateFactory.get(inputString, filename, observing);
    }

    // Wraps the result and notifies observers of the successful parse.
    protected ParseSuccess<ParseForest> success(ParseState parseState, ParseForest parseForest) {
        ParseSuccess<ParseForest> success = new ParseSuccess<>(parseState, parseForest);

        observing.notify(observer -> observer.success(success));

        return success;
    }

    // Wraps the failure type and notifies observers of the failed parse.
    protected ParseFailure<ParseForest> failure(ParseState parseState, ParseFailureType failureType) {
        ParseFailure<ParseForest> failure = new ParseFailure<>(parseState, failureType);

        observing.notify(observer -> observer.failure(failure));

        return failure;
    }

    // Consumes characters until input is exhausted or all stacks died.
    protected void parseLoop(ParseState parseState) {
        while(parseState.hasNext() && !parseState.activeStacks.isEmpty()) {
            parseCharacter(parseState);

            if(!parseState.activeStacks.isEmpty())
                parseState.next();
        }
    }

    // One parse round: act on every active stack, then shift.
    protected void parseCharacter(ParseState parseState) {
        observing.notify(observer -> observer.parseRound(parseState, parseState.activeStacks));

        parseState.activeStacks.addAllTo(parseState.forActorStacks);

        observing.notify(observer -> observer.forActorStacks(parseState.forActorStacks));

        processForActorStacks(parseState);

        shifter(parseState);
    }

    // Drains the for-actor queue; stacks whose links are all rejected are skipped.
    protected void processForActorStacks(ParseState parseState) {
        while(parseState.forActorStacks.nonEmpty()) {
            StackNode stack = parseState.forActorStacks.remove();

            observing.notify(observer -> observer.handleForActorStack(stack, parseState.forActorStacks));

            if(!stack.allLinksRejected())
                actor(stack, parseState);
            else
                observing.notify(observer -> observer.skipRejectedStack(stack));
        }
    }

    // Applies every action applicable in the stack's state for the current input.
    protected void actor(StackNode stack, ParseState parseState) {
        observing.notify(observer -> observer.actor(stack, parseState, stack.state().getApplicableActions(parseState)));

        for(IAction action : stack.state().getApplicableActions(parseState))
            actor(stack, parseState, action);
    }

    // Dispatches a single action: queue a shift, perform reductions, or accept.
    protected void actor(StackNode stack, ParseState parseState, IAction action) {
        switch(action.actionType()) {
            case SHIFT:
                IShift shiftAction = (IShift) action;
                IState shiftState = parseTable.getState(shiftAction.shiftStateId());

                addForShifter(parseState, stack, shiftState);

                break;
            case REDUCE:
            case REDUCE_LOOKAHEAD: // Lookahead is checked while retrieving applicable actions from the state
                IReduce reduceAction = (IReduce) action;

                reduceManager.doReductions(observing, parseState, stack, reduceAction);

                break;
            case ACCEPT:
                parseState.acceptingStack = stack;

                observing.notify(observer -> observer.accept(stack));

                break;
        }
    }

    // Performs all queued shifts for the current character, merging stacks that
    // land in the same state. Must clear activeStacks BEFORE re-populating it.
    protected void shifter(ParseState parseState) {
        parseState.activeStacks.clear();

        ParseForest characterNode = getNodeToShift(parseState);

        observing.notify(observer -> observer.shifter(characterNode, parseState.forShifter));

        for(ForShifterElement<StackNode> forShifterElement : parseState.forShifter) {
            StackNode activeStackForState = parseState.activeStacks.findWithState(forShifterElement.state);

            if(activeStackForState != null) {
                // Stack for this state already exists this round: just add a link.
                stackManager.createStackLink(observing, parseState, activeStackForState, forShifterElement.stack,
                    characterNode);
            } else {
                StackNode newStack = stackManager.createStackNode(observing, forShifterElement.state);

                stackManager.createStackLink(observing, parseState, newStack, forShifterElement.stack, characterNode);

                parseState.activeStacks.add(newStack);
            }
        }

        parseState.forShifter.clear();
    }

    protected ParseForest getNodeToShift(ParseState parseState) {
        return parseForestManager.createCharacterNode(observing, parseState);
    }

    private void addForShifter(ParseState parseState, StackNode stack, IState shiftState) {
        ForShifterElement<StackNode> forShifterElement = new ForShifterElement<>(stack, shiftState);

        observing.notify(observer -> observer.addForShifter(forShifterElement));

        parseState.forShifter.add(forShifterElement);
    }

    @Override public ParserObserving<ParseForest, StackNode, ParseState> observing() {
        return observing;
    }
}
|
Remove redundant initializer
|
org.spoofax.jsglr2/src/main/java/org/spoofax/jsglr2/parser/Parser.java
|
Remove redundant initializer
|
|
Java
|
apache-2.0
|
546bb72ffcd886389c0137cf60ef8237e2d5e080
| 0
|
griffon/griffon,tschulte/griffon,levymoreira/griffon,levymoreira/griffon,tschulte/griffon,griffon/griffon,tschulte/griffon,levymoreira/griffon
|
/*
* Copyright 2008-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.griffon.runtime.util;
import griffon.core.*;
import griffon.core.controller.GriffonControllerAction;
import griffon.core.controller.GriffonControllerActionInterceptor;
import griffon.core.controller.GriffonControllerActionManager;
import griffon.core.factories.*;
import griffon.core.resources.ResourcesInjector;
import griffon.exceptions.GriffonException;
import griffon.util.*;
import griffon.util.logging.LogManager;
import groovy.lang.*;
import groovy.util.ConfigObject;
import groovy.util.FactoryBuilderSupport;
import org.codehaus.griffon.runtime.core.ControllerArtifactHandler;
import org.codehaus.griffon.runtime.core.ModelArtifactHandler;
import org.codehaus.griffon.runtime.core.ServiceArtifactHandler;
import org.codehaus.griffon.runtime.core.ViewArtifactHandler;
import org.codehaus.griffon.runtime.core.controller.NoopGriffonControllerActionManager;
import org.codehaus.groovy.runtime.InvokerHelper;
import org.codehaus.groovy.runtime.ResourceGroovyMethods;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.beans.PropertyEditor;
import java.beans.PropertyEditorManager;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.util.*;
import static griffon.util.ConfigUtils.*;
import static griffon.util.GriffonExceptionHandler.handleThrowable;
import static griffon.util.GriffonExceptionHandler.sanitize;
import static griffon.util.GriffonNameUtils.isBlank;
import static java.util.Arrays.asList;
import static org.codehaus.groovy.runtime.ResourceGroovyMethods.eachLine;
/**
* Utility class for bootstrapping an application and handling of MVC groups.</p>
*
* @author Danno Ferrin
* @author Andres Almiray
*/
public class GriffonApplicationHelper {
// Class-wide logger used by every bootstrap step below.
private static final Logger LOG = LoggerFactory.getLogger(GriffonApplicationHelper.class);
// Fallback platform-tweak handler class per platform id; can be overridden via the
// 'platform.handler.<platform>' configuration key (see applyPlatformTweaks()).
private static final Map<String, String> DEFAULT_PLATFORM_HANDLERS = CollectionUtils.<String, String>map()
    .e("linux", "org.codehaus.griffon.runtime.util.DefaultLinuxPlatformHandler")
    .e("linux64", "org.codehaus.griffon.runtime.util.DefaultLinuxPlatformHandler")
    .e("macosx", "org.codehaus.griffon.runtime.util.DefaultMacOSXPlatformHandler")
    .e("macosx64", "org.codehaus.griffon.runtime.util.DefaultMacOSXPlatformHandler")
    .e("solaris", "org.codehaus.griffon.runtime.util.DefaultSolarisPlatformHandler")
    .e("windows", "org.codehaus.griffon.runtime.util.DefaultWindowsPlatformHandler")
    .e("windows64", "org.codehaus.griffon.runtime.util.DefaultWindowsPlatformHandler");
// Prefixes recognized in 'griffon.config.locations' entries (see loadExternalConfig()).
private static final String LOCATION_CLASSPATH = "classpath:";
private static final String LOCATION_FILE = "file:";
// File suffixes accepted for external configuration resources.
private static final String PROPERTIES_SUFFIX = ".properties";
private static final String GROOVY_SUFFIX = ".groovy";
// Configuration keys that select the factory class for each runtime subsystem.
private static final String KEY_MESSAGE_SOURCE_FACTORY = "app.messageSource.factory";
private static final String KEY_RESOURCES_INJECTOR_FACTORY = "app.resourceInjector.factory";
private static final String KEY_EVENT_ROUTER_FACTORY = "app.eventRouter.factory";
private static final String KEY_ADDON_MANAGER_FACTORY = "app.addonManager.factory";
private static final String KEY_ARTIFACT_MANAGER_FACTORY = "app.artifactManager.factory";
private static final String KEY_ACTION_MANAGER_FACTORY = "app.actionManager.factory";
private static final String KEY_MVCGROUP_MANAGER_FACTORY = "app.mvcGroupManager.factory";
private static final String KEY_RESOURCE_RESOLVER_FACTORY = "app.resourceResolver.factory";
private static final String KEY_LOG_MANAGER_FACTORY = "app.logManager.factory";
// Feature toggles read from configuration/system settings.
private static final String KEY_APP_LIFECYCLE_HANDLER_DISABLE = "app.lifecycle.handler.disable";
private static final String KEY_GRIFFON_ACTION_MANAGER_DISABLE = "griffon.action.manager.disable";
private static final String KEY_GRIFFON_CONTROLLER_ACTION_INTERCEPTOR_ORDER = "griffon.controller.action.interceptor.order";
// Default factory implementations used when no override key is present.
private static final String DEFAULT_MESSAGE_SOURCE_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultMessageSourceFactory";
private static final String DEFAULT_RESOURCES_INJECTOR_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultResourcesInjectorFactory";
private static final String DEFAULT_EVENT_ROUTER_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultEventRouterFactory";
private static final String DEFAULT_ADDON_MANAGER_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultAddonManagerFactory";
private static final String DEFAULT_ARTIFACT_MANAGER_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultArtifactManagerFactory";
private static final String DEFAULT_MVCGROUP_MANAGER_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultMVCGroupManagerFactory";
private static final String DEFAULT_RESOURCE_RESOLVER_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultResourceResolverFactory";
private static final String DEFAULT_LOG_MANAGER_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultLogManagerFactory";
static {
    // Enable Groovy's ExpandoMetaClass creation globally so metaclass enhancements
    // (see expandoMetaClassFor()) take effect for all classes.
    ExpandoMetaClassCreationHandle.enable();
}
/**
 * Ensures the given class is backed by an {@code ExpandoMetaClass}.<p>
 * If the currently registered metaclass is already an ExpandoMetaClass it is
 * returned as-is; otherwise a new one (accepting changes after initialization)
 * is created, initialized and registered globally.
 *
 * @param clazz the target class
 * @return the ExpandoMetaClass now registered for {@code clazz}
 */
public static MetaClass expandoMetaClassFor(Class clazz) {
    MetaClassRegistry registry = GroovySystem.getMetaClassRegistry();
    MetaClass current = registry.getMetaClass(clazz);
    if (current instanceof ExpandoMetaClass) {
        return current;
    }
    // allowChangesAfterInit=true so artifacts can be enhanced later at runtime
    MetaClass expando = new ExpandoMetaClass(clazz, true, true);
    expando.initialize();
    registry.setMetaClass(clazz, expando);
    return expando;
}
/**
 * Bootstraps an application.<p>
 * This method performs the following tasks, in order:<ul>
 * <li>Exposes the application in its bindings as "app".</li>
 * <li>Resolves start/working directories via {@code Metadata}.</li>
 * <li>Reads runtime and builder configuration.</li>
 * <li>Initializes messageSource, resourceResolver, resources injector and property editors.</li>
 * <li>Applies platform tweaks and runs the Initialize lifecycle handler.</li>
 * <li>Sets up artifact, MVC, addon and action managers.</li>
 * </ul>
 * Fires {@code BootstrapStart}/{@code BootstrapEnd} events around the sequence.
 * NOTE: the call order below is significant; later steps read state set by earlier ones.
 *
 * @param app the current Griffon application
 */
public static void prepare(GriffonApplication app) {
    app.getBindings().setVariable("app", app);
    // touch start/working dirs so the corresponding system properties are set
    Metadata.getCurrent().getGriffonStartDir();
    Metadata.getCurrent().getGriffonWorkingDir();
    readAndSetConfiguration(app);
    app.event(GriffonApplication.Event.BOOTSTRAP_START.getName(), asList(app));
    initializeMessageSource(app);
    initializeResourceResolver(app);
    initializeResourcesInjector(app);
    initializePropertyEditors(app);
    applyPlatformTweaks(app);
    runLifecycleHandler(GriffonApplication.Lifecycle.INITIALIZE.getName(), app);
    initializeArtifactManager(app);
    initializeMvcManager(app);
    initializeAddonManager(app);
    initializeActionManager(app);
    app.event(GriffonApplication.Event.BOOTSTRAP_END.getName(), asList(app));
}
/**
 * Loads a configuration, preferring the script class' simple name as the
 * resource name when a compiled config class is available.
 */
private static ConfigObject doLoadConfig(ConfigReader configReader, Class configClass, String configFileName) {
    String resourceName = configFileName;
    if (configClass != null) {
        resourceName = configClass.getSimpleName();
    }
    return loadConfig(configReader, configClass, resourceName);
}
/**
 * Locale-aware variant of {@code doLoadConfig}: loads a configuration with i18n
 * suffix resolution, preferring the compiled config class' simple name when present.
 */
private static ConfigObject doLoadConfigWithI18n(Locale locale, ConfigReader configReader, Class configClass, String configFileName) {
    String resourceName = configFileName;
    if (configClass != null) {
        resourceName = configClass.getSimpleName();
    }
    return loadConfigWithI18n(locale, configReader, configClass, resourceName);
}
// Reads Application/Config/Builder configurations and wires config-driven
// subsystems (log manager, event router, application event handler).
// The Application config is loaded TWICE on purpose: first to discover the
// configured locale, then again with i18n resolution for that locale.
private static void readAndSetConfiguration(final GriffonApplication app) {
    ConfigReader configReader = createConfigReader();
    // pass 1: plain load, only to read 'application.locale'
    ConfigObject appConfig = doLoadConfig(configReader, app.getAppConfigClass(), GriffonApplication.Configuration.APPLICATION.getName());
    setApplicationLocale(app, getConfigValue(appConfig, "application.locale", Locale.getDefault()));
    // pass 2: locale-aware load now that the locale is known
    appConfig = doLoadConfigWithI18n(app.getLocale(), configReader, app.getAppConfigClass(), GriffonApplication.Configuration.APPLICATION.getName());
    app.setConfig(appConfig);
    app.getConfig().merge(doLoadConfigWithI18n(app.getLocale(), configReader, app.getConfigClass(), GriffonApplication.Configuration.CONFIG.getName()));
    // logging must be configured before any external config is merged in
    initializeLogManager(app);
    loadExternalConfig(app, configReader);
    GriffonExceptionHandler.configure(app.getConfig().flatten(new LinkedHashMap()));
    app.setBuilderConfig(doLoadConfigWithI18n(app.getLocale(), configReader, app.getBuilderClass(), GriffonApplication.Configuration.BUILDER.getName()));
    initializeEventRouter(app);
    // optional Events class; quietly skipped when absent (logException=false)
    Object events = safeNewInstance(app.getEventsClass(), false);
    if (events != null) {
        app.setEventsConfig(events);
        app.addApplicationEventListener(app.getEventsConfig());
    }
}
// Merges additional configuration sources listed under 'griffon.config.locations'.
// Each entry may be prefixed with "classpath:" or "file:" (treated as a
// .properties/.groovy resource path) or be a bare class name of a config script.
// Invalid entries are logged and skipped; they never abort bootstrap.
private static void loadExternalConfig(GriffonApplication app, ConfigReader configReader) {
    List<String> locations = (List<String>) getConfigValue(app.getConfig(), "griffon.config.locations", Collections.emptyList());
    for (String location : locations) {
        boolean groovyScriptAllowed = false;
        String parsedLocation = location;
        if (location.startsWith(LOCATION_CLASSPATH)) {
            parsedLocation = location.substring(LOCATION_CLASSPATH.length()).trim();
        } else if (location.startsWith(LOCATION_FILE)) {
            parsedLocation = location.substring(LOCATION_FILE.length()).trim();
        } else {
            // assume it's a class definition
            groovyScriptAllowed = true;
        }
        if (groovyScriptAllowed) {
            // bare class name: load it and merge the resulting config
            Class locationScriptClass = safeLoadClass(parsedLocation);
            if (locationScriptClass != null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Loading external configuration location '" + location + "'.");
                }
                app.getConfig().merge(loadConfigWithI18n(app.getLocale(), configReader, locationScriptClass, null));
            } else {
                // invalid location. Log & skip
                if (LOG.isWarnEnabled()) {
                    LOG.warn("Skipping invalid external configuration location '" + location + "'.");
                }
            }
        } else if (parsedLocation.endsWith(PROPERTIES_SUFFIX) || parsedLocation.endsWith(GROOVY_SUFFIX)) {
            // resource path: only .properties and .groovy files are accepted
            if (LOG.isDebugEnabled()) {
                LOG.debug("Loading external configuration location '" + location + "'.");
            }
            app.getConfig().merge(loadConfigWithI18n(app.getLocale(), configReader, null, parsedLocation));
        } else {
            // invalid location. Log & skip
            if (LOG.isWarnEnabled()) {
                LOG.warn("Skipping invalid external configuration location '" + location + "'.");
            }
        }
    }
}
/**
 * Creates the LogManager via its configured (or default) factory and applies
 * the application's configuration to it.
 */
private static void initializeLogManager(GriffonApplication app) {
    String factoryClassName = getConfigValueAsString(app.getConfig(), KEY_LOG_MANAGER_FACTORY, DEFAULT_LOG_MANAGER_FACTORY);
    LogManagerFactory logManagerFactory = (LogManagerFactory) safeNewInstance(factoryClassName);
    logManagerFactory.create(app).configure(app.getConfig());
}
/**
 * Instantiates the configured MessageSourceFactory and installs the resulting
 * MessageSource as the application's 'messageSource' property.
 */
private static void initializeMessageSource(GriffonApplication app) {
    String factoryClassName = getConfigValueAsString(app.getConfig(), KEY_MESSAGE_SOURCE_FACTORY, DEFAULT_MESSAGE_SOURCE_FACTORY);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + factoryClassName + " as MessageSourceFactory");
    }
    MessageSourceFactory messageSourceFactory = (MessageSourceFactory) safeNewInstance(factoryClassName);
    InvokerHelper.setProperty(app, "messageSource", messageSourceFactory.create(app));
}
/**
 * Instantiates the configured ResourceResolverFactory and installs the resulting
 * ResourceResolver as the application's 'resourceResolver' property.
 */
private static void initializeResourceResolver(GriffonApplication app) {
    String factoryClassName = getConfigValueAsString(app.getConfig(), KEY_RESOURCE_RESOLVER_FACTORY, DEFAULT_RESOURCE_RESOLVER_FACTORY);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + factoryClassName + " as ResourceResolverFactory");
    }
    ResourceResolverFactory resolverFactory = (ResourceResolverFactory) safeNewInstance(factoryClassName);
    InvokerHelper.setProperty(app, "resourceResolver", resolverFactory.create(app));
}
/**
 * Creates the ResourcesInjector via its configured (or default) factory and
 * registers a NewInstance event listener that injects resources into every
 * freshly created artifact instance.
 */
private static void initializeResourcesInjector(GriffonApplication app) {
    String factoryClassName = getConfigValueAsString(app.getConfig(), KEY_RESOURCES_INJECTOR_FACTORY, DEFAULT_RESOURCES_INJECTOR_FACTORY);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + factoryClassName + " as ResourcesInjectorFactory");
    }
    ResourcesInjectorFactory injectorFactory = (ResourcesInjectorFactory) safeNewInstance(factoryClassName);
    final ResourcesInjector injector = injectorFactory.create(app);
    app.addApplicationEventListener(GriffonApplication.Event.NEW_INSTANCE.getName(), new RunnableWithArgs() {
        public void run(Object[] args) {
            // NewInstance event args: [klass, type, instance] (see newInstance())
            Object instance = args[2];
            injector.injectResources(instance);
        }
    });
}
// Registers PropertyEditors declared in 'META-INF/services/java.beans.PropertyEditor'
// resources. Each non-comment line has the form 'targetType=editorClass'.
// Failures are logged and skipped per line/resource; this step never aborts bootstrap.
private static void initializePropertyEditors(GriffonApplication app) {
    Enumeration<URL> urls = null;
    try {
        urls = ApplicationClassLoader.get().getResources("META-INF/services/" + PropertyEditor.class.getName());
    } catch (IOException ioe) {
        // no service resources reachable -> nothing to register
        return;
    }
    if (urls == null) return;
    while (urls.hasMoreElements()) {
        URL url = urls.nextElement();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Reading " + PropertyEditor.class.getName() + " definitions from " + url);
        }
        try {
            eachLine(url, new RunnableWithArgsClosure(new RunnableWithArgs() {
                @Override
                public void run(Object[] args) {
                    String line = (String) args[0];
                    // skip comments and blank lines
                    if (line.startsWith("#") || isBlank(line)) return;
                    try {
                        // format: targetType=editorClass
                        String[] parts = line.trim().split("=");
                        Class targetType = loadClass(parts[0].trim());
                        Class editorClass = loadClass(parts[1].trim());
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("Registering " + editorClass.getName() + " as editor for " + targetType.getName());
                        }
                        PropertyEditorManager.registerEditor(targetType, editorClass);
                    } catch (Exception e) {
                        // malformed line or unloadable class: warn and continue
                        if (LOG.isWarnEnabled()) {
                            LOG.warn("Could not load PropertyEditor with " + line, sanitize(e));
                        }
                    }
                }
            }));
        } catch (IOException e) {
            if (LOG.isWarnEnabled()) {
                LOG.warn("Could not load PropertyEditor definitions from " + url, sanitize(e));
            }
        }
    }
}
// Installs the event router created by createEventRouter() as the application's
// 'eventRouter' property.
private static void initializeEventRouter(GriffonApplication app) {
    InvokerHelper.setProperty(app, "eventRouter", createEventRouter(app));
}
/**
 * Builds a new EventRouter using the factory class configured under
 * 'app.eventRouter.factory' (falling back to the default factory).
 *
 * @param app the current Griffon application
 * @return a freshly created EventRouter
 */
public static EventRouter createEventRouter(GriffonApplication app) {
    String factoryClassName = getConfigValueAsString(app.getConfig(), KEY_EVENT_ROUTER_FACTORY, DEFAULT_EVENT_ROUTER_FACTORY);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + factoryClassName + " as EventRouterFactory");
    }
    EventRouterFactory routerFactory = (EventRouterFactory) safeNewInstance(factoryClassName);
    return routerFactory.create(app);
}
/**
 * Applies the configured locale to the application. Accepts either a Locale
 * instance or a CharSequence in "lang[_country[_variant]]" form; any other
 * value is ignored.
 */
private static void setApplicationLocale(GriffonApplication app, Object localeValue) {
    if (localeValue instanceof Locale) {
        app.setLocale((Locale) localeValue);
        return;
    }
    if (localeValue instanceof CharSequence) {
        app.setLocale(parseLocale(String.valueOf(localeValue)));
    }
}
/**
 * Parses a locale string of the form "lang", "lang_country" or
 * "lang_country_variant". Blank input or any other shape yields the
 * platform default locale.
 *
 * @param locale the locale string to parse; may be blank
 * @return the parsed Locale, or {@code Locale.getDefault()} when unparsable
 */
public static Locale parseLocale(String locale) {
    if (isBlank(locale)) {
        return Locale.getDefault();
    }
    String[] parts = locale.split("_");
    if (parts.length == 1) {
        return new Locale(parts[0]);
    }
    if (parts.length == 2) {
        return new Locale(parts[0], parts[1]);
    }
    if (parts.length == 3) {
        return new Locale(parts[0], parts[1], parts[2]);
    }
    // more than three segments: treat as unparsable
    return Locale.getDefault();
}
/**
 * Looks up the PlatformHandler for the current platform — configurable via
 * 'platform.handler.&lt;platform&gt;', defaulting to the built-in handler map —
 * and lets it tweak the application (menus, dock, look &amp; feel, etc.).
 */
public static void applyPlatformTweaks(GriffonApplication app) {
    String platform = GriffonApplicationUtils.platform;
    String handlerClassName = getConfigValueAsString(app.getConfig(), "platform.handler." + platform, DEFAULT_PLATFORM_HANDLERS.get(platform));
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + handlerClassName + " as PlatformHandler");
    }
    PlatformHandler handler = (PlatformHandler) safeNewInstance(handlerClassName);
    handler.handle(app);
}
// Creates the ArtifactManager (unless one was injected already), registers the
// built-in artifact handlers, then discovers additional handlers from service
// files before loading artifact metadata. Registration order matters: metadata
// loading at the end expects all handlers to be in place.
private static void initializeArtifactManager(GriffonApplication app) {
    if (app.getArtifactManager() == null) {
        String className = getConfigValueAsString(app.getConfig(), KEY_ARTIFACT_MANAGER_FACTORY, DEFAULT_ARTIFACT_MANAGER_FACTORY);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Using " + className + " as ArtifactManagerFactory");
        }
        ArtifactManagerFactory factory = (ArtifactManagerFactory) safeNewInstance(className);
        InvokerHelper.setProperty(app, "artifactManager", factory.create(app));
    }
    // initialize default Artifact handlers
    app.getArtifactManager().registerArtifactHandler(new ModelArtifactHandler(app));
    app.getArtifactManager().registerArtifactHandler(new ViewArtifactHandler(app));
    app.getArtifactManager().registerArtifactHandler(new ControllerArtifactHandler(app));
    // service handler can be opted out of via its own flag
    if (!ServiceArtifactHandler.isBasicInjectionDisabled()) {
        app.getArtifactManager().registerArtifactHandler(new ServiceArtifactHandler(app));
    }
    // load additional handlers
    loadArtifactHandlers(app);
    app.getArtifactManager().loadArtifactMetadata();
}
/**
 * Ensures an AddonManager exists (creating one via the configured factory when
 * absent) and initializes it so all addons get bootstrapped.
 */
private static void initializeAddonManager(GriffonApplication app) {
    if (app.getAddonManager() == null) {
        String factoryClassName = getConfigValueAsString(app.getConfig(), KEY_ADDON_MANAGER_FACTORY, DEFAULT_ADDON_MANAGER_FACTORY);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Using " + factoryClassName + " as AddonManagerFactory");
        }
        AddonManagerFactory addonManagerFactory = (AddonManagerFactory) safeNewInstance(factoryClassName);
        InvokerHelper.setProperty(app, "addonManager", addonManagerFactory.create(app));
    }
    app.getAddonManager().initialize();
}
// Sets up the GriffonControllerActionManager: resolves its factory (config key or
// META-INF service file), wires listeners that create/expose controller actions,
// then collects addon-contributed action interceptors and registers them in
// dependency order ('dependsOn' entries, seeded by the application-specific
// 'griffon.controller.action.interceptor.order' list).
// FIX: on an IOException while reading the service file, 'className' is now reset
// to null unconditionally; previously the reset lived inside the
// LOG.isDebugEnabled() guard, so with debug logging off a stale/invalid value
// ("" or "null") leaked through to safeNewInstance().
private static void initializeActionManager(GriffonApplication app) {
    // a no-op manager is always present so callers never see null
    InvokerHelper.setProperty(app, "actionManager", new NoopGriffonControllerActionManager(app));
    boolean disableActionManager = getConfigValueAsBoolean(app.getConfig(), KEY_GRIFFON_ACTION_MANAGER_DISABLE, false);
    if (disableActionManager) {
        if (LOG.isInfoEnabled()) {
            LOG.info("GriffonControllerActionManager is disabled.");
        }
        return;
    }
    String className = getConfigValueAsString(app.getConfig(), KEY_ACTION_MANAGER_FACTORY, null);
    if (isBlank(className) || "null".equals(className)) {
        // no explicit factory configured; fall back to service discovery
        URL url = ApplicationClassLoader.get().getResource("META-INF/services/" + GriffonControllerActionManagerFactory.class.getName());
        if (null == url) {
            if (LOG.isInfoEnabled()) {
                LOG.info("GriffonControllerActionManager is disabled.");
            }
            return;
        }
        try {
            className = ResourceGroovyMethods.getText(url).trim();
        } catch (IOException e) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Cannot read GriffonControllerActionManager definition from " + url, sanitize(e));
            }
            // reset regardless of logging level so the blank-check below disables the manager
            className = null;
        }
    }
    if (isBlank(className)) {
        if (LOG.isInfoEnabled()) {
            LOG.info("GriffonControllerActionManager is disabled.");
        }
        return;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + className + " as GriffonControllerActionManagerFactory");
    }
    GriffonControllerActionManagerFactory factory = (GriffonControllerActionManagerFactory) safeNewInstance(className);
    final GriffonControllerActionManager actionManager = factory.create(app);
    InvokerHelper.setProperty(app, "actionManager", actionManager);
    // create actions for every new controller instance
    app.addApplicationEventListener(GriffonApplication.Event.NEW_INSTANCE.getName(), new RunnableWithArgs() {
        public void run(Object[] args) {
            String type = (String) args[1];
            if (GriffonControllerClass.TYPE.equals(type)) {
                GriffonController controller = (GriffonController) args[2];
                actionManager.createActions(controller);
            }
        }
    });
    // expose each controller action as a builder variable when an MVC group is initialized
    app.addApplicationEventListener(GriffonApplication.Event.INITIALIZE_MVC_GROUP.getName(), new RunnableWithArgs() {
        public void run(Object[] args) {
            MVCGroupConfiguration groupConfig = (MVCGroupConfiguration) args[0];
            MVCGroup group = (MVCGroup) args[1];
            GriffonController controller = group.getController();
            if (controller == null) return;
            FactoryBuilderSupport builder = group.getBuilder();
            Map<String, GriffonControllerAction> actions = actionManager.actionsFor(controller);
            for (Map.Entry<String, GriffonControllerAction> action : actions.entrySet()) {
                String actionKey = actionManager.normalizeName(action.getKey()) + GriffonControllerActionManager.ACTION;
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Adding action " + actionKey + " to " + groupConfig.getMvcType() + ":" + group.getMvcId() + ":builder");
                }
                builder.setVariable(actionKey, action.getValue().getToolkitAction());
            }
        }
    });
    // collect interceptor descriptors contributed by addons
    Map<String, Map<String, Object>> actionInterceptors = new LinkedHashMap<String, Map<String, Object>>();
    for (GriffonAddon addon : app.getAddonManager().getAddons().values()) {
        Map<String, Map<String, Object>> interceptors = addon.getActionInterceptors();
        if (interceptors != null && !interceptors.isEmpty()) {
            actionInterceptors.putAll(interceptors);
        }
    }
    // grab application specific order
    List<String> interceptorOrder = (List<String>) getConfigValue(app.getConfig(), KEY_GRIFFON_CONTROLLER_ACTION_INTERCEPTOR_ORDER, Collections.emptyList());
    // reorder: explicitly listed interceptors first (in the configured order), rest afterwards
    Map<String, Map<String, Object>> tmp = new LinkedHashMap<String, Map<String, Object>>(actionInterceptors);
    Map<String, Map<String, Object>> map = new LinkedHashMap<String, Map<String, Object>>();
    for (String interceptorName : interceptorOrder) {
        if (tmp.containsKey(interceptorName)) {
            map.put(interceptorName, tmp.remove(interceptorName));
        }
    }
    map.putAll(tmp);
    actionInterceptors.clear();
    actionInterceptors.putAll(map);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Chosen interceptor order is " + map.keySet());
    }
    // topological-ish sort: repeatedly add interceptors whose dependencies were all added
    List<GriffonControllerActionInterceptor> sortedInterceptors = new ArrayList<GriffonControllerActionInterceptor>();
    Set<String> addedDeps = new LinkedHashSet<String>();
    while (!map.isEmpty()) {
        int filtersAdded = 0;
        if (LOG.isDebugEnabled()) {
            LOG.debug("Current interceptor order is " + actionInterceptors.keySet());
        }
        for (Iterator<Map.Entry<String, Map<String, Object>>> iter = map.entrySet().iterator(); iter.hasNext(); ) {
            Map.Entry<String, Map<String, Object>> entry = iter.next();
            String interceptorName = entry.getKey();
            List<String> dependsOn = (List<String>) getConfigValue(entry.getValue(), "dependsOn", Collections.emptyList());
            String interceptorClassName = (String) getConfigValue(entry.getValue(), "interceptor", null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Processing interceptor '" + interceptorName + "'");
                LOG.debug(" depends on '" + dependsOn + "'");
            }
            if (isBlank(interceptorClassName)) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(" Skipped interceptor '" + interceptorName + "', since it does not define an interceptor class");
                }
                iter.remove();
                continue;
            }
            if (!dependsOn.isEmpty()) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(" Checking interceptor '" + interceptorName + "' dependencies (" + dependsOn.size() + ")");
                }
                boolean failedDep = false;
                for (String dep : dependsOn) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(" Checking interceptor '" + interceptorName + "' dependencies: " + dep);
                    }
                    if (!addedDeps.contains(dep)) {
                        // dep not in the list yet, we need to skip adding this to the list for now
                        if (LOG.isDebugEnabled()) {
                            LOG.debug(" Skipped interceptor '" + interceptorName + "', since dependency '" + dep + "' not yet added");
                        }
                        failedDep = true;
                        break;
                    } else {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug(" Interceptor '" + interceptorName + "' dependency '" + dep + "' already added");
                        }
                    }
                }
                if (failedDep) {
                    // move on to the next interceptor; retry this one in a later pass
                    continue;
                }
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug(" Adding interceptor '" + interceptorName + "', since all dependencies have been added");
            }
            sortedInterceptors.add((GriffonControllerActionInterceptor) newInstance(app, safeLoadClass(interceptorClassName)));
            addedDeps.add(interceptorName);
            iter.remove();
            filtersAdded++;
        }
        if (filtersAdded == 0) {
            // we have a cyclical dependency, warn the user and load in the order they appeared originally
            if (LOG.isWarnEnabled()) {
                LOG.warn("::::::::::::::::::::::::::::::::::::::::::::::::::::::");
                LOG.warn(":: Unresolved interceptor dependencies detected ::");
                LOG.warn(":: Continuing with original interceptor order ::");
                LOG.warn("::::::::::::::::::::::::::::::::::::::::::::::::::::::");
            }
            for (Map.Entry<String, Map<String, Object>> entry : map.entrySet()) {
                String interceptorName = entry.getKey();
                List<String> dependsOn = (List<String>) getConfigValue(entry.getValue(), "dependsOn", Collections.emptyList());
                // display this as a cyclical dep
                if (LOG.isWarnEnabled()) {
                    LOG.warn(":: Interceptor " + interceptorName);
                }
                if (!dependsOn.isEmpty()) {
                    for (String dep : dependsOn) {
                        if (LOG.isWarnEnabled()) {
                            LOG.warn(":: depends on " + dep);
                        }
                    }
                } else {
                    // we should only have items left in the list with deps, so this should never happen
                    // but a wise man once said...check for true, false and otherwise...just in case
                    if (LOG.isWarnEnabled()) {
                        LOG.warn(":: Problem while resolving dependencies.");
                        LOG.warn(":: Unable to resolve dependency hierarchy.");
                    }
                }
                if (LOG.isWarnEnabled()) {
                    LOG.warn("::::::::::::::::::::::::::::::::::::::::::::::::::::::");
                }
            }
            break;
            // if we have processed all the interceptors, we are done
        } else if (sortedInterceptors.size() == actionInterceptors.size()) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Interceptor dependency ordering complete");
            }
            break;
        }
    }
    for (GriffonControllerActionInterceptor interceptor : sortedInterceptors) {
        actionManager.addActionInterceptor(interceptor);
    }
}
// Ensures an MVCGroupManager exists (creating one via the configured factory when
// absent), then translates the 'mvcGroups' configuration section into
// MVCGroupConfiguration instances. Within each group's member map, the special
// 'config' entry (when a Map) becomes the group's config; every other entry is
// stringified into the members map.
private static void initializeMvcManager(GriffonApplication app) {
    if (app.getMvcGroupManager() == null) {
        String className = getConfigValueAsString(app.getConfig(), KEY_MVCGROUP_MANAGER_FACTORY, DEFAULT_MVCGROUP_MANAGER_FACTORY);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Using " + className + " as MVCGroupManagerFactory");
        }
        MVCGroupManagerFactory factory = (MVCGroupManagerFactory) safeNewInstance(className);
        InvokerHelper.setProperty(app, "mvcGroupManager", factory.create(app));
    }
    Map<String, MVCGroupConfiguration> configurations = new LinkedHashMap<String, MVCGroupConfiguration>();
    Map<String, Map<String, Object>> mvcGroups = (Map<String, Map<String, Object>>) app.getConfig().get("mvcGroups");
    if (mvcGroups != null) {
        for (Map.Entry<String, Map<String, Object>> groupEntry : mvcGroups.entrySet()) {
            String type = groupEntry.getKey();
            if (LOG.isDebugEnabled()) {
                LOG.debug("Adding MVC group " + type);
            }
            Map<String, Object> members = groupEntry.getValue();
            Map<String, Object> configMap = new LinkedHashMap<String, Object>();
            Map<String, String> membersCopy = new LinkedHashMap<String, String>();
            for (Object o : members.entrySet()) {
                Map.Entry entry = (Map.Entry) o;
                String key = String.valueOf(entry.getKey());
                if ("config".equals(key) && entry.getValue() instanceof Map) {
                    // group-level configuration, kept separate from the member definitions
                    configMap = (Map<String, Object>) entry.getValue();
                } else {
                    membersCopy.put(key, String.valueOf(entry.getValue()));
                }
            }
            configurations.put(type, app.getMvcGroupManager().newMVCGroupConfiguration(type, membersCopy, configMap));
        }
    }
    app.getMvcGroupManager().initialize(configurations);
}
// Discovers and registers additional ArtifactHandlers declared in
// 'META-INF/services/griffon.core.ArtifactHandler' resources (one handler class
// name per non-comment line). Handlers with a (GriffonApplication) constructor
// are instantiated with the app; otherwise the no-arg constructor is used.
// FIX: Class.getDeclaredConstructor throws NoSuchMethodException rather than
// returning null, so the previous 'ctor != null' fallback branch was dead code —
// handlers lacking the single-arg constructor were logged as failures instead of
// being created via their no-arg constructor. The lookup is now guarded.
private static void loadArtifactHandlers(final GriffonApplication app) {
    Enumeration<URL> urls = null;
    try {
        urls = ApplicationClassLoader.get().getResources("META-INF/services/" + ArtifactHandler.class.getName());
    } catch (IOException ioe) {
        // no service resources reachable -> nothing to register
        return;
    }
    if (urls == null) return;
    while (urls.hasMoreElements()) {
        URL url = urls.nextElement();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Reading " + ArtifactHandler.class.getName() + " definitions from " + url);
        }
        try {
            eachLine(url, new RunnableWithArgsClosure(new RunnableWithArgs() {
                @Override
                public void run(Object[] args) {
                    String line = (String) args[0];
                    // skip comments and blank lines
                    if (line.startsWith("#") || isBlank(line)) return;
                    try {
                        Class artifactHandlerClass = loadClass(line);
                        Constructor ctor = null;
                        try {
                            ctor = artifactHandlerClass.getDeclaredConstructor(GriffonApplication.class);
                        } catch (NoSuchMethodException nsme) {
                            // no (GriffonApplication) constructor; fall back to no-arg below
                        }
                        ArtifactHandler handler = null;
                        if (ctor != null) {
                            handler = (ArtifactHandler) ctor.newInstance(app);
                        } else {
                            handler = (ArtifactHandler) safeNewInstance(artifactHandlerClass);
                        }
                        app.getArtifactManager().registerArtifactHandler(handler);
                    } catch (Exception e) {
                        // unloadable or uninstantiable handler: warn and continue with the next line
                        if (LOG.isWarnEnabled()) {
                            LOG.warn("Could not load ArtifactHandler with " + line, sanitize(e));
                        }
                    }
                }
            }));
        } catch (IOException e) {
            if (LOG.isWarnEnabled()) {
                LOG.warn("Could not load ArtifactHandler from " + url, sanitize(e));
            }
        }
    }
}
/**
 * Executes a lifecycle handler (Groovy script or {@code LifecycleHandler} class)
 * inside the UI thread.<p>
 * On Swing this would be the Event Dispatch Thread. The whole step can be turned
 * off via the 'app.lifecycle.handler.disable' configuration key; a missing
 * handler class is silently ignored.
 * FIX: the skip message was emitted with LOG.info while guarded by
 * LOG.isDebugEnabled(); the call now uses LOG.debug to match its guard.
 *
 * @param handlerName the name of the lifecycle handler to run (e.g. "Initialize")
 * @param app         the current Griffon application
 */
public static void runLifecycleHandler(String handlerName, GriffonApplication app) {
    boolean skipHandler = getConfigValueAsBoolean(app.getConfig(), KEY_APP_LIFECYCLE_HANDLER_DISABLE, false);
    if (skipHandler) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Lifecycle handler '" + handlerName + "' has been disabled. SKIPPING.");
        }
        return;
    }
    Class<?> handlerClass = null;
    try {
        handlerClass = loadConfigurationalClass(handlerName);
    } catch (ClassNotFoundException cnfe) {
        if (cnfe.getMessage().equals(handlerName)) {
            // the script must not exist, do nothing
            //LOGME - may be because of chained failures
            return;
        } else {
            throw new GriffonException(cnfe);
        }
    }
    // dispatch on handler flavor: Groovy script vs LifecycleHandler implementation
    if (Script.class.isAssignableFrom(handlerClass)) {
        doRunScript(handlerName, handlerClass, app);
    } else if (LifecycleHandler.class.isAssignableFrom(handlerClass)) {
        doRunLifecycleHandler(handlerName, handlerClass, app);
    }
}
/**
 * Runs a lifecycle Groovy script synchronously inside the UI thread, bound to
 * the application's bindings and enhanced with threading helpers.
 */
private static void doRunScript(String scriptName, Class handlerClass, GriffonApplication app) {
    Script lifecycleScript = (Script) safeNewInstance(handlerClass);
    lifecycleScript.setBinding(app.getBindings());
    UIThreadManager.enhance(lifecycleScript);
    if (LOG.isInfoEnabled()) {
        LOG.info("Running lifecycle handler (script) '" + scriptName + "'");
    }
    UIThreadManager.getInstance().executeSync(lifecycleScript);
}
/**
 * Runs a LifecycleHandler implementation synchronously inside the UI thread.
 */
private static void doRunLifecycleHandler(String handlerName, Class handlerClass, GriffonApplication app) {
    LifecycleHandler lifecycleHandler = (LifecycleHandler) safeNewInstance(handlerClass);
    if (LOG.isInfoEnabled()) {
        LOG.info("Running lifecycle handler (class) '" + handlerName + "'");
    }
    UIThreadManager.getInstance().executeSync(lifecycleHandler);
}
/**
 * Creates a new instance of the specified class with an empty type.<p>
 * Convenience overload of {@link #newInstance(GriffonApplication, Class, String)};
 * publishes a <strong>NewInstance</strong> event with the following arguments<ul>
 * <li>klass - the target Class</li>
 * <li>type - the type of the instance (i.e, 'controller','service')</li>
 * <li>instance - the newly created instance</li>
 * </ul>
 *
 * @param app the current GriffonApplication
 * @param klass the target Class from which the instance will be created
 * @return a newly created instance of type klass
 */
public static Object newInstance(GriffonApplication app, Class klass) {
    return newInstance(app, klass, "");
}
/**
 * Creates a new instance of the specified class.<p>
 * Publishes a <strong>NewInstance</strong> event with the following arguments<ul>
 * <li>klass - the target Class</li>
 * <li>type - the type of the instance (i.e, 'controller','service')</li>
 * <li>instance - the newly created instance</li>
 * </ul>
 * The instance is enhanced (app property, threading helpers) before the event fires.
 *
 * @param app the current GriffonApplication
 * @param klass the target Class from which the instance will be created
 * @param type optional type parameter, used when publishing a 'NewInstance' event
 * @return a newly created instance of type klass
 * @throws GriffonException wrapping any InstantiationException/IllegalAccessException
 */
public static Object newInstance(GriffonApplication app, Class klass, String type) {
    if (isBlank(type)) type = "";
    if (LOG.isDebugEnabled()) {
        LOG.debug("Instantiating " + klass.getName() + " with type '" + type + "'");
    }
    Object instance = null;
    try {
        instance = klass.newInstance();
    } catch (InstantiationException e) {
        throw new GriffonException(e);
    } catch (IllegalAccessException e) {
        throw new GriffonException(e);
    }
    // GRIFFON-535
    // defensive null check kept deliberately (see issue above); enhancement and the
    // NewInstance event only apply to successfully created instances
    if (instance != null) {
        GriffonClass griffonClass = app.getArtifactManager().findGriffonClass(klass);
        MetaClass mc = griffonClass != null ? griffonClass.getMetaClass() : expandoMetaClassFor(klass);
        enhance(app, klass, mc, instance);
        app.event(GriffonApplication.Event.NEW_INSTANCE.getName(), asList(klass, type, instance));
    }
    return instance;
}
// Injects the application reference into an instance, trying progressively weaker
// mechanisms: a setApp(app) method, then an 'app' property, then (for an
// ExpandoMetaClass) registering 'app' as a new bean property. Non-artifact classes
// additionally receive UI-threading helper methods on their metaclass.
// NOTE: the nested try/catch order encodes the fallback priority; do not reorder.
public static void enhance(GriffonApplication app, Class klass, MetaClass mc, Object instance) {
    try {
        InvokerHelper.invokeMethod(instance, "setApp", app);
    } catch (MissingMethodException mme) {
        try {
            InvokerHelper.setProperty(instance, "app", app);
        } catch (MissingPropertyException mpe) {
            if (mc instanceof ExpandoMetaClass) {
                ((ExpandoMetaClass) mc).registerBeanProperty("app", app);
            }
        }
    }
    // GriffonArtifact implementations already expose threading methods
    if (!GriffonArtifact.class.isAssignableFrom(klass)) {
        UIThreadManager.enhance(mc);
    }
}
/**
 * Loads a configuration class. Unqualified names are first tried inside the
 * 'config' package; when that lookup fails cleanly the plain name is tried.
 * Any other ClassNotFoundException is rethrown wrapped in a GriffonException.
 *
 * @param className simple or fully qualified class name
 * @return the resolved class
 * @throws ClassNotFoundException when the class cannot be located at all
 */
public static Class<?> loadConfigurationalClass(String className) throws ClassNotFoundException {
    if (className.contains(".")) {
        return loadClass(className);
    }
    String prefixedClassName = "config." + className;
    try {
        return loadClass(prefixedClassName);
    } catch (ClassNotFoundException cnfe) {
        // a message matching the attempted name means a plain "not found";
        // anything else indicates a deeper failure and is wrapped
        if (cnfe.getMessage().equals(prefixedClassName)) {
            return loadClass(className);
        }
        throw new GriffonException(cnfe);
    }
}
/**
 * Loads a class by name, first through this helper's own class loader, then
 * through the application class loader. The exception from the second attempt
 * propagates when both fail.<p>
 * Rewritten to drop the previous exception-variable plumbing which ended in an
 * unreachable {@code return null;} — behavior is unchanged: the second loader's
 * ClassNotFoundException is the one thrown on total failure.
 *
 * @param className fully qualified class name
 * @return the resolved class, never null
 * @throws ClassNotFoundException when neither class loader can resolve the name
 */
public static Class<?> loadClass(String className) throws ClassNotFoundException {
    try {
        return GriffonApplicationHelper.class.getClassLoader().loadClass(className);
    } catch (ClassNotFoundException ignored) {
        // fall through to the application class loader
    }
    return ApplicationClassLoader.get().loadClass(className);
}
/**
 * Null-returning variant of {@link #loadClass(String)}: resolves the class or
 * yields {@code null} when it cannot be found.
 */
public static Class<?> safeLoadClass(String className) {
    try {
        return loadClass(className);
    } catch (ClassNotFoundException ignored) {
        return null;
    }
}
/**
 * Loads the named class and creates an instance via its no-arg constructor.
 * Any failure is routed through the Griffon exception handler and {@code null}
 * is returned instead of propagating.
 */
public static Object safeNewInstance(String className) {
    Object instance = null;
    try {
        instance = loadClass(className).newInstance();
    } catch (Exception e) {
        handleThrowable(e);
    }
    return instance;
}
// Convenience overload: instantiate with exception logging enabled.
public static Object safeNewInstance(Class<?> clazz) {
    return safeNewInstance(clazz, true);
}
/**
 * Creates an instance of the given class via its no-arg constructor, returning
 * {@code null} on failure. When {@code logException} is false the failure is
 * swallowed silently (used for optional classes such as the Events script).
 */
public static Object safeNewInstance(Class<?> clazz, boolean logException) {
    Object instance = null;
    try {
        instance = clazz.newInstance();
    } catch (Exception e) {
        if (logException) {
            handleThrowable(e);
        }
    }
    return instance;
}
}
|
subprojects/griffon-rt/src/main/groovy/org/codehaus/griffon/runtime/util/GriffonApplicationHelper.java
|
/*
* Copyright 2008-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.griffon.runtime.util;
import griffon.core.*;
import griffon.core.controller.GriffonControllerAction;
import griffon.core.controller.GriffonControllerActionInterceptor;
import griffon.core.controller.GriffonControllerActionManager;
import griffon.core.factories.*;
import griffon.core.resources.ResourcesInjector;
import griffon.exceptions.GriffonException;
import griffon.util.*;
import griffon.util.logging.LogManager;
import groovy.lang.*;
import groovy.util.ConfigObject;
import groovy.util.FactoryBuilderSupport;
import org.codehaus.griffon.runtime.core.ControllerArtifactHandler;
import org.codehaus.griffon.runtime.core.ModelArtifactHandler;
import org.codehaus.griffon.runtime.core.ServiceArtifactHandler;
import org.codehaus.griffon.runtime.core.ViewArtifactHandler;
import org.codehaus.griffon.runtime.core.controller.NoopGriffonControllerActionManager;
import org.codehaus.groovy.runtime.InvokerHelper;
import org.codehaus.groovy.runtime.ResourceGroovyMethods;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.beans.PropertyEditor;
import java.beans.PropertyEditorManager;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.util.*;
import static griffon.util.ConfigUtils.*;
import static griffon.util.GriffonExceptionHandler.handleThrowable;
import static griffon.util.GriffonExceptionHandler.sanitize;
import static griffon.util.GriffonNameUtils.isBlank;
import static java.util.Arrays.asList;
import static org.codehaus.groovy.runtime.ResourceGroovyMethods.eachLine;
/**
* Utility class for bootstrapping an application and handling of MVC groups.</p>
*
* @author Danno Ferrin
* @author Andres Almiray
*/
public class GriffonApplicationHelper {
private static final Logger LOG = LoggerFactory.getLogger(GriffonApplicationHelper.class);
private static final Map<String, String> DEFAULT_PLATFORM_HANDLERS = CollectionUtils.<String, String>map()
.e("linux", "org.codehaus.griffon.runtime.util.DefaultLinuxPlatformHandler")
.e("linux64", "org.codehaus.griffon.runtime.util.DefaultLinuxPlatformHandler")
.e("macosx", "org.codehaus.griffon.runtime.util.DefaultMacOSXPlatformHandler")
.e("macosx64", "org.codehaus.griffon.runtime.util.DefaultMacOSXPlatformHandler")
.e("solaris", "org.codehaus.griffon.runtime.util.DefaultSolarisPlatformHandler")
.e("windows", "org.codehaus.griffon.runtime.util.DefaultWindowsPlatformHandler")
.e("windows64", "org.codehaus.griffon.runtime.util.DefaultWindowsPlatformHandler");
private static final String LOCATION_CLASSPATH = "classpath:";
private static final String LOCATION_FILE = "file:";
private static final String PROPERTIES_SUFFIX = ".properties";
private static final String GROOVY_SUFFIX = ".groovy";
private static final String KEY_MESSAGE_SOURCE_FACTORY = "app.messageSource.factory";
private static final String KEY_RESOURCES_INJECTOR_FACTORY = "app.resourceInjector.factory";
private static final String KEY_EVENT_ROUTER_FACTORY = "app.eventRouter.factory";
private static final String KEY_ADDON_MANAGER_FACTORY = "app.addonManager.factory";
private static final String KEY_ARTIFACT_MANAGER_FACTORY = "app.artifactManager.factory";
private static final String KEY_ACTION_MANAGER_FACTORY = "app.actionManager.factory";
private static final String KEY_MVCGROUP_MANAGER_FACTORY = "app.mvcGroupManager.factory";
private static final String KEY_RESOURCE_RESOLVER_FACTORY = "app.resourceResolver.factory";
private static final String KEY_LOG_MANAGER_FACTORY = "app.logManager.factory";
private static final String KEY_APP_LIFECYCLE_HANDLER_DISABLE = "app.lifecycle.handler.disable";
private static final String KEY_GRIFFON_ACTION_MANAGER_DISABLE = "griffon.action.manager.disable";
private static final String DEFAULT_MESSAGE_SOURCE_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultMessageSourceFactory";
private static final String DEFAULT_RESOURCES_INJECTOR_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultResourcesInjectorFactory";
private static final String DEFAULT_EVENT_ROUTER_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultEventRouterFactory";
private static final String DEFAULT_ADDON_MANAGER_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultAddonManagerFactory";
private static final String DEFAULT_ARTIFACT_MANAGER_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultArtifactManagerFactory";
private static final String DEFAULT_MVCGROUP_MANAGER_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultMVCGroupManagerFactory";
private static final String DEFAULT_RESOURCE_RESOLVER_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultResourceResolverFactory";
private static final String DEFAULT_LOG_MANAGER_FACTORY = "org.codehaus.griffon.runtime.core.factories.DefaultLogManagerFactory";
static {
ExpandoMetaClassCreationHandle.enable();
}
/**
 * Creates, registers and assigns an ExpandoMetaClass for a target class.<p>
 * The newly created metaClass will accept changes after initialization.
 *
 * @param clazz the target class
 * @return an ExpandoMetaClass registered for {@code clazz}
 */
public static MetaClass expandoMetaClassFor(Class clazz) {
    MetaClass metaClass = GroovySystem.getMetaClassRegistry().getMetaClass(clazz);
    if (metaClass instanceof ExpandoMetaClass) {
        // already expando-enabled; nothing to do
        return metaClass;
    }
    MetaClass expando = new ExpandoMetaClass(clazz, true, true);
    expando.initialize();
    GroovySystem.getMetaClassRegistry().setMetaClass(clazz, expando);
    return expando;
}
/**
 * Setups an application.<p>
 * This method performs the following tasks<ul>
 * <li>Sets "griffon.start.dir" as system property.</li>
 * <li>Calls the Initialize life cycle script.</li>
 * <li>Reads runtime and builder configuration.</li>
 * <li>Setups basic artifact handlers.</li>
 * <li>Initializes available addons.</li>
 * </ul>
 * NOTE(review): the call order below is deliberate — configuration must be
 * read before any factory lookups, and addons before the action manager
 * (which consumes addon-contributed interceptors). Do not reorder.
 *
 * @param app the current Griffon application
 */
public static void prepare(GriffonApplication app) {
// expose the application instance to all scripts run with these bindings
app.getBindings().setVariable("app", app);
// side effect: these getters establish start/working dir system properties
Metadata.getCurrent().getGriffonStartDir();
Metadata.getCurrent().getGriffonWorkingDir();
readAndSetConfiguration(app);
app.event(GriffonApplication.Event.BOOTSTRAP_START.getName(), asList(app));
initializeMessageSource(app);
initializeResourceResolver(app);
initializeResourcesInjector(app);
initializePropertyEditors(app);
applyPlatformTweaks(app);
// run the 'Initialize' lifecycle script/class (inside the UI thread)
runLifecycleHandler(GriffonApplication.Lifecycle.INITIALIZE.getName(), app);
initializeArtifactManager(app);
initializeMvcManager(app);
initializeAddonManager(app);
initializeActionManager(app);
app.event(GriffonApplication.Event.BOOTSTRAP_END.getName(), asList(app));
}
/**
 * Loads a configuration; when a script class is supplied its simple name
 * overrides the given file name.
 */
private static ConfigObject doLoadConfig(ConfigReader configReader, Class configClass, String configFileName) {
    String fileName = configClass == null ? configFileName : configClass.getSimpleName();
    return loadConfig(configReader, configClass, fileName);
}

/**
 * Locale-aware variant of {@code doLoadConfig}; i18n suffixed variants of the
 * configuration are merged according to {@code locale}.
 */
private static ConfigObject doLoadConfigWithI18n(Locale locale, ConfigReader configReader, Class configClass, String configFileName) {
    String fileName = configClass == null ? configFileName : configClass.getSimpleName();
    return loadConfigWithI18n(locale, configReader, configClass, fileName);
}
/**
 * Reads Application, Config and Builder configurations (locale aware), wires
 * them into the application and initializes logging, external config
 * overrides, the exception handler and the event router.
 *
 * @param app the current Griffon application
 */
private static void readAndSetConfiguration(final GriffonApplication app) {
ConfigReader configReader = createConfigReader();
// first pass: read the application config only to discover the configured locale
ConfigObject appConfig = doLoadConfig(configReader, app.getAppConfigClass(), GriffonApplication.Configuration.APPLICATION.getName());
setApplicationLocale(app, getConfigValue(appConfig, "application.locale", Locale.getDefault()));
// second pass: re-read with the resolved locale so i18n config variants apply
appConfig = doLoadConfigWithI18n(app.getLocale(), configReader, app.getAppConfigClass(), GriffonApplication.Configuration.APPLICATION.getName());
app.setConfig(appConfig);
app.getConfig().merge(doLoadConfigWithI18n(app.getLocale(), configReader, app.getConfigClass(), GriffonApplication.Configuration.CONFIG.getName()));
// logging must be configured before external config so overrides are logged correctly
initializeLogManager(app);
loadExternalConfig(app, configReader);
GriffonExceptionHandler.configure(app.getConfig().flatten(new LinkedHashMap()));
app.setBuilderConfig(doLoadConfigWithI18n(app.getLocale(), configReader, app.getBuilderClass(), GriffonApplication.Configuration.BUILDER.getName()));
initializeEventRouter(app);
// optional Events config; when present it doubles as an application event listener
Object events = safeNewInstance(app.getEventsClass(), false);
if (events != null) {
app.setEventsConfig(events);
app.addApplicationEventListener(app.getEventsConfig());
}
}
/**
 * Merges external configuration sources listed under 'griffon.config.locations'
 * into the application config. Each location may be prefixed with 'classpath:'
 * or 'file:' (resolved as .properties/.groovy resources); unprefixed entries
 * are treated as compiled config script class names.
 *
 * @param app          the current Griffon application
 * @param configReader reader used to parse each configuration source
 */
private static void loadExternalConfig(GriffonApplication app, ConfigReader configReader) {
List<String> locations = (List<String>) getConfigValue(app.getConfig(), "griffon.config.locations", Collections.emptyList());
for (String location : locations) {
boolean groovyScriptAllowed = false;
String parsedLocation = location;
if (location.startsWith(LOCATION_CLASSPATH)) {
parsedLocation = location.substring(LOCATION_CLASSPATH.length()).trim();
} else if (location.startsWith(LOCATION_FILE)) {
parsedLocation = location.substring(LOCATION_FILE.length()).trim();
} else {
// assume it's a class definition
groovyScriptAllowed = true;
}
if (groovyScriptAllowed) {
// class-based config: load the compiled config script by name
Class locationScriptClass = safeLoadClass(parsedLocation);
if (locationScriptClass != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Loading external configuration location '" + location + "'.");
}
app.getConfig().merge(loadConfigWithI18n(app.getLocale(), configReader, locationScriptClass, null));
} else {
// invalid location. Log & skip
if (LOG.isWarnEnabled()) {
LOG.warn("Skipping invalid external configuration location '" + location + "'.");
}
}
} else if (parsedLocation.endsWith(PROPERTIES_SUFFIX) || parsedLocation.endsWith(GROOVY_SUFFIX)) {
// resource-based config: only .properties and .groovy files are accepted
if (LOG.isDebugEnabled()) {
LOG.debug("Loading external configuration location '" + location + "'.");
}
app.getConfig().merge(loadConfigWithI18n(app.getLocale(), configReader, null, parsedLocation));
} else {
// invalid location. Log & skip
if (LOG.isWarnEnabled()) {
LOG.warn("Skipping invalid external configuration location '" + location + "'.");
}
}
}
}
/**
 * Creates the LogManager via its configured factory and applies the
 * application's logging configuration.
 */
private static void initializeLogManager(GriffonApplication app) {
    String factoryClassName = getConfigValueAsString(app.getConfig(), KEY_LOG_MANAGER_FACTORY, DEFAULT_LOG_MANAGER_FACTORY);
    LogManagerFactory factory = (LogManagerFactory) safeNewInstance(factoryClassName);
    factory.create(app).configure(app.getConfig());
}

/**
 * Creates the application's MessageSource via its configured factory.
 */
private static void initializeMessageSource(GriffonApplication app) {
    String factoryClassName = getConfigValueAsString(app.getConfig(), KEY_MESSAGE_SOURCE_FACTORY, DEFAULT_MESSAGE_SOURCE_FACTORY);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + factoryClassName + " as MessageSourceFactory");
    }
    MessageSourceFactory factory = (MessageSourceFactory) safeNewInstance(factoryClassName);
    InvokerHelper.setProperty(app, "messageSource", factory.create(app));
}

/**
 * Creates the application's ResourceResolver via its configured factory.
 */
private static void initializeResourceResolver(GriffonApplication app) {
    String factoryClassName = getConfigValueAsString(app.getConfig(), KEY_RESOURCE_RESOLVER_FACTORY, DEFAULT_RESOURCE_RESOLVER_FACTORY);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + factoryClassName + " as ResourceResolverFactory");
    }
    ResourceResolverFactory factory = (ResourceResolverFactory) safeNewInstance(factoryClassName);
    InvokerHelper.setProperty(app, "resourceResolver", factory.create(app));
}

/**
 * Creates the ResourcesInjector via its configured factory and registers a
 * listener that injects resources into every newly created artifact instance.
 */
private static void initializeResourcesInjector(GriffonApplication app) {
    String factoryClassName = getConfigValueAsString(app.getConfig(), KEY_RESOURCES_INJECTOR_FACTORY, DEFAULT_RESOURCES_INJECTOR_FACTORY);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + factoryClassName + " as ResourcesInjectorFactory");
    }
    ResourcesInjectorFactory factory = (ResourcesInjectorFactory) safeNewInstance(factoryClassName);
    final ResourcesInjector injector = factory.create(app);
    app.addApplicationEventListener(GriffonApplication.Event.NEW_INSTANCE.getName(), new RunnableWithArgs() {
        public void run(Object[] args) {
            // NEW_INSTANCE event args: [klass, type, instance]
            Object newlyCreated = args[2];
            injector.injectResources(newlyCreated);
        }
    });
}
/**
 * Registers PropertyEditors discovered via
 * META-INF/services/java.beans.PropertyEditor resources on the classpath.
 * Each non-comment line must have the form 'targetType = editorClass'.
 * Lines starting with '#' and blank lines are ignored; malformed lines are
 * logged and skipped.
 *
 * @param app the current Griffon application (unused here; kept for symmetry
 *            with the other initialize* methods)
 */
private static void initializePropertyEditors(GriffonApplication app) {
Enumeration<URL> urls = null;
try {
urls = ApplicationClassLoader.get().getResources("META-INF/services/" + PropertyEditor.class.getName());
} catch (IOException ioe) {
// best-effort: no editor definitions could be located
return;
}
if (urls == null) return;
while (urls.hasMoreElements()) {
URL url = urls.nextElement();
if (LOG.isDebugEnabled()) {
LOG.debug("Reading " + PropertyEditor.class.getName() + " definitions from " + url);
}
try {
eachLine(url, new RunnableWithArgsClosure(new RunnableWithArgs() {
@Override
public void run(Object[] args) {
String line = (String) args[0];
// skip comments and blank lines
if (line.startsWith("#") || isBlank(line)) return;
try {
String[] parts = line.trim().split("=");
Class targetType = loadClass(parts[0].trim());
Class editorClass = loadClass(parts[1].trim());
if (LOG.isDebugEnabled()) {
LOG.debug("Registering " + editorClass.getName() + " as editor for " + targetType.getName());
}
PropertyEditorManager.registerEditor(targetType, editorClass);
} catch (Exception e) {
// malformed line or unloadable class: log & skip, do not abort the scan
if (LOG.isWarnEnabled()) {
LOG.warn("Could not load PropertyEditor with " + line, sanitize(e));
}
}
}
}));
} catch (IOException e) {
if (LOG.isWarnEnabled()) {
LOG.warn("Could not load PropertyEditor definitions from " + url, sanitize(e));
}
}
}
}
/**
 * Installs the application's EventRouter.
 */
private static void initializeEventRouter(GriffonApplication app) {
    EventRouter eventRouter = createEventRouter(app);
    InvokerHelper.setProperty(app, "eventRouter", eventRouter);
}

/**
 * Creates an EventRouter via the configured (or default) factory.
 *
 * @param app the current Griffon application
 * @return a new EventRouter instance
 */
public static EventRouter createEventRouter(GriffonApplication app) {
    String factoryClassName = getConfigValueAsString(app.getConfig(), KEY_EVENT_ROUTER_FACTORY, DEFAULT_EVENT_ROUTER_FACTORY);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + factoryClassName + " as EventRouterFactory");
    }
    EventRouterFactory factory = (EventRouterFactory) safeNewInstance(factoryClassName);
    return factory.create(app);
}
/**
 * Applies the configured locale to the application; accepts either a Locale
 * instance or a CharSequence in 'lang[_country[_variant]]' form.
 */
private static void setApplicationLocale(GriffonApplication app, Object localeValue) {
    if (localeValue instanceof Locale) {
        app.setLocale((Locale) localeValue);
        return;
    }
    if (localeValue instanceof CharSequence) {
        app.setLocale(parseLocale(String.valueOf(localeValue)));
    }
    // any other type is silently ignored; the application keeps its current locale
}

/**
 * Parses a locale string of the form 'lang', 'lang_country' or
 * 'lang_country_variant'. Blank input or any other shape yields the
 * platform default locale.
 *
 * @param locale the locale specification, may be blank
 * @return the parsed Locale, never null
 */
public static Locale parseLocale(String locale) {
    if (isBlank(locale)) {
        return Locale.getDefault();
    }
    String[] tokens = locale.split("_");
    if (tokens.length == 1) {
        return new Locale(tokens[0]);
    }
    if (tokens.length == 2) {
        return new Locale(tokens[0], tokens[1]);
    }
    if (tokens.length == 3) {
        return new Locale(tokens[0], tokens[1], tokens[2]);
    }
    return Locale.getDefault();
}
/**
 * Resolves and invokes the PlatformHandler matching the current platform.
 * A custom handler may be supplied via the 'platform.handler.&lt;platform&gt;'
 * configuration key; otherwise a built-in default is used.
 *
 * @param app the current Griffon application
 */
public static void applyPlatformTweaks(GriffonApplication app) {
    String platform = GriffonApplicationUtils.platform;
    String defaultHandler = DEFAULT_PLATFORM_HANDLERS.get(platform);
    String handlerClassName = getConfigValueAsString(app.getConfig(), "platform.handler." + platform, defaultHandler);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + handlerClassName + " as PlatformHandler");
    }
    PlatformHandler handler = (PlatformHandler) safeNewInstance(handlerClassName);
    handler.handle(app);
}
/**
 * Creates the ArtifactManager (unless one was already set), registers the
 * built-in Model/View/Controller/Service handlers plus any handlers
 * discovered on the classpath, then loads artifact metadata.
 *
 * @param app the current Griffon application
 */
private static void initializeArtifactManager(GriffonApplication app) {
if (app.getArtifactManager() == null) {
String className = getConfigValueAsString(app.getConfig(), KEY_ARTIFACT_MANAGER_FACTORY, DEFAULT_ARTIFACT_MANAGER_FACTORY);
if (LOG.isDebugEnabled()) {
LOG.debug("Using " + className + " as ArtifactManagerFactory");
}
ArtifactManagerFactory factory = (ArtifactManagerFactory) safeNewInstance(className);
InvokerHelper.setProperty(app, "artifactManager", factory.create(app));
}
// initialize default Artifact handlers
app.getArtifactManager().registerArtifactHandler(new ModelArtifactHandler(app));
app.getArtifactManager().registerArtifactHandler(new ViewArtifactHandler(app));
app.getArtifactManager().registerArtifactHandler(new ControllerArtifactHandler(app));
// service handling may be delegated to a DI addon; only register when enabled
if (!ServiceArtifactHandler.isBasicInjectionDisabled()) {
app.getArtifactManager().registerArtifactHandler(new ServiceArtifactHandler(app));
}
// load additional handlers
loadArtifactHandlers(app);
app.getArtifactManager().loadArtifactMetadata();
}
/**
 * Creates the AddonManager via its configured factory (unless one was
 * already set) and initializes all available addons.
 */
private static void initializeAddonManager(GriffonApplication app) {
    if (app.getAddonManager() != null) {
        app.getAddonManager().initialize();
        return;
    }
    String factoryClassName = getConfigValueAsString(app.getConfig(), KEY_ADDON_MANAGER_FACTORY, DEFAULT_ADDON_MANAGER_FACTORY);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + factoryClassName + " as AddonManagerFactory");
    }
    AddonManagerFactory factory = (AddonManagerFactory) safeNewInstance(factoryClassName);
    InvokerHelper.setProperty(app, "addonManager", factory.create(app));
    app.getAddonManager().initialize();
}
/**
 * Installs the GriffonControllerActionManager (unless disabled) and registers
 * all addon-contributed action interceptors in dependency order.<p>
 * Resolution order for the manager factory: the 'app.actionManager.factory'
 * config key, then a META-INF/services descriptor; when neither yields a
 * class name the no-op manager installed at the top remains in place.
 *
 * @param app the current Griffon application
 */
private static void initializeActionManager(GriffonApplication app) {
    // install a no-op manager first so the 'actionManager' property is never null
    InvokerHelper.setProperty(app, "actionManager", new NoopGriffonControllerActionManager(app));
    boolean disableActionManager = getConfigValueAsBoolean(app.getConfig(), KEY_GRIFFON_ACTION_MANAGER_DISABLE, false);
    if (disableActionManager) {
        if (LOG.isInfoEnabled()) {
            LOG.info("GriffonControllerActionManager is disabled.");
        }
        return;
    }
    String className = getConfigValueAsString(app.getConfig(), KEY_ACTION_MANAGER_FACTORY, null);
    if (isBlank(className) || "null".equals(className)) {
        // no explicit factory configured; fall back to service discovery
        URL url = ApplicationClassLoader.get().getResource("META-INF/services/" + GriffonControllerActionManagerFactory.class.getName());
        if (null == url) {
            if (LOG.isInfoEnabled()) {
                LOG.info("GriffonControllerActionManager is disabled.");
            }
            return;
        }
        try {
            className = ResourceGroovyMethods.getText(url).trim();
        } catch (IOException e) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Cannot read GriffonControllerActionManager definition from " + url, sanitize(e));
            }
            // BUGFIX: this reset used to sit inside the logging guard above, so with
            // DEBUG logging off a stale blank/"null" value slipped through and was
            // later fed to safeNewInstance (NPE). Reset unconditionally instead.
            className = null;
        }
    }
    if (isBlank(className)) {
        if (LOG.isInfoEnabled()) {
            LOG.info("GriffonControllerActionManager is disabled.");
        }
        return;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using " + className + " as GriffonControllerActionManagerFactory");
    }
    GriffonControllerActionManagerFactory factory = (GriffonControllerActionManagerFactory) safeNewInstance(className);
    final GriffonControllerActionManager actionManager = factory.create(app);
    InvokerHelper.setProperty(app, "actionManager", actionManager);
    // create actions for every controller as soon as it is instantiated
    app.addApplicationEventListener(GriffonApplication.Event.NEW_INSTANCE.getName(), new RunnableWithArgs() {
        public void run(Object[] args) {
            // NEW_INSTANCE event args: [klass, type, instance]
            String type = (String) args[1];
            if (GriffonControllerClass.TYPE.equals(type)) {
                GriffonController controller = (GriffonController) args[2];
                actionManager.createActions(controller);
            }
        }
    });
    // expose each controller action as a '<name>Action' variable on the group's builder
    app.addApplicationEventListener(GriffonApplication.Event.INITIALIZE_MVC_GROUP.getName(), new RunnableWithArgs() {
        public void run(Object[] args) {
            MVCGroupConfiguration groupConfig = (MVCGroupConfiguration) args[0];
            MVCGroup group = (MVCGroup) args[1];
            GriffonController controller = group.getController();
            if (controller == null) return;
            FactoryBuilderSupport builder = group.getBuilder();
            Map<String, GriffonControllerAction> actions = actionManager.actionsFor(controller);
            for (Map.Entry<String, GriffonControllerAction> action : actions.entrySet()) {
                String actionKey = actionManager.normalizeName(action.getKey()) + GriffonControllerActionManager.ACTION;
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Adding action " + actionKey + " to " + groupConfig.getMvcType() + ":" + group.getMvcId() + ":builder");
                }
                builder.setVariable(actionKey, action.getValue().getToolkitAction());
            }
        }
    });
    // collect interceptor definitions contributed by all addons (later addons win on key clash)
    Map<String, Map<String, Object>> actionInterceptors = new LinkedHashMap<String, Map<String, Object>>();
    for (GriffonAddon addon : app.getAddonManager().getAddons().values()) {
        Map<String, Map<String, Object>> interceptors = addon.getActionInterceptors();
        if (interceptors != null && !interceptors.isEmpty()) {
            actionInterceptors.putAll(interceptors);
        }
    }
    // repeated-sweep topological sort honoring each interceptor's 'dependsOn' list
    List<GriffonControllerActionInterceptor> sortedInterceptors = new ArrayList<GriffonControllerActionInterceptor>();
    Map<String, Map<String, Object>> map = new LinkedHashMap<String, Map<String, Object>>(actionInterceptors);
    Set<String> addedDeps = new LinkedHashSet<String>();
    while (!map.isEmpty()) {
        int filtersAdded = 0;
        if (LOG.isDebugEnabled()) {
            LOG.debug("Current interceptor order is " + actionInterceptors.keySet());
        }
        for (Iterator<Map.Entry<String, Map<String, Object>>> iter = map.entrySet().iterator(); iter.hasNext(); ) {
            Map.Entry<String, Map<String, Object>> entry = iter.next();
            String interceptorName = entry.getKey();
            List<String> dependsOn = (List<String>) getConfigValue(entry.getValue(), "dependsOn", Collections.emptyList());
            String interceptorClassName = (String) getConfigValue(entry.getValue(), "interceptor", null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Processing interceptor '" + interceptorName + "'");
                LOG.debug("    depends on '" + dependsOn + "'");
            }
            if (isBlank(interceptorClassName)) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("  Skipped interceptor '" + interceptorName + "', since it does not define an interceptor class");
                }
                iter.remove();
                continue;
            }
            if (!dependsOn.isEmpty()) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("  Checking interceptor '" + interceptorName + "' dependencies (" + dependsOn.size() + ")");
                }
                boolean failedDep = false;
                for (String dep : dependsOn) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("  Checking interceptor '" + interceptorName + "' dependencies: " + dep);
                    }
                    if (!addedDeps.contains(dep)) {
                        // dep not in the list yet, we need to skip adding this to the list for now
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("  Skipped interceptor '" + interceptorName + "', since dependency '" + dep + "' not yet added");
                        }
                        failedDep = true;
                        break;
                    } else {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("  Interceptor '" + interceptorName + "' dependency '" + dep + "' already added");
                        }
                    }
                }
                if (failedDep) {
                    // move on to the next interceptor; this one may become addable in a later sweep
                    continue;
                }
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("  Adding interceptor '" + interceptorName + "', since all dependencies have been added");
            }
            sortedInterceptors.add((GriffonControllerActionInterceptor) newInstance(app, safeLoadClass(interceptorClassName)));
            addedDeps.add(interceptorName);
            iter.remove();
            filtersAdded++;
        }
        if (filtersAdded == 0) {
            // we have a cyclical dependency, warn the user and load in the order they appeared originally
            if (LOG.isWarnEnabled()) {
                LOG.warn("::::::::::::::::::::::::::::::::::::::::::::::::::::::");
                LOG.warn("::   Unresolved interceptor dependencies detected   ::");
                LOG.warn("::   Continuing with original interceptor order     ::");
                LOG.warn("::::::::::::::::::::::::::::::::::::::::::::::::::::::");
            }
            for (Map.Entry<String, Map<String, Object>> entry : map.entrySet()) {
                String interceptorName = entry.getKey();
                List<String> dependsOn = (List<String>) getConfigValue(entry.getValue(), "dependsOn", Collections.emptyList());
                // display this as a cyclical dep
                if (LOG.isWarnEnabled()) {
                    LOG.warn("::   Interceptor " + interceptorName);
                }
                if (!dependsOn.isEmpty()) {
                    for (String dep : dependsOn) {
                        if (LOG.isWarnEnabled()) {
                            LOG.warn("::     depends on " + dep);
                        }
                    }
                } else {
                    // we should only have items left in the list with deps, so this should never happen
                    // but a wise man once said...check for true, false and otherwise...just in case
                    if (LOG.isWarnEnabled()) {
                        LOG.warn("::   Problem while resolving dependencies.");
                        LOG.warn("::   Unable to resolve dependency hierarchy.");
                    }
                }
                if (LOG.isWarnEnabled()) {
                    LOG.warn("::::::::::::::::::::::::::::::::::::::::::::::::::::::");
                }
            }
            break;
            // if we have processed all the interceptors, we are done
        } else if (sortedInterceptors.size() == actionInterceptors.size()) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Interceptor dependency ordering complete");
            }
            break;
        }
    }
    for (GriffonControllerActionInterceptor interceptor : sortedInterceptors) {
        actionManager.addActionInterceptor(interceptor);
    }
}
/**
 * Creates the MVCGroupManager (unless one was already set) and registers an
 * MVCGroupConfiguration for every group declared under the 'mvcGroups'
 * configuration key. A member entry named 'config' holding a Map is treated
 * as the group's configuration rather than as a member class name.
 *
 * @param app the current Griffon application
 */
private static void initializeMvcManager(GriffonApplication app) {
if (app.getMvcGroupManager() == null) {
String className = getConfigValueAsString(app.getConfig(), KEY_MVCGROUP_MANAGER_FACTORY, DEFAULT_MVCGROUP_MANAGER_FACTORY);
if (LOG.isDebugEnabled()) {
LOG.debug("Using " + className + " as MVCGroupManagerFactory");
}
MVCGroupManagerFactory factory = (MVCGroupManagerFactory) safeNewInstance(className);
InvokerHelper.setProperty(app, "mvcGroupManager", factory.create(app));
}
Map<String, MVCGroupConfiguration> configurations = new LinkedHashMap<String, MVCGroupConfiguration>();
Map<String, Map<String, Object>> mvcGroups = (Map<String, Map<String, Object>>) app.getConfig().get("mvcGroups");
if (mvcGroups != null) {
for (Map.Entry<String, Map<String, Object>> groupEntry : mvcGroups.entrySet()) {
String type = groupEntry.getKey();
if (LOG.isDebugEnabled()) {
LOG.debug("Adding MVC group " + type);
}
Map<String, Object> members = groupEntry.getValue();
Map<String, Object> configMap = new LinkedHashMap<String, Object>();
Map<String, String> membersCopy = new LinkedHashMap<String, String>();
for (Object o : members.entrySet()) {
Map.Entry entry = (Map.Entry) o;
String key = String.valueOf(entry.getKey());
// the special 'config' member carries group options, not a class name
if ("config".equals(key) && entry.getValue() instanceof Map) {
configMap = (Map<String, Object>) entry.getValue();
} else {
membersCopy.put(key, String.valueOf(entry.getValue()));
}
}
configurations.put(type, app.getMvcGroupManager().newMVCGroupConfiguration(type, membersCopy, configMap));
}
}
app.getMvcGroupManager().initialize(configurations);
}
/**
 * Registers additional ArtifactHandlers discovered via
 * META-INF/services/griffon.core.ArtifactHandler resources on the classpath.
 * Lines starting with '#' and blank lines are ignored; handlers that cannot
 * be instantiated are logged and skipped.
 *
 * @param app the current Griffon application
 */
private static void loadArtifactHandlers(final GriffonApplication app) {
    Enumeration<URL> urls = null;
    try {
        urls = ApplicationClassLoader.get().getResources("META-INF/services/" + ArtifactHandler.class.getName());
    } catch (IOException ioe) {
        // best-effort: no handler definitions could be located
        return;
    }
    if (urls == null) return;
    while (urls.hasMoreElements()) {
        URL url = urls.nextElement();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Reading " + ArtifactHandler.class.getName() + " definitions from " + url);
        }
        try {
            eachLine(url, new RunnableWithArgsClosure(new RunnableWithArgs() {
                @Override
                public void run(Object[] args) {
                    String line = (String) args[0];
                    if (line.startsWith("#") || isBlank(line)) return;
                    try {
                        Class artifactHandlerClass = loadClass(line);
                        ArtifactHandler handler;
                        try {
                            // prefer the (GriffonApplication) constructor when one is declared
                            Constructor ctor = artifactHandlerClass.getDeclaredConstructor(GriffonApplication.class);
                            handler = (ArtifactHandler) ctor.newInstance(app);
                        } catch (NoSuchMethodException nsme) {
                            // BUGFIX: getDeclaredConstructor never returns null — it throws —
                            // so the old 'ctor != null' no-arg fallback was unreachable and
                            // such handlers were warned about and skipped. Fall back properly.
                            handler = (ArtifactHandler) safeNewInstance(artifactHandlerClass);
                        }
                        app.getArtifactManager().registerArtifactHandler(handler);
                    } catch (Exception e) {
                        if (LOG.isWarnEnabled()) {
                            LOG.warn("Could not load ArtifactHandler with " + line, sanitize(e));
                        }
                    }
                }
            }));
        } catch (IOException e) {
            if (LOG.isWarnEnabled()) {
                LOG.warn("Could not load ArtifactHandler from " + url, sanitize(e));
            }
        }
    }
}
/**
 * Executes a lifecycle handler (Groovy script or LifecycleHandler class)
 * inside the UI Thread.<p>
 * On Swing this would be the Event Dispatch Thread. A missing handler is
 * silently ignored — lifecycle phases are optional.
 *
 * @param handlerName the lifecycle phase name (e.g. 'Initialize')
 * @param app         the current Griffon application
 */
public static void runLifecycleHandler(String handlerName, GriffonApplication app) {
    boolean skipHandler = getConfigValueAsBoolean(app.getConfig(), KEY_APP_LIFECYCLE_HANDLER_DISABLE, false);
    if (skipHandler) {
        // BUGFIX: the guard used to check isDebugEnabled() while logging at INFO,
        // silently dropping the message on INFO-level logger configurations
        if (LOG.isInfoEnabled()) {
            LOG.info("Lifecycle handler '" + handlerName + "' has been disabled. SKIPPING.");
        }
        return;
    }
    Class<?> handlerClass = null;
    try {
        handlerClass = loadConfigurationalClass(handlerName);
    } catch (ClassNotFoundException cnfe) {
        if (cnfe.getMessage().equals(handlerName)) {
            // the script must not exist, do nothing
            //LOGME - may be because of chained failures
            return;
        } else {
            // a chained failure: the handler exists but one of its dependencies is missing
            throw new GriffonException(cnfe);
        }
    }
    if (Script.class.isAssignableFrom(handlerClass)) {
        doRunScript(handlerName, handlerClass, app);
    } else if (LifecycleHandler.class.isAssignableFrom(handlerClass)) {
        doRunLifecycleHandler(handlerName, handlerClass, app);
    }
}
// Binds the application to the lifecycle script, enhances it with UI-thread
// helpers and runs it synchronously inside the UI thread.
private static void doRunScript(String scriptName, Class handlerClass, GriffonApplication app) {
    Script lifecycleScript = (Script) safeNewInstance(handlerClass);
    lifecycleScript.setBinding(app.getBindings());
    UIThreadManager.enhance(lifecycleScript);
    if (LOG.isInfoEnabled()) {
        LOG.info("Running lifecycle handler (script) '" + scriptName + "'");
    }
    UIThreadManager.getInstance().executeSync(lifecycleScript);
}

// Runs a LifecycleHandler implementation synchronously inside the UI thread.
private static void doRunLifecycleHandler(String handlerName, Class handlerClass, GriffonApplication app) {
    LifecycleHandler lifecycleHandler = (LifecycleHandler) safeNewInstance(handlerClass);
    if (LOG.isInfoEnabled()) {
        LOG.info("Running lifecycle handler (class) '" + handlerName + "'");
    }
    UIThreadManager.getInstance().executeSync(lifecycleHandler);
}
/**
 * Creates a new instance of the specified class with an empty type.<p>
 * Publishes a <strong>NewInstance</strong> event with the following arguments<ul>
 * <li>klass - the target Class</li>
 * <li>type - the type of the instance (i.e, 'controller','service')</li>
 * <li>instance - the newly created instance</li>
 * </ul>
 *
 * @param app   the current GriffonApplication
 * @param klass the target Class from which the instance will be created
 * @return a newly created instance of type klass
 */
public static Object newInstance(GriffonApplication app, Class klass) {
    // delegate to the full variant with no specific artifact type
    return newInstance(app, klass, "");
}
/**
 * Creates a new instance of the specified class.<p>
 * Publishes a <strong>NewInstance</strong> event with the following arguments<ul>
 * <li>klass - the target Class</li>
 * <li>type - the type of the instance (i.e, 'controller','service')</li>
 * <li>instance - the newly created instance</li>
 * </ul>
 * The instance is also enhanced with the application reference and
 * UI-threading helpers before the event is published.
 *
 * @param app   the current GriffonApplication
 * @param klass the target Class from which the instance will be created
 * @param type  optional type parameter, used when publishing a 'NewInstance' event
 * @return a newly created instance of type klass
 * @throws GriffonException if the class cannot be instantiated
 */
public static Object newInstance(GriffonApplication app, Class klass, String type) {
if (isBlank(type)) type = "";
if (LOG.isDebugEnabled()) {
LOG.debug("Instantiating " + klass.getName() + " with type '" + type + "'");
}
Object instance = null;
try {
// requires a public no-arg constructor
instance = klass.newInstance();
} catch (InstantiationException e) {
throw new GriffonException(e);
} catch (IllegalAccessException e) {
throw new GriffonException(e);
}
// GRIFFON-535
if (instance != null) {
// prefer the artifact's own MetaClass; fall back to a fresh ExpandoMetaClass
GriffonClass griffonClass = app.getArtifactManager().findGriffonClass(klass);
MetaClass mc = griffonClass != null ? griffonClass.getMetaClass() : expandoMetaClassFor(klass);
enhance(app, klass, mc, instance);
app.event(GriffonApplication.Event.NEW_INSTANCE.getName(), asList(klass, type, instance));
}
return instance;
}
/**
 * Injects the application reference into an instance and, for non-artifacts,
 * mixes in UI-threading helpers.<p>
 * The 'app' reference is applied via the first mechanism that works:
 * a setApp(app) method, an 'app' property, or — as a last resort — a bean
 * property registered on the instance's ExpandoMetaClass.
 *
 * @param app      the current GriffonApplication
 * @param klass    the instance's class
 * @param mc       the MetaClass associated with the instance
 * @param instance the object to enhance
 */
public static void enhance(GriffonApplication app, Class klass, MetaClass mc, Object instance) {
try {
InvokerHelper.invokeMethod(instance, "setApp", app);
} catch (MissingMethodException mme) {
try {
InvokerHelper.setProperty(instance, "app", app);
} catch (MissingPropertyException mpe) {
// no setter and no property: register one dynamically when possible
if (mc instanceof ExpandoMetaClass) {
((ExpandoMetaClass) mc).registerBeanProperty("app", app);
}
}
}
// GriffonArtifact implementations already expose threading methods
if (!GriffonArtifact.class.isAssignableFrom(klass)) {
UIThreadManager.enhance(mc);
}
}
/**
 * Resolves a configurational class by name. Unqualified names are first tried
 * with the 'config.' package prefix (the convention for compiled configuration
 * scripts) before falling back to the plain name.
 *
 * @param className the (possibly unqualified) class name
 * @return the resolved class
 * @throws ClassNotFoundException if the class cannot be resolved at all
 * @throws GriffonException       on a chained failure while loading the prefixed class
 */
public static Class<?> loadConfigurationalClass(String className) throws ClassNotFoundException {
    if (!className.contains(".")) {
        String fixedClassName = "config." + className;
        try {
            return loadClass(fixedClassName);
        } catch (ClassNotFoundException cnfe) {
            if (cnfe.getMessage().equals(fixedClassName)) {
                // 'config.<name>' simply does not exist; fall back to the plain name
                return loadClass(className);
            } else {
                // the prefixed class exists but one of its dependencies is missing
                throw new GriffonException(cnfe);
            }
        }
    }
    return loadClass(className);
}

/**
 * Loads a class trying this helper's own classloader first, then the
 * application classloader.
 *
 * @param className the fully qualified class name
 * @return the loaded class, never null
 * @throws ClassNotFoundException if neither classloader can resolve the class
 */
public static Class<?> loadClass(String className) throws ClassNotFoundException {
    try {
        return GriffonApplicationHelper.class.getClassLoader().loadClass(className);
    } catch (ClassNotFoundException ignored) {
        // fall through and retry with the application classloader
    }
    // Cleanup: the previous version stashed the exception, re-checked it for null
    // (always non-null at that point) and ended in an unreachable 'return null'.
    // Letting the second lookup's failure propagate preserves behavior exactly:
    // the last ClassNotFoundException was the one thrown before as well.
    return ApplicationClassLoader.get().loadClass(className);
}
/**
 * Loads a class, returning null instead of throwing when it cannot be resolved.
 */
public static Class<?> safeLoadClass(String className) {
    try {
        return loadClass(className);
    } catch (ClassNotFoundException ignored) {
        // caller explicitly asked for a null-on-failure lookup
        return null;
    }
}

/**
 * Instantiates a class by name; failures are routed through the Griffon
 * exception handler and null is returned.
 */
public static Object safeNewInstance(String className) {
    Object instance = null;
    try {
        Class<?> clazz = loadClass(className);
        instance = clazz.newInstance();
    } catch (Exception e) {
        handleThrowable(e);
    }
    return instance;
}

/**
 * Instantiates a class; failures are routed through the Griffon exception
 * handler and null is returned.
 */
public static Object safeNewInstance(Class<?> clazz) {
    return safeNewInstance(clazz, true);
}

/**
 * Instantiates a class, optionally suppressing exception reporting.
 *
 * @param clazz        the class to instantiate via its no-arg constructor
 * @param logException whether failures should be routed through the exception handler
 * @return the new instance, or null on failure
 */
public static Object safeNewInstance(Class<?> clazz, boolean logException) {
    Object instance = null;
    try {
        instance = clazz.newInstance();
    } catch (Exception e) {
        if (logException) {
            handleThrowable(e);
        }
    }
    return instance;
}
}
|
Allow action interceptor order to be suggested by applications
|
subprojects/griffon-rt/src/main/groovy/org/codehaus/griffon/runtime/util/GriffonApplicationHelper.java
|
Allow action interceptor order to be suggested by applications
|
|
Java
|
apache-2.0
|
89ef5727a7bf957463491bb1220a7f4bd7af3150
| 0
|
Alluxio/alluxio,Reidddddd/mo-alluxio,Alluxio/alluxio,jsimsa/alluxio,jswudi/alluxio,ShailShah/alluxio,aaudiber/alluxio,apc999/alluxio,calvinjia/tachyon,wwjiang007/alluxio,jsimsa/alluxio,uronce-cc/alluxio,yuluo-ding/alluxio,riversand963/alluxio,PasaLab/tachyon,ShailShah/alluxio,uronce-cc/alluxio,ChangerYoung/alluxio,aaudiber/alluxio,Reidddddd/mo-alluxio,bf8086/alluxio,yuluo-ding/alluxio,EvilMcJerkface/alluxio,bf8086/alluxio,EvilMcJerkface/alluxio,yuluo-ding/alluxio,jswudi/alluxio,Alluxio/alluxio,wwjiang007/alluxio,PasaLab/tachyon,yuluo-ding/alluxio,maboelhassan/alluxio,maobaolong/alluxio,riversand963/alluxio,madanadit/alluxio,ChangerYoung/alluxio,apc999/alluxio,ShailShah/alluxio,maobaolong/alluxio,maobaolong/alluxio,jswudi/alluxio,apc999/alluxio,calvinjia/tachyon,ChangerYoung/alluxio,maboelhassan/alluxio,PasaLab/tachyon,ShailShah/alluxio,madanadit/alluxio,jsimsa/alluxio,uronce-cc/alluxio,maobaolong/alluxio,apc999/alluxio,madanadit/alluxio,uronce-cc/alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,EvilMcJerkface/alluxio,wwjiang007/alluxio,ShailShah/alluxio,bf8086/alluxio,yuluo-ding/alluxio,wwjiang007/alluxio,Alluxio/alluxio,WilliamZapata/alluxio,WilliamZapata/alluxio,Reidddddd/alluxio,bf8086/alluxio,riversand963/alluxio,Reidddddd/mo-alluxio,jsimsa/alluxio,jswudi/alluxio,maobaolong/alluxio,maobaolong/alluxio,maobaolong/alluxio,bf8086/alluxio,ChangerYoung/alluxio,jswudi/alluxio,madanadit/alluxio,bf8086/alluxio,aaudiber/alluxio,maboelhassan/alluxio,WilliamZapata/alluxio,WilliamZapata/alluxio,PasaLab/tachyon,apc999/alluxio,yuluo-ding/alluxio,ShailShah/alluxio,PasaLab/tachyon,Alluxio/alluxio,madanadit/alluxio,PasaLab/tachyon,Alluxio/alluxio,riversand963/alluxio,EvilMcJerkface/alluxio,maboelhassan/alluxio,maobaolong/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,jswudi/alluxio,ChangerYoung/alluxio,Alluxio/alluxio,Reidddddd/mo-alluxio,riversand963/alluxio,jsimsa/alluxio,Reidddddd/alluxio,ChangerYoung/alluxio,madanadit/alluxio,calvinjia/tachyon,wwjiang007/alluxio,Reidddddd/al
luxio,WilliamZapata/alluxio,Reidddddd/alluxio,aaudiber/alluxio,uronce-cc/alluxio,PasaLab/tachyon,maboelhassan/alluxio,Alluxio/alluxio,madanadit/alluxio,Reidddddd/alluxio,aaudiber/alluxio,Alluxio/alluxio,calvinjia/tachyon,riversand963/alluxio,wwjiang007/alluxio,aaudiber/alluxio,wwjiang007/alluxio,EvilMcJerkface/alluxio,apc999/alluxio,WilliamZapata/alluxio,madanadit/alluxio,Reidddddd/mo-alluxio,Reidddddd/alluxio,Reidddddd/alluxio,calvinjia/tachyon,uronce-cc/alluxio,bf8086/alluxio,calvinjia/tachyon,calvinjia/tachyon,Reidddddd/mo-alluxio,maboelhassan/alluxio,maboelhassan/alluxio,aaudiber/alluxio,jsimsa/alluxio,wwjiang007/alluxio,calvinjia/tachyon,maobaolong/alluxio,wwjiang007/alluxio,bf8086/alluxio,EvilMcJerkface/alluxio,wwjiang007/alluxio,apc999/alluxio
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.yarn;
import alluxio.Configuration;
import alluxio.Constants;
import alluxio.util.FormatUtils;
import alluxio.util.io.PathUtils;
import alluxio.util.network.NetworkAddressUtils;
import alluxio.yarn.YarnUtils.YarnContainerType;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
import org.apache.hadoop.yarn.client.api.NMClient;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync.CallbackHandler;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.util.Records;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import javax.annotation.concurrent.NotThreadSafe;
/**
* Actual owner of Alluxio running on Yarn. The YARN ResourceManager will launch this
* ApplicationMaster on an allocated container. The ApplicationMaster communicates with the YARN
* cluster, and handles application execution. It performs operations asynchronously.
*/
@NotThreadSafe
public final class ApplicationMaster implements AMRMClientAsync.CallbackHandler {
  private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE);

  /**
   * Resources needed by the master and worker containers. Yarn will copy these to the container
   * before running the container's command.
   */
  private static final List<String> LOCAL_RESOURCE_NAMES =
      Lists.newArrayList(YarnUtils.ALLUXIO_TARBALL, YarnUtils.ALLUXIO_SETUP_SCRIPT);

  /* Parameters sent from Client. */
  private final int mMasterCpu;
  private final int mWorkerCpu;
  private final int mMasterMemInMB;
  private final int mWorkerMemInMB;
  private final int mRamdiskMemInMB;
  private final int mNumWorkers;
  private final String mMasterAddress;
  private final int mMaxWorkersPerHost;
  private final String mResourcePath;

  private final YarnConfiguration mYarnConf = new YarnConfiguration();
  /** The count starts at 1, then becomes 0 when the application is done. */
  private final CountDownLatch mApplicationDoneLatch;
  /** Client to talk to Resource Manager. */
  private final AMRMClientAsync<ContainerRequest> mRMClient;
  /** Client to talk to Node Manager. */
  private final NMClient mNMClient;
  /** Client Resource Manager Service. */
  private final YarnClient mYarnClient;
  /** Network address of the container allocated for Alluxio master. */
  private String mMasterContainerNetAddress;
  // Volatile because it is re-assigned between the master and worker allocation phases on the
  // main thread, but read from the AMRMClientAsync callback thread in onContainersAllocated.
  private volatile ContainerAllocator mContainerAllocator;

  /**
   * A factory which creates an AMRMClientAsync with a heartbeat interval and callback handler.
   */
  public interface AMRMClientAsyncFactory {
    /**
     * @param heartbeatMs the interval at which to send heartbeats to the resource manager
     * @param handler a handler for callbacks from the resource manager
     * @return a client for making requests to the resource manager
     */
    AMRMClientAsync<ContainerRequest> createAMRMClientAsync(int heartbeatMs,
        CallbackHandler handler);
  }

  /**
   * Convenience constructor which uses the default Alluxio configuration.
   *
   * @param numWorkers the number of workers to launch
   * @param masterAddress the address at which to start the Alluxio master
   * @param resourcePath an hdfs path shared by all yarn nodes which can be used to share resources
   */
  public ApplicationMaster(int numWorkers, String masterAddress, String resourcePath) {
    this(numWorkers, masterAddress, resourcePath, YarnClient.createYarnClient(),
        NMClient.createNMClient(), new AMRMClientAsyncFactory() {
          @Override
          public AMRMClientAsync<ContainerRequest> createAMRMClientAsync(int heartbeatMs,
              CallbackHandler handler) {
            return AMRMClientAsync.createAMRMClientAsync(heartbeatMs, handler);
          }
        });
  }

  /**
   * Constructs an {@link ApplicationMaster}.
   *
   * Clients will be initialized and started during the {@link #start()} method.
   *
   * @param numWorkers the number of workers to launch
   * @param masterAddress the address at which to start the Alluxio master
   * @param resourcePath an hdfs path shared by all yarn nodes which can be used to share resources
   * @param yarnClient the client to use for communicating with Yarn
   * @param nMClient the client to use for communicating with the node manager
   * @param amrmFactory a factory for creating an {@link AMRMClientAsync}
   */
  public ApplicationMaster(int numWorkers, String masterAddress, String resourcePath,
      YarnClient yarnClient, NMClient nMClient, AMRMClientAsyncFactory amrmFactory) {
    mMasterCpu = Configuration.getInt(Constants.INTEGRATION_MASTER_RESOURCE_CPU);
    mMasterMemInMB =
        (int) (Configuration.getBytes(Constants.INTEGRATION_MASTER_RESOURCE_MEM) / Constants.MB);
    mWorkerCpu = Configuration.getInt(Constants.INTEGRATION_WORKER_RESOURCE_CPU);
    // TODO(binfan): request worker container and ramdisk container separately
    // memory for running worker
    mWorkerMemInMB =
        (int) (Configuration.getBytes(Constants.INTEGRATION_WORKER_RESOURCE_MEM) / Constants.MB);
    // memory for running ramdisk
    mRamdiskMemInMB = (int) (Configuration.getBytes(Constants.WORKER_MEMORY_SIZE) / Constants.MB);
    mMaxWorkersPerHost = Configuration.getInt(Constants.INTEGRATION_YARN_WORKERS_PER_HOST_MAX);
    mNumWorkers = numWorkers;
    mMasterAddress = masterAddress;
    mResourcePath = resourcePath;
    mApplicationDoneLatch = new CountDownLatch(1);
    mYarnClient = yarnClient;
    mNMClient = nMClient;
    // Heartbeat to the resource manager every 500ms.
    mRMClient = amrmFactory.createAMRMClientAsync(500, this);
  }

  /**
   * @param args Command line arguments to launch application master
   */
  public static void main(String[] args) {
    Options options = new Options();
    options.addOption("num_workers", true, "Number of Alluxio workers to launch. Default 1");
    options.addOption("master_address", true, "(Required) Address to run Alluxio master");
    options.addOption("resource_path", true,
        "(Required) HDFS path containing the Application Master");

    try {
      LOG.info("Starting Application Master with args {}", Arrays.toString(args));
      CommandLine cliParser = new GnuParser().parse(options, args);
      int numWorkers = Integer.parseInt(cliParser.getOptionValue("num_workers", "1"));
      String masterAddress = cliParser.getOptionValue("master_address");
      String resourcePath = cliParser.getOptionValue("resource_path");

      ApplicationMaster applicationMaster =
          new ApplicationMaster(numWorkers, masterAddress, resourcePath);
      applicationMaster.start();
      applicationMaster.requestAndLaunchContainers();
      applicationMaster.waitForShutdown();
      applicationMaster.stop();
    } catch (Exception e) {
      LOG.error("Error running Application Master", e);
      System.exit(1);
    }
  }

  @Override
  public void onContainersAllocated(List<Container> containers) {
    // Delegates to whichever allocator (master or worker) currently has a request outstanding.
    for (Container container : containers) {
      mContainerAllocator.allocateContainer(container);
    }
  }

  @Override
  public void onContainersCompleted(List<ContainerStatus> statuses) {
    for (ContainerStatus status : statuses) {
      // Releasing worker containers because we already have workers on their host will generate a
      // callback to this method, so we use debug instead of error.
      if (status.getExitStatus() == ContainerExitStatus.ABORTED) {
        LOG.debug("Aborted container {}", status.getContainerId());
      } else {
        LOG.error("Container {} completed with exit status {}", status.getContainerId(),
            status.getExitStatus());
      }
    }
  }

  @Override
  public void onNodesUpdated(List<NodeReport> updated) {}

  @Override
  public void onShutdownRequest() {
    mApplicationDoneLatch.countDown();
  }

  @Override
  public void onError(Throwable t) {
    LOG.error("Error reported by resource manager", t);
  }

  @Override
  public float getProgress() {
    // Progress reporting is not implemented; always report 0 to the resource manager.
    return 0;
  }

  /**
   * Starts the application master.
   *
   * @throws IOException if registering the application master fails due to an IO error
   * @throws YarnException if registering the application master fails due to an internal Yarn error
   */
  public void start() throws IOException, YarnException {
    mNMClient.init(mYarnConf);
    mNMClient.start();

    mRMClient.init(mYarnConf);
    mRMClient.start();

    mYarnClient.init(mYarnConf);
    mYarnClient.start();

    // Register with ResourceManager
    String hostname = NetworkAddressUtils.getLocalHostName();
    mRMClient.registerApplicationMaster(hostname, 0 /* port */, "" /* tracking url */);
    LOG.info("ApplicationMaster registered");
  }

  /**
   * Submits requests for containers until the master and all workers are launched.
   *
   * @throws Exception if an error occurs while requesting or launching containers
   */
  public void requestAndLaunchContainers() throws Exception {
    // Phase 1: allocate and launch exactly one master container.
    Resource masterResource = Records.newRecord(Resource.class);
    masterResource.setMemory(mMasterMemInMB);
    masterResource.setVirtualCores(mMasterCpu);
    mContainerAllocator = new ContainerAllocator("master", 1, 1, masterResource, mYarnClient,
        mRMClient, mMasterAddress);
    List<Container> masterContainers = mContainerAllocator.allocateContainers();
    launchMasterContainer(Iterables.getOnlyElement(masterContainers));

    // Phase 2: allocate and launch the worker containers, capped per host.
    Resource workerResource = Records.newRecord(Resource.class);
    workerResource.setMemory(mWorkerMemInMB + mRamdiskMemInMB);
    workerResource.setVirtualCores(mWorkerCpu);
    mContainerAllocator = new ContainerAllocator("worker", mNumWorkers, mMaxWorkersPerHost,
        workerResource, mYarnClient, mRMClient);
    List<Container> workerContainers = mContainerAllocator.allocateContainers();
    for (Container container : workerContainers) {
      launchWorkerContainer(container);
    }
    LOG.info("Master and workers are launched");
  }

  /**
   * Blocks until the application is signalled done via {@link #onShutdownRequest()}.
   *
   * @throws InterruptedException if interrupted while awaiting shutdown
   */
  public void waitForShutdown() throws InterruptedException {
    mApplicationDoneLatch.await();
  }

  /**
   * Shuts down the application master, unregistering it from Yarn and stopping its clients.
   */
  public void stop() {
    try {
      mRMClient.unregisterApplicationMaster(FinalApplicationStatus.SUCCEEDED, "", "");
    } catch (YarnException | IOException e) {
      // Multi-catch: both exception types were handled identically before.
      LOG.error("Failed to unregister application", e);
    }
    mRMClient.stop();
    // TODO(andrew): Think about whether we should stop mNMClient here
    mYarnClient.stop();
  }

  // Launches the Alluxio master in the given container and records the container's host address
  // so that worker containers can be pointed at it.
  // NOTE(review): on failure the exception is only logged, leaving mMasterContainerNetAddress
  // null while worker launch still proceeds — presumably intentional best-effort; confirm.
  private void launchMasterContainer(Container container) {
    String command = YarnUtils.buildCommand(YarnContainerType.ALLUXIO_MASTER);
    try {
      ContainerLaunchContext ctx = Records.newRecord(ContainerLaunchContext.class);
      ctx.setCommands(Lists.newArrayList(command));
      ctx.setLocalResources(setupLocalResources(mResourcePath));
      ctx.setEnvironment(setupMasterEnvironment());
      LOG.info("Launching container {} for Alluxio master on {} with master command: {}",
          container.getId(), container.getNodeHttpAddress(), command);
      mNMClient.startContainer(container, ctx);
      String containerUri = container.getNodeHttpAddress(); // in the form of 1.2.3.4:8042
      mMasterContainerNetAddress = containerUri.split(":")[0];
      LOG.info("Master address: {}", mMasterContainerNetAddress);
      // (A redundant trailing `return;` was removed here; it was the last statement in the try.)
    } catch (Exception e) {
      LOG.error("Error launching container {}", container.getId(), e);
    }
  }

  // Launches an Alluxio worker in the given container, configured to talk to the master whose
  // address was captured in launchMasterContainer. Failures are logged and swallowed.
  private void launchWorkerContainer(Container container) {
    String command = YarnUtils.buildCommand(YarnContainerType.ALLUXIO_WORKER);

    ContainerLaunchContext ctx = Records.newRecord(ContainerLaunchContext.class);
    ctx.setCommands(Lists.newArrayList(command));
    ctx.setLocalResources(setupLocalResources(mResourcePath));
    ctx.setEnvironment(setupWorkerEnvironment(mMasterContainerNetAddress, mRamdiskMemInMB));

    try {
      LOG.info("Launching container {} for Alluxio worker on {} with worker command: {}",
          container.getId(), container.getNodeHttpAddress(), command);
      mNMClient.startContainer(container, ctx);
    } catch (Exception e) {
      LOG.error("Error launching container {}", container.getId(), e);
    }
  }

  // Builds the map of local resources (tarball + setup script) Yarn must localize into each
  // container before running its command.
  private static Map<String, LocalResource> setupLocalResources(String resourcePath) {
    try {
      Map<String, LocalResource> localResources = new HashMap<>();
      for (String resourceName : LOCAL_RESOURCE_NAMES) {
        localResources.put(resourceName, YarnUtils.createLocalResourceOfFile(
            new YarnConfiguration(), PathUtils.concatPath(resourcePath, resourceName)));
      }
      return localResources;
    } catch (IOException e) {
      throw new RuntimeException("Cannot find resource", e);
    }
  }

  private static Map<String, String> setupMasterEnvironment() {
    return setupCommonEnvironment();
  }

  private static Map<String, String> setupWorkerEnvironment(String masterContainerNetAddress,
      int ramdiskMemInMB) {
    Map<String, String> env = setupCommonEnvironment();
    env.put("ALLUXIO_MASTER_HOSTNAME", masterContainerNetAddress);
    env.put("ALLUXIO_WORKER_MEMORY_SIZE",
        FormatUtils.getSizeFromBytes((long) ramdiskMemInMB * Constants.MB));
    return env;
  }

  private static Map<String, String> setupCommonEnvironment() {
    // Setup the environment needed for the launch context.
    Map<String, String> env = new HashMap<>();
    env.put("ALLUXIO_HOME", ApplicationConstants.Environment.PWD.$());
    return env;
  }
}
|
integration/yarn/src/main/java/alluxio/yarn/ApplicationMaster.java
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 (the
* "License"). You may not use this work except in compliance with the License, which is available
* at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.yarn;
import alluxio.Configuration;
import alluxio.Constants;
import alluxio.util.FormatUtils;
import alluxio.util.io.PathUtils;
import alluxio.util.network.NetworkAddressUtils;
import alluxio.yarn.YarnUtils.YarnContainerType;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
import org.apache.hadoop.yarn.client.api.NMClient;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync.CallbackHandler;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.util.Records;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import javax.annotation.concurrent.NotThreadSafe;
/**
* Actual owner of Alluxio running on Yarn. The YARN ResourceManager will launch this
* ApplicationMaster on an allocated container. The ApplicationMaster communicates with the YARN
* cluster, and handles application execution. It performs operations asynchronously.
*/
@NotThreadSafe
public final class ApplicationMaster implements AMRMClientAsync.CallbackHandler {
  private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE);

  /**
   * Resources needed by the master and worker containers. Yarn will copy these to the container
   * before running the container's command.
   */
  private static final List<String> LOCAL_RESOURCE_NAMES =
      Lists.newArrayList(YarnUtils.ALLUXIO_TARBALL, YarnUtils.ALLUXIO_SETUP_SCRIPT);

  /* Parameters sent from Client. */
  private final int mMasterCpu;
  private final int mWorkerCpu;
  private final int mMasterMemInMB;
  private final int mWorkerMemInMB;
  private final int mRamdiskMemInMB;
  private final int mNumWorkers;
  private final String mMasterAddress;
  private final int mMaxWorkersPerHost;
  private final String mResourcePath;

  private final YarnConfiguration mYarnConf = new YarnConfiguration();
  /** The count starts at 1, then becomes 0 when the application is done. */
  private final CountDownLatch mApplicationDoneLatch;
  /** Client to talk to Resource Manager. */
  private final AMRMClientAsync<ContainerRequest> mRMClient;
  /** Client to talk to Node Manager. */
  private final NMClient mNMClient;
  /** Client Resource Manager Service. */
  private final YarnClient mYarnClient;
  /** Network address of the container allocated for Alluxio master. */
  private String mMasterContainerNetAddress;
  // Re-assigned between the master and worker allocation phases on the main thread, but read
  // from the AMRMClientAsync callback thread in onContainersAllocated — hence volatile.
  private volatile ContainerAllocator mContainerAllocator;

  /**
   * A factory which creates an AMRMClientAsync with a heartbeat interval and callback handler.
   */
  public interface AMRMClientAsyncFactory {
    /**
     * @param heartbeatMs the interval at which to send heartbeats to the resource manager
     * @param handler a handler for callbacks from the resource manager
     * @return a client for making requests to the resource manager
     */
    AMRMClientAsync<ContainerRequest> createAMRMClientAsync(int heartbeatMs,
        CallbackHandler handler);
  }

  /**
   * Convenience constructor which uses the default Alluxio configuration.
   *
   * @param numWorkers the number of workers to launch
   * @param masterAddress the address at which to start the Alluxio master
   * @param resourcePath an hdfs path shared by all yarn nodes which can be used to share resources
   */
  public ApplicationMaster(int numWorkers, String masterAddress, String resourcePath) {
    this(numWorkers, masterAddress, resourcePath, YarnClient.createYarnClient(),
        NMClient.createNMClient(), new AMRMClientAsyncFactory() {
          @Override
          public AMRMClientAsync<ContainerRequest> createAMRMClientAsync(int heartbeatMs,
              CallbackHandler handler) {
            return AMRMClientAsync.createAMRMClientAsync(heartbeatMs, handler);
          }
        });
  }

  /**
   * Constructs an {@link ApplicationMaster}.
   *
   * Clients will be initialized and started during the {@link #start()} method.
   *
   * @param numWorkers the number of workers to launch
   * @param masterAddress the address at which to start the Alluxio master
   * @param resourcePath an hdfs path shared by all yarn nodes which can be used to share resources
   * @param yarnClient the client to use for communicating with Yarn
   * @param nMClient the client to use for communicating with the node manager
   * @param amrmFactory a factory for creating an {@link AMRMClientAsync}
   */
  public ApplicationMaster(int numWorkers, String masterAddress, String resourcePath,
      YarnClient yarnClient, NMClient nMClient, AMRMClientAsyncFactory amrmFactory) {
    mMasterCpu = Configuration.getInt(Constants.INTEGRATION_MASTER_RESOURCE_CPU);
    mMasterMemInMB =
        (int) (Configuration.getBytes(Constants.INTEGRATION_MASTER_RESOURCE_MEM) / Constants.MB);
    mWorkerCpu = Configuration.getInt(Constants.INTEGRATION_WORKER_RESOURCE_CPU);
    // TODO(binfan): request worker container and ramdisk container separately
    // memory for running worker
    mWorkerMemInMB =
        (int) (Configuration.getBytes(Constants.INTEGRATION_WORKER_RESOURCE_MEM) / Constants.MB);
    // memory for running ramdisk
    mRamdiskMemInMB = (int) (Configuration.getBytes(Constants.WORKER_MEMORY_SIZE) / Constants.MB);
    mMaxWorkersPerHost = Configuration.getInt(Constants.INTEGRATION_YARN_WORKERS_PER_HOST_MAX);
    mNumWorkers = numWorkers;
    mMasterAddress = masterAddress;
    mResourcePath = resourcePath;
    mApplicationDoneLatch = new CountDownLatch(1);
    mYarnClient = yarnClient;
    mNMClient = nMClient;
    // Heartbeat to the resource manager every 500ms.
    mRMClient = amrmFactory.createAMRMClientAsync(500, this);
  }

  /**
   * Entry point run inside the AM container: parses CLI args, then drives the full lifecycle
   * (start, launch containers, wait, stop). Any failure exits the JVM with status 1.
   *
   * @param args Command line arguments to launch application master
   */
  public static void main(String[] args) {
    Options options = new Options();
    options.addOption("num_workers", true, "Number of Alluxio workers to launch. Default 1");
    options.addOption("master_address", true, "(Required) Address to run Alluxio master");
    options.addOption("resource_path", true,
        "(Required) HDFS path containing the Application Master");

    try {
      LOG.info("Starting Application Master with args {}", Arrays.toString(args));
      CommandLine cliParser = new GnuParser().parse(options, args);
      int numWorkers = Integer.parseInt(cliParser.getOptionValue("num_workers", "1"));
      String masterAddress = cliParser.getOptionValue("master_address");
      String resourcePath = cliParser.getOptionValue("resource_path");

      ApplicationMaster applicationMaster =
          new ApplicationMaster(numWorkers, masterAddress, resourcePath);
      applicationMaster.start();
      applicationMaster.requestAndLaunchContainers();
      applicationMaster.waitForShutdown();
      applicationMaster.stop();
    } catch (Exception e) {
      LOG.error("Error running Application Master", e);
      System.exit(1);
    }
  }

  @Override
  public void onContainersAllocated(List<Container> containers) {
    // Delegates to whichever allocator (master or worker) currently has a request outstanding.
    for (Container container : containers) {
      mContainerAllocator.allocateContainer(container);
    }
  }

  @Override
  public void onContainersCompleted(List<ContainerStatus> statuses) {
    for (ContainerStatus status : statuses) {
      // Releasing worker containers because we already have workers on their host will generate a
      // callback to this method, so we use debug instead of error.
      if (status.getExitStatus() == ContainerExitStatus.ABORTED) {
        LOG.debug("Aborted container {}", status.getContainerId());
      } else {
        LOG.error("Container {} completed with exit status {}", status.getContainerId(),
            status.getExitStatus());
      }
    }
  }

  @Override
  public void onNodesUpdated(List<NodeReport> updated) {}

  @Override
  public void onShutdownRequest() {
    // Releases waitForShutdown() so main() can proceed to stop().
    mApplicationDoneLatch.countDown();
  }

  @Override
  public void onError(Throwable t) {
    LOG.error("Error reported by resource manager", t);
  }

  @Override
  public float getProgress() {
    // Progress reporting is not implemented; always report 0 to the resource manager.
    return 0;
  }

  /**
   * Starts the application master.
   *
   * @throws IOException if registering the application master fails due to an IO error
   * @throws YarnException if registering the application master fails due to an internal Yarn error
   */
  public void start() throws IOException, YarnException {
    mNMClient.init(mYarnConf);
    mNMClient.start();

    mRMClient.init(mYarnConf);
    mRMClient.start();

    mYarnClient.init(mYarnConf);
    mYarnClient.start();

    // Register with ResourceManager
    String hostname = NetworkAddressUtils.getLocalHostName();
    mRMClient.registerApplicationMaster(hostname, 0 /* port */, "" /* tracking url */);
    LOG.info("ApplicationMaster registered");
  }

  /**
   * Submits requests for containers until the master and all workers are launched.
   *
   * The master container is allocated and launched first so that its host address is known
   * before any worker's environment is built.
   *
   * @throws Exception if an error occurs while requesting or launching containers
   */
  public void requestAndLaunchContainers() throws Exception {
    Resource masterResource = Records.newRecord(Resource.class);
    masterResource.setMemory(mMasterMemInMB);
    masterResource.setVirtualCores(mMasterCpu);
    mContainerAllocator = new ContainerAllocator("master", 1, 1, masterResource, mYarnClient,
        mRMClient, mMasterAddress);
    List<Container> masterContainers = mContainerAllocator.allocateContainers();
    launchMasterContainer(Iterables.getOnlyElement(masterContainers));

    Resource workerResource = Records.newRecord(Resource.class);
    // A single worker container holds both the worker process and its ramdisk.
    workerResource.setMemory(mWorkerMemInMB + mRamdiskMemInMB);
    workerResource.setVirtualCores(mWorkerCpu);
    mContainerAllocator = new ContainerAllocator("worker", mNumWorkers, mMaxWorkersPerHost,
        workerResource, mYarnClient, mRMClient);
    List<Container> workerContainers = mContainerAllocator.allocateContainers();
    for (Container container : workerContainers) {
      launchWorkerContainer(container);
    }
    LOG.info("Master and workers are launched");
  }

  /**
   * Blocks until {@link #onShutdownRequest()} counts the latch down.
   *
   * @throws InterruptedException if interrupted while awaiting shutdown
   */
  public void waitForShutdown() throws InterruptedException {
    mApplicationDoneLatch.await();
  }

  /**
   * Shuts down the application master, unregistering it from Yarn and stopping its clients.
   */
  public void stop() {
    try {
      mRMClient.unregisterApplicationMaster(FinalApplicationStatus.SUCCEEDED, "", "");
    } catch (YarnException e) {
      LOG.error("Failed to unregister application", e);
    } catch (IOException e) {
      LOG.error("Failed to unregister application", e);
    }
    mRMClient.stop();
    // TODO(andrew): Think about whether we should stop mNMClient here
    mYarnClient.stop();
  }

  // Launches the Alluxio master in the given container and records the container host address
  // for later use by worker environments.
  // NOTE(review): a launch failure is only logged; mMasterContainerNetAddress then stays null
  // while worker launch still proceeds — confirm this best-effort behavior is intended.
  private void launchMasterContainer(Container container) {
    String command = YarnUtils.buildCommand(YarnContainerType.ALLUXIO_MASTER);
    try {
      ContainerLaunchContext ctx = Records.newRecord(ContainerLaunchContext.class);
      ctx.setCommands(Lists.newArrayList(command));
      ctx.setLocalResources(setupLocalResources(mResourcePath));
      ctx.setEnvironment(setupMasterEnvironment());
      LOG.info("Launching container {} for Alluxio master on {} with master command: {}",
          container.getId(), container.getNodeHttpAddress(), command);
      mNMClient.startContainer(container, ctx);
      String containerUri = container.getNodeHttpAddress(); // in the form of 1.2.3.4:8042
      mMasterContainerNetAddress = containerUri.split(":")[0];
      LOG.info("Master address: {}", mMasterContainerNetAddress);
      // This return is redundant (last statement in the try block); kept as-is.
      return;
    } catch (Exception e) {
      LOG.error("Error launching container {}", container.getId(), e);
    }
  }

  // Launches an Alluxio worker in the given container, pointed at the previously recorded
  // master address. Failures are logged and swallowed.
  private void launchWorkerContainer(Container container) {
    String command = YarnUtils.buildCommand(YarnContainerType.ALLUXIO_WORKER);

    ContainerLaunchContext ctx = Records.newRecord(ContainerLaunchContext.class);
    ctx.setCommands(Lists.newArrayList(command));
    ctx.setLocalResources(setupLocalResources(mResourcePath));
    ctx.setEnvironment(setupWorkerEnvironment(mMasterContainerNetAddress, mRamdiskMemInMB));

    try {
      LOG.info("Launching container {} for Alluxio worker on {} with worker command: {}",
          container.getId(), container.getNodeHttpAddress(), command);
      mNMClient.startContainer(container, ctx);
    } catch (Exception e) {
      LOG.error("Error launching container {}", container.getId(), e);
    }
  }

  // Builds the map of local resources (tarball + setup script) Yarn localizes into each
  // container before running its command. Missing resources abort with a RuntimeException.
  private static Map<String, LocalResource> setupLocalResources(String resourcePath) {
    try {
      Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();
      for (String resourceName : LOCAL_RESOURCE_NAMES) {
        localResources.put(resourceName, YarnUtils.createLocalResourceOfFile(
            new YarnConfiguration(), PathUtils.concatPath(resourcePath, resourceName)));
      }
      return localResources;
    } catch (IOException e) {
      throw new RuntimeException("Cannot find resource", e);
    }
  }

  private static Map<String, String> setupMasterEnvironment() {
    return setupCommonEnvironment();
  }

  // Worker environment = common environment plus the master hostname and ramdisk size.
  private static Map<String, String> setupWorkerEnvironment(String masterContainerNetAddress,
      int ramdiskMemInMB) {
    Map<String, String> env = setupCommonEnvironment();
    env.put("ALLUXIO_MASTER_HOSTNAME", masterContainerNetAddress);
    env.put("ALLUXIO_WORKER_MEMORY_SIZE",
        FormatUtils.getSizeFromBytes((long) ramdiskMemInMB * Constants.MB));
    return env;
  }

  private static Map<String, String> setupCommonEnvironment() {
    // Setup the environment needed for the launch context.
    Map<String, String> env = new HashMap<String, String>();
    env.put("ALLUXIO_HOME", ApplicationConstants.Environment.PWD.$());
    return env;
  }
}
|
Fix license
|
integration/yarn/src/main/java/alluxio/yarn/ApplicationMaster.java
|
Fix license
|
|
Java
|
apache-2.0
|
b1f303248aff29fdec5e0fa9a9ee8f9c54be22d8
| 0
|
quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus
|
package io.quarkus.smallrye.health.deployment;
import static io.quarkus.arc.processor.Annotations.containsAny;
import static io.quarkus.arc.processor.Annotations.getAnnotations;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BooleanSupplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.eclipse.microprofile.config.ConfigProvider;
import org.eclipse.microprofile.health.Liveness;
import org.eclipse.microprofile.health.Readiness;
import org.eclipse.microprofile.health.Startup;
import org.eclipse.microprofile.health.spi.HealthCheckResponseProvider;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationTarget;
import org.jboss.jandex.AnnotationTarget.Kind;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;
import org.jboss.jandex.IndexView;
import org.jboss.logging.Logger;
import io.quarkus.arc.deployment.AdditionalBeanBuildItem;
import io.quarkus.arc.deployment.AnnotationsTransformerBuildItem;
import io.quarkus.arc.deployment.BeanArchiveIndexBuildItem;
import io.quarkus.arc.deployment.BeanDefiningAnnotationBuildItem;
import io.quarkus.arc.deployment.CustomScopeAnnotationsBuildItem;
import io.quarkus.arc.processor.AnnotationsTransformer;
import io.quarkus.arc.processor.BuiltinScope;
import io.quarkus.arc.processor.DotNames;
import io.quarkus.deployment.Capabilities;
import io.quarkus.deployment.Capability;
import io.quarkus.deployment.Feature;
import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;
import io.quarkus.deployment.builditem.FeatureBuildItem;
import io.quarkus.deployment.builditem.GeneratedResourceBuildItem;
import io.quarkus.deployment.builditem.HotDeploymentWatchedFileBuildItem;
import io.quarkus.deployment.builditem.LaunchModeBuildItem;
import io.quarkus.deployment.builditem.LiveReloadBuildItem;
import io.quarkus.deployment.builditem.ShutdownListenerBuildItem;
import io.quarkus.deployment.builditem.SystemPropertyBuildItem;
import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBuildItem;
import io.quarkus.deployment.pkg.builditem.CurateOutcomeBuildItem;
import io.quarkus.deployment.util.ServiceUtil;
import io.quarkus.deployment.util.WebJarUtil;
import io.quarkus.kubernetes.spi.KubernetesHealthLivenessPathBuildItem;
import io.quarkus.kubernetes.spi.KubernetesHealthReadinessPathBuildItem;
import io.quarkus.kubernetes.spi.KubernetesHealthStartupPathBuildItem;
import io.quarkus.maven.dependency.ResolvedDependency;
import io.quarkus.runtime.LaunchMode;
import io.quarkus.runtime.configuration.ConfigurationException;
import io.quarkus.smallrye.health.deployment.spi.HealthBuildItem;
import io.quarkus.smallrye.health.runtime.QuarkusAsyncHealthCheckFactory;
import io.quarkus.smallrye.health.runtime.ShutdownReadinessListener;
import io.quarkus.smallrye.health.runtime.SmallRyeHealthGroupHandler;
import io.quarkus.smallrye.health.runtime.SmallRyeHealthHandler;
import io.quarkus.smallrye.health.runtime.SmallRyeHealthRecorder;
import io.quarkus.smallrye.health.runtime.SmallRyeHealthRuntimeConfig;
import io.quarkus.smallrye.health.runtime.SmallRyeIndividualHealthGroupHandler;
import io.quarkus.smallrye.health.runtime.SmallRyeLivenessHandler;
import io.quarkus.smallrye.health.runtime.SmallRyeReadinessHandler;
import io.quarkus.smallrye.health.runtime.SmallRyeStartupHandler;
import io.quarkus.smallrye.health.runtime.SmallRyeWellnessHandler;
import io.quarkus.smallrye.openapi.deployment.spi.AddToOpenAPIDefinitionBuildItem;
import io.quarkus.vertx.http.deployment.NonApplicationRootPathBuildItem;
import io.quarkus.vertx.http.deployment.RouteBuildItem;
import io.smallrye.health.SmallRyeHealthReporter;
import io.smallrye.health.api.HealthGroup;
import io.smallrye.health.api.HealthGroups;
import io.smallrye.health.api.Wellness;
import io.vertx.core.Handler;
import io.vertx.ext.web.RoutingContext;
class SmallRyeHealthProcessor {
private static final Logger LOG = Logger.getLogger(SmallRyeHealthProcessor.class);
private static final DotName LIVENESS = DotName.createSimple(Liveness.class.getName());
private static final DotName READINESS = DotName.createSimple(Readiness.class.getName());
private static final DotName STARTUP = DotName.createSimple(Startup.class.getName());
private static final DotName HEALTH_GROUP = DotName.createSimple(HealthGroup.class.getName());
private static final DotName HEALTH_GROUPS = DotName.createSimple(HealthGroups.class.getName());
private static final DotName WELLNESS = DotName.createSimple(Wellness.class.getName());
private static final DotName JAX_RS_PATH = DotName.createSimple("javax.ws.rs.Path");
// For the UI
private static final String HEALTH_UI_WEBJAR_GROUP_ID = "io.smallrye";
private static final String HEALTH_UI_WEBJAR_ARTIFACT_ID = "smallrye-health-ui";
private static final String HEALTH_UI_WEBJAR_PREFIX = "META-INF/resources/health-ui/";
private static final String HEALTH_UI_FINAL_DESTINATION = "META-INF/health-ui-files";
private static final String JS_FILE_TO_UPDATE = "healthui.js";
private static final String INDEX_FILE_TO_UPDATE = "index.html";
// Branding files to monitor for changes
private static final String BRANDING_DIR = "META-INF/branding/";
private static final String BRANDING_LOGO_GENERAL = BRANDING_DIR + "logo.png";
private static final String BRANDING_LOGO_MODULE = BRANDING_DIR + "smallrye-health-ui.png";
private static final String BRANDING_STYLE_GENERAL = BRANDING_DIR + "style.css";
private static final String BRANDING_STYLE_MODULE = BRANDING_DIR + "smallrye-health-ui.css";
private static final String BRANDING_FAVICON_GENERAL = BRANDING_DIR + "favicon.ico";
private static final String BRANDING_FAVICON_MODULE = BRANDING_DIR + "smallrye-health-ui.ico";
/**
 * Build-step condition used with {@code @BuildStep(onlyIf = OpenAPIIncluded.class)}:
 * evaluates to {@code true} when the health endpoints should be added to the
 * generated OpenAPI document.
 */
static class OpenAPIIncluded implements BooleanSupplier {
    // Injected by the build-step framework before getAsBoolean() is evaluated.
    HealthBuildTimeConfig config;

    public boolean getAsBoolean() {
        return config.openapiIncluded;
    }
}
HealthBuildTimeConfig config;
/**
 * Declares every branding override file (general and module-specific logo,
 * stylesheet and favicon) as a hot-deployment-watched file so changes to them
 * are picked up in dev mode.
 */
@BuildStep
List<HotDeploymentWatchedFileBuildItem> brandingFiles() {
    List<HotDeploymentWatchedFileBuildItem> watched = new ArrayList<>();
    for (String brandingFile : new String[] {
            BRANDING_LOGO_GENERAL,
            BRANDING_STYLE_GENERAL,
            BRANDING_FAVICON_GENERAL,
            BRANDING_LOGO_MODULE,
            BRANDING_STYLE_MODULE,
            BRANDING_FAVICON_MODULE }) {
        watched.add(new HotDeploymentWatchedFileBuildItem(brandingFile));
    }
    return watched;
}
/**
 * Registers the health-check beans contributed by other extensions, unless
 * default procedures are disabled either by the Quarkus build-time flag or by
 * the MicroProfile {@code mp.health.disable-default-procedures} property.
 */
@BuildStep
void healthCheck(BuildProducer<AdditionalBeanBuildItem> buildItemBuildProducer,
        List<HealthBuildItem> healthBuildItems) {
    // Check the build-time flag first so the MP config lookup is skipped when
    // extension checks are disabled anyway (same short-circuit as before).
    if (!config.extensionsEnabled) {
        return;
    }
    boolean disabledBySpecProperty = ConfigProvider.getConfig()
            .getOptionalValue("mp.health.disable-default-procedures", boolean.class)
            .orElse(false);
    if (disabledBySpecProperty) {
        return;
    }
    for (HealthBuildItem buildItem : healthBuildItems) {
        if (buildItem.isEnabled()) {
            buildItemBuildProducer.produce(new AdditionalBeanBuildItem(buildItem.getHealthCheckClass()));
        }
    }
}
/**
 * Core build step (recorded at static init): registers the SmallRye Health
 * feature, makes the health annotations bean-defining so annotated classes
 * become beans even without an explicit scope, adds the supporting runtime
 * beans, and records the single {@link HealthCheckResponseProvider} discovered
 * via ServiceLoader metadata.
 *
 * @throws IllegalStateException  if zero or more than one provider is found
 * @throws ClassNotFoundException if the provider class cannot be loaded
 */
@BuildStep
@Record(ExecutionTime.STATIC_INIT)
@SuppressWarnings("unchecked")
void build(SmallRyeHealthRecorder recorder,
        BuildProducer<FeatureBuildItem> feature,
        BuildProducer<AdditionalBeanBuildItem> additionalBean,
        BuildProducer<BeanDefiningAnnotationBuildItem> beanDefiningAnnotation)
        throws IOException, ClassNotFoundException {
    feature.produce(new FeatureBuildItem(Feature.SMALLRYE_HEALTH));

    // Discover the beans annotated with @Health, @Liveness, @Readiness, @Startup, @HealthGroup,
    // @HealthGroups and @Wellness even if no scope is defined
    beanDefiningAnnotation.produce(new BeanDefiningAnnotationBuildItem(LIVENESS));
    beanDefiningAnnotation.produce(new BeanDefiningAnnotationBuildItem(READINESS));
    beanDefiningAnnotation.produce(new BeanDefiningAnnotationBuildItem(STARTUP));
    beanDefiningAnnotation.produce(new BeanDefiningAnnotationBuildItem(HEALTH_GROUP));
    beanDefiningAnnotation.produce(new BeanDefiningAnnotationBuildItem(HEALTH_GROUPS));
    beanDefiningAnnotation.produce(new BeanDefiningAnnotationBuildItem(WELLNESS));

    // Add additional beans
    additionalBean.produce(new AdditionalBeanBuildItem(QuarkusAsyncHealthCheckFactory.class));
    additionalBean.produce(new AdditionalBeanBuildItem(SmallRyeHealthReporter.class));

    // Make ArC discover @HealthGroup as a qualifier
    additionalBean.produce(new AdditionalBeanBuildItem(HealthGroup.class));

    // Discover and register the HealthCheckResponseProvider; exactly one
    // implementation must be present on the classpath.
    Set<String> providers = ServiceUtil.classNamesNamedIn(getClass().getClassLoader(),
            "META-INF/services/" + HealthCheckResponseProvider.class.getName());
    if (providers.isEmpty()) {
        throw new IllegalStateException("No HealthCheckResponseProvider implementation found.");
    } else if (providers.size() > 1) {
        throw new IllegalStateException(
                String.format("Multiple HealthCheckResponseProvider implementations found: %s", providers));
    }
    // Load via the TCCL so the provider class is visible across launch modes.
    final String provider = providers.iterator().next();
    final Class<? extends HealthCheckResponseProvider> responseProvider = (Class<? extends HealthCheckResponseProvider>) Class
            .forName(provider, true, Thread.currentThread().getContextClassLoader());
    recorder.registerHealthCheckResponseProvider(responseProvider);
}
/**
 * Registers the HTTP routes for the health, liveness, readiness, health-group,
 * wellness and startup endpoints under the non-application root path, plus one
 * route per discovered health group. Also warns when JAX-RS {@code @Path} is
 * combined with MP Health annotations (where it has no effect).
 */
@BuildStep
public void defineHealthRoutes(BuildProducer<RouteBuildItem> routes,
        BeanArchiveIndexBuildItem beanArchiveIndex,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        SmallRyeHealthConfig healthConfig) {
    IndexView index = beanArchiveIndex.getIndex();

    // log a warning if users try to use MP Health annotations with JAX-RS @Path
    warnIfJaxRsPathUsed(index, LIVENESS);
    warnIfJaxRsPathUsed(index, READINESS);
    warnIfJaxRsPathUsed(index, STARTUP);
    warnIfJaxRsPathUsed(index, WELLNESS);

    // Register the health handler
    routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .route(healthConfig.rootPath)
            .routeConfigKey("quarkus.smallrye-health.root-path")
            .handler(new SmallRyeHealthHandler())
            .displayOnNotFoundPage()
            .blockingRoute()
            .build());

    // Register the liveness handler
    routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .nestedRoute(healthConfig.rootPath, healthConfig.livenessPath)
            .handler(new SmallRyeLivenessHandler())
            .displayOnNotFoundPage()
            .blockingRoute()
            .build());

    // Register the readiness handler
    routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .nestedRoute(healthConfig.rootPath, healthConfig.readinessPath)
            .handler(new SmallRyeReadinessHandler())
            .displayOnNotFoundPage()
            .blockingRoute()
            .build());

    // Find all health groups
    Set<String> healthGroups = new HashSet<>();
    // with simple @HealthGroup annotations
    for (AnnotationInstance healthGroupAnnotation : index.getAnnotations(HEALTH_GROUP)) {
        healthGroups.add(healthGroupAnnotation.value().asString());
    }
    // with @HealthGroups repeatable annotations
    for (AnnotationInstance healthGroupsAnnotation : index.getAnnotations(HEALTH_GROUPS)) {
        for (AnnotationInstance healthGroupAnnotation : healthGroupsAnnotation.value().asNestedArray()) {
            healthGroups.add(healthGroupAnnotation.value().asString());
        }
    }

    // Register the health group handlers: one umbrella route plus a shared
    // handler instance for each individual group path.
    routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .nestedRoute(healthConfig.rootPath, healthConfig.groupPath)
            .handler(new SmallRyeHealthGroupHandler())
            .displayOnNotFoundPage()
            .blockingRoute()
            .build());

    SmallRyeIndividualHealthGroupHandler handler = new SmallRyeIndividualHealthGroupHandler();
    for (String healthGroup : healthGroups) {
        routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
                .nestedRoute(healthConfig.rootPath, healthConfig.groupPath + "/" + healthGroup)
                .handler(handler)
                .displayOnNotFoundPage()
                .blockingRoute()
                .build());
    }

    // Register the wellness handler
    routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .nestedRoute(healthConfig.rootPath, healthConfig.wellnessPath)
            .handler(new SmallRyeWellnessHandler())
            .displayOnNotFoundPage()
            .blockingRoute()
            .build());

    // Register the startup handler
    routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .nestedRoute(healthConfig.rootPath, healthConfig.startupPath)
            .handler(new SmallRyeStartupHandler())
            .displayOnNotFoundPage()
            .blockingRoute()
            .build());
}
/**
 * Mirrors the Quarkus context-propagation flag into the JVM system property
 * {@code io.smallrye.health.context.propagation} that SmallRye Health reads.
 */
@BuildStep
public void translateSmallRyeConfigValues(SmallRyeHealthConfig healthConfig,
        BuildProducer<SystemPropertyBuildItem> systemProperties) {
    if (!healthConfig.contextPropagation) {
        return;
    }
    systemProperties.produce(new SystemPropertyBuildItem("io.smallrye.health.context.propagation", "true"));
}
/**
 * When the SmallRye OpenAPI capability is present (and the {@link OpenAPIIncluded}
 * condition holds), contributes a filter that documents the health, liveness,
 * readiness and startup endpoints in the generated OpenAPI model.
 */
@BuildStep(onlyIf = OpenAPIIncluded.class)
public void includeInOpenAPIEndpoint(BuildProducer<AddToOpenAPIDefinitionBuildItem> openAPIProducer,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        Capabilities capabilities,
        SmallRyeHealthConfig healthConfig) {
    // Add to OpenAPI if OpenAPI is available
    if (capabilities.isPresent(Capability.SMALLRYE_OPENAPI)) {
        // Sub-paths are resolved relative to the already-resolved health root.
        String healthRootPath = nonApplicationRootPathBuildItem.resolvePath(healthConfig.rootPath);

        HealthOpenAPIFilter filter = new HealthOpenAPIFilter(healthRootPath,
                nonApplicationRootPathBuildItem.resolveNestedPath(healthRootPath, healthConfig.livenessPath),
                nonApplicationRootPathBuildItem.resolveNestedPath(healthRootPath, healthConfig.readinessPath),
                nonApplicationRootPathBuildItem.resolveNestedPath(healthRootPath, healthConfig.startupPath));
        openAPIProducer.produce(new AddToOpenAPIDefinitionBuildItem(filter));
    }
}
/**
 * Logs a warning for every class or method that combines the given health
 * annotation with JAX-RS {@code @Path}, since {@code @Path} has no effect on
 * health procedures.
 */
private void warnIfJaxRsPathUsed(IndexView index, DotName healthAnnotation) {
    for (AnnotationInstance instance : index.getAnnotations(healthAnnotation)) {
        AnnotationTarget target = instance.target();
        boolean hasJaxRsPath;
        switch (target.kind()) {
            case CLASS:
                hasJaxRsPath = target.asClass().classAnnotation(JAX_RS_PATH) != null;
                break;
            case METHOD:
                hasJaxRsPath = target.asMethod().hasAnnotation(JAX_RS_PATH);
                break;
            default:
                hasJaxRsPath = false;
                break;
        }
        if (hasJaxRsPath) {
            LOG.warnv(
                    "The use of @Path has no effect when @{0} is used and should therefore be removed. Offending target is {1}: {2}",
                    healthAnnotation.withoutPackagePrefix(), target.kind(), target);
        }
    }
}
/**
 * Publishes the resolved liveness, readiness and startup endpoint paths so
 * the Kubernetes extension can wire them into the generated probe
 * definitions.
 */
@BuildStep
public void kubernetes(NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        SmallRyeHealthConfig healthConfig,
        BuildProducer<KubernetesHealthLivenessPathBuildItem> livenessPathItemProducer,
        BuildProducer<KubernetesHealthReadinessPathBuildItem> readinessPathItemProducer,
        BuildProducer<KubernetesHealthStartupPathBuildItem> startupPathItemProducer) {
    livenessPathItemProducer.produce(
            new KubernetesHealthLivenessPathBuildItem(
                    nonApplicationRootPathBuildItem.resolveNestedPath(healthConfig.rootPath, healthConfig.livenessPath)));
    readinessPathItemProducer.produce(
            new KubernetesHealthReadinessPathBuildItem(
                    nonApplicationRootPathBuildItem.resolveNestedPath(healthConfig.rootPath, healthConfig.readinessPath)));
    startupPathItemProducer.produce(
            new KubernetesHealthStartupPathBuildItem(
                    nonApplicationRootPathBuildItem.resolveNestedPath(healthConfig.rootPath, healthConfig.startupPath)));
}
/**
 * Installs a {@code ShutdownReadinessListener} as an application shutdown
 * listener (presumably to report readiness DOWN during shutdown —
 * NOTE(review): confirm against the runtime module).
 */
@BuildStep
ShutdownListenerBuildItem shutdownListener() {
    return new ShutdownListenerBuildItem(new ShutdownReadinessListener());
}
/**
 * Adds a default {@code @Singleton} scope to classes and methods that carry a
 * health annotation but declare no scope — neither directly nor via a scoped
 * stereotype — so they are discovered as CDI beans.
 */
@BuildStep
AnnotationsTransformerBuildItem annotationTransformer(BeanArchiveIndexBuildItem beanArchiveIndex,
        CustomScopeAnnotationsBuildItem scopes) {
    // Transform health checks that are not annotated with a scope or a stereotype with a default scope

    // Collect stereotype annotations that themselves declare a default scope:
    // a class annotated with one of these must not be re-scoped here.
    Set<DotName> stereotypeAnnotations = new HashSet<>();
    for (AnnotationInstance annotation : beanArchiveIndex.getIndex().getAnnotations(DotNames.STEREOTYPE)) {
        ClassInfo annotationClass = beanArchiveIndex.getIndex().getClassByName(annotation.name());
        if (annotationClass != null && scopes.isScopeIn(annotationClass.classAnnotations())) {
            // Stereotype annotation with a default scope
            stereotypeAnnotations.add(annotationClass.name());
        }
    }
    // All six health annotations. Fix: the previous ArrayList was presized to 5
    // but received six adds (stale hint from before WELLNESS was introduced);
    // the list is only ever read, so an immutable List is the better fit.
    List<DotName> healthAnnotations = List.of(
            LIVENESS, READINESS, STARTUP, HEALTH_GROUP, HEALTH_GROUPS, WELLNESS);

    return new AnnotationsTransformerBuildItem(new AnnotationsTransformer() {

        @Override
        public boolean appliesTo(Kind kind) {
            // Only class- and method-level targets can carry these annotations.
            return kind == Kind.CLASS || kind == Kind.METHOD;
        }

        @Override
        public void transform(TransformationContext ctx) {
            if (ctx.getAnnotations().isEmpty()) {
                return;
            }
            Collection<AnnotationInstance> annotations;
            if (ctx.isClass()) {
                annotations = ctx.getAnnotations();
                // A scoped stereotype already supplies a scope; leave it alone.
                if (containsAny(annotations, stereotypeAnnotations)) {
                    return;
                }
            } else {
                annotations = getAnnotations(Kind.METHOD, ctx.getAnnotations());
            }
            if (scopes.isScopeIn(annotations)) {
                // An explicit scope is already present.
                return;
            }
            if (containsAny(annotations, healthAnnotations)) {
                ctx.transform().add(BuiltinScope.SINGLETON.getName()).done();
            }
        }
    });
}
// UI
/**
 * Copies the smallrye-health-ui webjar content into place and rewrites its
 * static files to point at the configured health endpoint.
 * <p>
 * In dev/test the files are copied to a temp directory (with hot reload of
 * branding overrides); in production they are embedded as generated resources
 * and registered for native-image inclusion.
 *
 * @throws ConfigurationException if the UI root path is configured as "/"
 */
@BuildStep
void registerUiExtension(
        BuildProducer<GeneratedResourceBuildItem> generatedResourceProducer,
        BuildProducer<NativeImageResourceBuildItem> nativeImageResourceProducer,
        BuildProducer<SmallRyeHealthBuildItem> smallRyeHealthBuildProducer,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        SmallRyeHealthConfig healthConfig,
        CurateOutcomeBuildItem curateOutcomeBuildItem,
        LaunchModeBuildItem launchModeBuildItem,
        LiveReloadBuildItem liveReloadBuildItem) throws Exception {
    if (shouldInclude(launchModeBuildItem, healthConfig)) {
        // "/" would shadow every other route, so it is rejected outright.
        if ("/".equals(healthConfig.ui.rootPath)) {
            throw new ConfigurationException(
                    "quarkus.smallrye-health.root-path-ui was set to \"/\", this is not allowed as it blocks the application from serving anything else.",
                    Set.of("quarkus.smallrye-health.root-path-ui"));
        }

        String healthPath = nonApplicationRootPathBuildItem.resolvePath(healthConfig.rootPath);
        String healthUiPath = nonApplicationRootPathBuildItem.resolvePath(healthConfig.ui.rootPath);

        ResolvedDependency artifact = WebJarUtil.getAppArtifact(curateOutcomeBuildItem, HEALTH_UI_WEBJAR_GROUP_ID,
                HEALTH_UI_WEBJAR_ARTIFACT_ID);

        if (launchModeBuildItem.getLaunchMode().isDevOrTest()) {
            Path tempPath = WebJarUtil.copyResourcesForDevOrTest(liveReloadBuildItem, curateOutcomeBuildItem,
                    launchModeBuildItem,
                    artifact,
                    HEALTH_UI_WEBJAR_PREFIX);
            // The on-disk URL rewrite is only performed in DEVELOPMENT mode.
            if (launchModeBuildItem.getLaunchMode().equals(LaunchMode.DEVELOPMENT)) {
                updateApiUrl(tempPath.resolve(JS_FILE_TO_UPDATE), healthPath);
                updateApiUrl(tempPath.resolve(INDEX_FILE_TO_UPDATE), healthPath);
            }
            smallRyeHealthBuildProducer
                    .produce(new SmallRyeHealthBuildItem(tempPath.toAbsolutePath().toString(), healthUiPath));

            // Handle live reload of branding files
            if (liveReloadBuildItem.isLiveReload() && !liveReloadBuildItem.getChangedResources().isEmpty()) {
                WebJarUtil.hotReloadBrandingChanges(curateOutcomeBuildItem, launchModeBuildItem, artifact,
                        liveReloadBuildItem.getChangedResources());
            }
        } else {
            Map<String, byte[]> files = WebJarUtil.copyResourcesForProduction(curateOutcomeBuildItem, artifact,
                    HEALTH_UI_WEBJAR_PREFIX);
            for (Map.Entry<String, byte[]> file : files.entrySet()) {
                String fileName = file.getKey();
                byte[] content = file.getValue();
                // Only the JS and index files embed the /health URL.
                if (fileName.endsWith(JS_FILE_TO_UPDATE) || fileName.endsWith(INDEX_FILE_TO_UPDATE)) {
                    content = updateApiUrl(new String(content, StandardCharsets.UTF_8), healthPath)
                            .getBytes(StandardCharsets.UTF_8);
                }
                fileName = HEALTH_UI_FINAL_DESTINATION + "/" + fileName;
                generatedResourceProducer.produce(new GeneratedResourceBuildItem(fileName, content));
                nativeImageResourceProducer.produce(new NativeImageResourceBuildItem(fileName));
            }

            smallRyeHealthBuildProducer.produce(new SmallRyeHealthBuildItem(HEALTH_UI_FINAL_DESTINATION, healthUiPath));
        }
    }
}
/**
 * Registers the HTTP routes serving the Health UI — the UI root path and a
 * wildcard route for its static assets — only when the UI is included for the
 * current launch mode.
 */
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
void registerHealthUiHandler(
        BuildProducer<RouteBuildItem> routeProducer,
        SmallRyeHealthRecorder recorder,
        SmallRyeHealthRuntimeConfig runtimeConfig,
        SmallRyeHealthBuildItem smallRyeHealthBuildItem,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        LaunchModeBuildItem launchMode,
        SmallRyeHealthConfig healthConfig) {
    if (shouldInclude(launchMode, healthConfig)) {
        // One handler instance serves both the root route and the wildcard route.
        Handler<RoutingContext> handler = recorder.uiHandler(smallRyeHealthBuildItem.getHealthUiFinalDestination(),
                smallRyeHealthBuildItem.getHealthUiPath(), runtimeConfig);
        routeProducer.produce(nonApplicationRootPathBuildItem.routeBuilder()
                .route(healthConfig.ui.rootPath)
                .displayOnNotFoundPage("Health UI")
                .routeConfigKey("quarkus.smallrye-health.ui.root-path")
                .handler(handler)
                .build());
        routeProducer.produce(nonApplicationRootPathBuildItem.routeBuilder()
                .route(healthConfig.ui.rootPath + "*")
                .handler(handler)
                .build());
    }
}
/**
 * Rewrites the health API URL inside a copied UI file on disk.
 *
 * @param fileToUpdate the extracted UI file (e.g. healthui.js or index.html)
 * @param healthPath   the resolved health root path to substitute
 * @throws IOException if the file cannot be read or written
 */
private void updateApiUrl(Path fileToUpdate, String healthPath) throws IOException {
    String content = new String(Files.readAllBytes(fileToUpdate), StandardCharsets.UTF_8);
    // Fix: updateApiUrl(String, String) is two String.replace calls and can
    // never return null, so the previous "if (result != null)" guard was dead
    // code; write the rewritten content unconditionally.
    Files.write(fileToUpdate, updateApiUrl(content, healthPath).getBytes(StandardCharsets.UTF_8));
}
/**
 * Replaces the default {@code /health} endpoint references embedded in the
 * UI's static files (the JS {@code url} assignment and the HTML input
 * placeholder) with the actual configured health path.
 */
public String updateApiUrl(String original, String healthPath) {
    String withUrlReplaced = original.replace("url = \"/health\";", "url = \"" + healthPath + "\";");
    return withUrlReplaced.replace("placeholder=\"/health\"", "placeholder=\"" + healthPath + "\"");
}
// The UI is bundled automatically in dev/test; in other modes only when the
// alwaysInclude UI config flag is set.
private static boolean shouldInclude(LaunchModeBuildItem launchMode, SmallRyeHealthConfig healthConfig) {
    return launchMode.getLaunchMode().isDevOrTest() || healthConfig.ui.alwaysInclude;
}
}
|
extensions/smallrye-health/deployment/src/main/java/io/quarkus/smallrye/health/deployment/SmallRyeHealthProcessor.java
|
package io.quarkus.smallrye.health.deployment;
import static io.quarkus.arc.processor.Annotations.containsAny;
import static io.quarkus.arc.processor.Annotations.getAnnotations;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BooleanSupplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.eclipse.microprofile.config.ConfigProvider;
import org.eclipse.microprofile.health.Liveness;
import org.eclipse.microprofile.health.Readiness;
import org.eclipse.microprofile.health.Startup;
import org.eclipse.microprofile.health.spi.HealthCheckResponseProvider;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationTarget;
import org.jboss.jandex.AnnotationTarget.Kind;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;
import org.jboss.jandex.IndexView;
import org.jboss.logging.Logger;
import io.quarkus.arc.deployment.AdditionalBeanBuildItem;
import io.quarkus.arc.deployment.AnnotationsTransformerBuildItem;
import io.quarkus.arc.deployment.BeanArchiveIndexBuildItem;
import io.quarkus.arc.deployment.BeanDefiningAnnotationBuildItem;
import io.quarkus.arc.deployment.CustomScopeAnnotationsBuildItem;
import io.quarkus.arc.processor.AnnotationsTransformer;
import io.quarkus.arc.processor.BuiltinScope;
import io.quarkus.arc.processor.DotNames;
import io.quarkus.deployment.Capabilities;
import io.quarkus.deployment.Capability;
import io.quarkus.deployment.Feature;
import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;
import io.quarkus.deployment.builditem.FeatureBuildItem;
import io.quarkus.deployment.builditem.GeneratedResourceBuildItem;
import io.quarkus.deployment.builditem.HotDeploymentWatchedFileBuildItem;
import io.quarkus.deployment.builditem.LaunchModeBuildItem;
import io.quarkus.deployment.builditem.LiveReloadBuildItem;
import io.quarkus.deployment.builditem.ShutdownListenerBuildItem;
import io.quarkus.deployment.builditem.SystemPropertyBuildItem;
import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBuildItem;
import io.quarkus.deployment.pkg.builditem.CurateOutcomeBuildItem;
import io.quarkus.deployment.util.ServiceUtil;
import io.quarkus.deployment.util.WebJarUtil;
import io.quarkus.kubernetes.spi.KubernetesHealthLivenessPathBuildItem;
import io.quarkus.kubernetes.spi.KubernetesHealthReadinessPathBuildItem;
import io.quarkus.kubernetes.spi.KubernetesHealthStartupPathBuildItem;
import io.quarkus.maven.dependency.ResolvedDependency;
import io.quarkus.runtime.LaunchMode;
import io.quarkus.runtime.configuration.ConfigurationException;
import io.quarkus.smallrye.health.deployment.spi.HealthBuildItem;
import io.quarkus.smallrye.health.runtime.QuarkusAsyncHealthCheckFactory;
import io.quarkus.smallrye.health.runtime.ShutdownReadinessListener;
import io.quarkus.smallrye.health.runtime.SmallRyeHealthGroupHandler;
import io.quarkus.smallrye.health.runtime.SmallRyeHealthHandler;
import io.quarkus.smallrye.health.runtime.SmallRyeHealthRecorder;
import io.quarkus.smallrye.health.runtime.SmallRyeHealthRuntimeConfig;
import io.quarkus.smallrye.health.runtime.SmallRyeIndividualHealthGroupHandler;
import io.quarkus.smallrye.health.runtime.SmallRyeLivenessHandler;
import io.quarkus.smallrye.health.runtime.SmallRyeReadinessHandler;
import io.quarkus.smallrye.health.runtime.SmallRyeStartupHandler;
import io.quarkus.smallrye.health.runtime.SmallRyeWellnessHandler;
import io.quarkus.smallrye.openapi.deployment.spi.AddToOpenAPIDefinitionBuildItem;
import io.quarkus.vertx.http.deployment.NonApplicationRootPathBuildItem;
import io.quarkus.vertx.http.deployment.RouteBuildItem;
import io.smallrye.health.SmallRyeHealthReporter;
import io.smallrye.health.api.HealthGroup;
import io.smallrye.health.api.HealthGroups;
import io.smallrye.health.api.Wellness;
import io.vertx.core.Handler;
import io.vertx.ext.web.RoutingContext;
class SmallRyeHealthProcessor {
private static final Logger LOG = Logger.getLogger(SmallRyeHealthProcessor.class);
private static final DotName LIVENESS = DotName.createSimple(Liveness.class.getName());
private static final DotName READINESS = DotName.createSimple(Readiness.class.getName());
private static final DotName STARTUP = DotName.createSimple(Startup.class.getName());
private static final DotName HEALTH_GROUP = DotName.createSimple(HealthGroup.class.getName());
private static final DotName HEALTH_GROUPS = DotName.createSimple(HealthGroups.class.getName());
private static final DotName WELLNESS = DotName.createSimple(Wellness.class.getName());
private static final DotName JAX_RS_PATH = DotName.createSimple("javax.ws.rs.Path");
// For the UI
private static final String HEALTH_UI_WEBJAR_GROUP_ID = "io.smallrye";
private static final String HEALTH_UI_WEBJAR_ARTIFACT_ID = "smallrye-health-ui";
private static final String HEALTH_UI_WEBJAR_PREFIX = "META-INF/resources/health-ui/";
private static final String HEALTH_UI_FINAL_DESTINATION = "META-INF/health-ui-files";
private static final String JS_FILE_TO_UPDATE = "healthui.js";
private static final String INDEX_FILE_TO_UPDATE = "index.html";
// Branding files to monitor for changes
private static final String BRANDING_DIR = "META-INF/branding/";
private static final String BRANDING_LOGO_GENERAL = BRANDING_DIR + "logo.png";
private static final String BRANDING_LOGO_MODULE = BRANDING_DIR + "smallrye-health-ui.png";
private static final String BRANDING_STYLE_GENERAL = BRANDING_DIR + "style.css";
private static final String BRANDING_STYLE_MODULE = BRANDING_DIR + "smallrye-health-ui.css";
private static final String BRANDING_FAVICON_GENERAL = BRANDING_DIR + "favicon.ico";
private static final String BRANDING_FAVICON_MODULE = BRANDING_DIR + "smallrye-health-ui.ico";
/**
 * Build-step condition used with {@code @BuildStep(onlyIf = OpenAPIIncluded.class)}:
 * evaluates to {@code true} when the health endpoints should be added to the
 * generated OpenAPI document.
 */
static class OpenAPIIncluded implements BooleanSupplier {
    // Injected by the build-step framework before getAsBoolean() is evaluated.
    HealthBuildTimeConfig config;

    public boolean getAsBoolean() {
        return config.openapiIncluded;
    }
}
HealthBuildTimeConfig config;
/**
 * Declares every branding override file (general and module-specific logo,
 * stylesheet and favicon) as a hot-deployment-watched file so changes to them
 * are picked up in dev mode.
 */
@BuildStep
List<HotDeploymentWatchedFileBuildItem> brandingFiles() {
    List<HotDeploymentWatchedFileBuildItem> watched = new ArrayList<>();
    for (String brandingFile : new String[] {
            BRANDING_LOGO_GENERAL,
            BRANDING_STYLE_GENERAL,
            BRANDING_FAVICON_GENERAL,
            BRANDING_LOGO_MODULE,
            BRANDING_STYLE_MODULE,
            BRANDING_FAVICON_MODULE }) {
        watched.add(new HotDeploymentWatchedFileBuildItem(brandingFile));
    }
    return watched;
}
/**
 * Registers the health-check beans contributed by other extensions, unless
 * default procedures are disabled either by the Quarkus build-time flag or by
 * the MicroProfile {@code mp.health.disable-default-procedures} property.
 */
@BuildStep
void healthCheck(BuildProducer<AdditionalBeanBuildItem> buildItemBuildProducer,
        List<HealthBuildItem> healthBuildItems) {
    // Check the build-time flag first so the MP config lookup is skipped when
    // extension checks are disabled anyway (same short-circuit as before).
    if (!config.extensionsEnabled) {
        return;
    }
    boolean disabledBySpecProperty = ConfigProvider.getConfig()
            .getOptionalValue("mp.health.disable-default-procedures", boolean.class)
            .orElse(false);
    if (disabledBySpecProperty) {
        return;
    }
    for (HealthBuildItem buildItem : healthBuildItems) {
        if (buildItem.isEnabled()) {
            buildItemBuildProducer.produce(new AdditionalBeanBuildItem(buildItem.getHealthCheckClass()));
        }
    }
}
/**
 * Core build step (recorded at static init): registers the SmallRye Health
 * feature, makes the health annotations bean-defining so annotated classes
 * become beans even without an explicit scope, adds the supporting runtime
 * beans, and records the single {@link HealthCheckResponseProvider} discovered
 * via ServiceLoader metadata.
 *
 * @throws IllegalStateException  if zero or more than one provider is found
 * @throws ClassNotFoundException if the provider class cannot be loaded
 */
@BuildStep
@Record(ExecutionTime.STATIC_INIT)
@SuppressWarnings("unchecked")
void build(SmallRyeHealthRecorder recorder,
        BuildProducer<FeatureBuildItem> feature,
        BuildProducer<AdditionalBeanBuildItem> additionalBean,
        BuildProducer<BeanDefiningAnnotationBuildItem> beanDefiningAnnotation)
        throws IOException, ClassNotFoundException {
    feature.produce(new FeatureBuildItem(Feature.SMALLRYE_HEALTH));

    // Discover the beans annotated with @Health, @Liveness, @Readiness, @Startup, @HealthGroup,
    // @HealthGroups and @Wellness even if no scope is defined
    beanDefiningAnnotation.produce(new BeanDefiningAnnotationBuildItem(LIVENESS));
    beanDefiningAnnotation.produce(new BeanDefiningAnnotationBuildItem(READINESS));
    beanDefiningAnnotation.produce(new BeanDefiningAnnotationBuildItem(STARTUP));
    beanDefiningAnnotation.produce(new BeanDefiningAnnotationBuildItem(HEALTH_GROUP));
    beanDefiningAnnotation.produce(new BeanDefiningAnnotationBuildItem(HEALTH_GROUPS));
    beanDefiningAnnotation.produce(new BeanDefiningAnnotationBuildItem(WELLNESS));

    // Add additional beans
    additionalBean.produce(new AdditionalBeanBuildItem(QuarkusAsyncHealthCheckFactory.class));
    additionalBean.produce(new AdditionalBeanBuildItem(SmallRyeHealthReporter.class));

    // Make ArC discover @HealthGroup as a qualifier
    additionalBean.produce(new AdditionalBeanBuildItem(HealthGroup.class));

    // Discover and register the HealthCheckResponseProvider; exactly one
    // implementation must be present on the classpath.
    Set<String> providers = ServiceUtil.classNamesNamedIn(getClass().getClassLoader(),
            "META-INF/services/" + HealthCheckResponseProvider.class.getName());
    if (providers.isEmpty()) {
        throw new IllegalStateException("No HealthCheckResponseProvider implementation found.");
    } else if (providers.size() > 1) {
        throw new IllegalStateException(
                String.format("Multiple HealthCheckResponseProvider implementations found: %s", providers));
    }
    // Load via the TCCL so the provider class is visible across launch modes.
    final String provider = providers.iterator().next();
    final Class<? extends HealthCheckResponseProvider> responseProvider = (Class<? extends HealthCheckResponseProvider>) Class
            .forName(provider, true, Thread.currentThread().getContextClassLoader());
    recorder.registerHealthCheckResponseProvider(responseProvider);
}
/**
 * Registers the HTTP routes for the health, liveness, readiness, health-group,
 * wellness and startup endpoints under the non-application root path, plus one
 * route per discovered health group. Also warns when JAX-RS {@code @Path} is
 * combined with MP Health annotations (where it has no effect).
 */
@BuildStep
public void defineHealthRoutes(BuildProducer<RouteBuildItem> routes,
        BeanArchiveIndexBuildItem beanArchiveIndex,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        SmallRyeHealthConfig healthConfig) {
    IndexView index = beanArchiveIndex.getIndex();

    // log a warning if users try to use MP Health annotations with JAX-RS @Path
    warnIfJaxRsPathUsed(index, LIVENESS);
    warnIfJaxRsPathUsed(index, READINESS);
    warnIfJaxRsPathUsed(index, STARTUP);
    warnIfJaxRsPathUsed(index, WELLNESS);

    // Register the health handler
    routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .route(healthConfig.rootPath)
            .routeConfigKey("quarkus.smallrye-health.root-path")
            .handler(new SmallRyeHealthHandler())
            .displayOnNotFoundPage()
            .blockingRoute()
            .build());

    // Register the liveness handler
    routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .nestedRoute(healthConfig.rootPath, healthConfig.livenessPath)
            .handler(new SmallRyeLivenessHandler())
            .displayOnNotFoundPage()
            .blockingRoute()
            .build());

    // Register the readiness handler
    routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .nestedRoute(healthConfig.rootPath, healthConfig.readinessPath)
            .handler(new SmallRyeReadinessHandler())
            .displayOnNotFoundPage()
            .blockingRoute()
            .build());

    // Find all health groups
    Set<String> healthGroups = new HashSet<>();
    // with simple @HealthGroup annotations
    for (AnnotationInstance healthGroupAnnotation : index.getAnnotations(HEALTH_GROUP)) {
        healthGroups.add(healthGroupAnnotation.value().asString());
    }
    // with @HealthGroups repeatable annotations
    for (AnnotationInstance healthGroupsAnnotation : index.getAnnotations(HEALTH_GROUPS)) {
        for (AnnotationInstance healthGroupAnnotation : healthGroupsAnnotation.value().asNestedArray()) {
            healthGroups.add(healthGroupAnnotation.value().asString());
        }
    }

    // Register the health group handlers: one umbrella route plus a shared
    // handler instance for each individual group path.
    routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .nestedRoute(healthConfig.rootPath, healthConfig.groupPath)
            .handler(new SmallRyeHealthGroupHandler())
            .displayOnNotFoundPage()
            .blockingRoute()
            .build());

    SmallRyeIndividualHealthGroupHandler handler = new SmallRyeIndividualHealthGroupHandler();
    for (String healthGroup : healthGroups) {
        routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
                .nestedRoute(healthConfig.rootPath, healthConfig.groupPath + "/" + healthGroup)
                .handler(handler)
                .displayOnNotFoundPage()
                .blockingRoute()
                .build());
    }

    // Register the wellness handler
    routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .nestedRoute(healthConfig.rootPath, healthConfig.wellnessPath)
            .handler(new SmallRyeWellnessHandler())
            .displayOnNotFoundPage()
            .blockingRoute()
            .build());

    // Register the startup handler
    routes.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .nestedRoute(healthConfig.rootPath, healthConfig.startupPath)
            .handler(new SmallRyeStartupHandler())
            .displayOnNotFoundPage()
            .blockingRoute()
            .build());
}
/**
 * Mirrors the Quarkus context-propagation flag into the JVM system property
 * {@code io.smallrye.health.context.propagation} that SmallRye Health reads.
 */
@BuildStep
public void translateSmallRyeConfigValues(SmallRyeHealthConfig healthConfig,
        BuildProducer<SystemPropertyBuildItem> systemProperties) {
    if (!healthConfig.contextPropagation) {
        return;
    }
    systemProperties.produce(new SystemPropertyBuildItem("io.smallrye.health.context.propagation", "true"));
}
/**
 * When the SmallRye OpenAPI capability is present (and the {@link OpenAPIIncluded}
 * condition holds), contributes a filter that documents the health, liveness,
 * readiness and startup endpoints in the generated OpenAPI model.
 */
@BuildStep(onlyIf = OpenAPIIncluded.class)
public void includeInOpenAPIEndpoint(BuildProducer<AddToOpenAPIDefinitionBuildItem> openAPIProducer,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        Capabilities capabilities,
        SmallRyeHealthConfig healthConfig) {
    // Add to OpenAPI if OpenAPI is available
    if (capabilities.isPresent(Capability.SMALLRYE_OPENAPI)) {
        // Sub-paths are resolved relative to the already-resolved health root.
        String healthRootPath = nonApplicationRootPathBuildItem.resolvePath(healthConfig.rootPath);

        HealthOpenAPIFilter filter = new HealthOpenAPIFilter(healthRootPath,
                nonApplicationRootPathBuildItem.resolveNestedPath(healthRootPath, healthConfig.livenessPath),
                nonApplicationRootPathBuildItem.resolveNestedPath(healthRootPath, healthConfig.readinessPath),
                nonApplicationRootPathBuildItem.resolveNestedPath(healthRootPath, healthConfig.startupPath));
        openAPIProducer.produce(new AddToOpenAPIDefinitionBuildItem(filter));
    }
}
/**
 * Logs a warning for every class or method that combines the given health
 * annotation with JAX-RS {@code @Path}, since {@code @Path} has no effect on
 * health procedures.
 */
private void warnIfJaxRsPathUsed(IndexView index, DotName healthAnnotation) {
    for (AnnotationInstance instance : index.getAnnotations(healthAnnotation)) {
        AnnotationTarget target = instance.target();
        boolean hasJaxRsPath;
        switch (target.kind()) {
            case CLASS:
                hasJaxRsPath = target.asClass().classAnnotation(JAX_RS_PATH) != null;
                break;
            case METHOD:
                hasJaxRsPath = target.asMethod().hasAnnotation(JAX_RS_PATH);
                break;
            default:
                hasJaxRsPath = false;
                break;
        }
        if (hasJaxRsPath) {
            LOG.warnv(
                    "The use of @Path has no effect when @{0} is used and should therefore be removed. Offending target is {1}: {2}",
                    healthAnnotation.withoutPackagePrefix(), target.kind(), target);
        }
    }
}
/**
 * Publishes the resolved liveness, readiness and startup endpoint paths so
 * the Kubernetes extension can wire them into the generated probe
 * definitions.
 */
@BuildStep
public void kubernetes(NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        SmallRyeHealthConfig healthConfig,
        BuildProducer<KubernetesHealthLivenessPathBuildItem> livenessPathItemProducer,
        BuildProducer<KubernetesHealthReadinessPathBuildItem> readinessPathItemProducer,
        BuildProducer<KubernetesHealthStartupPathBuildItem> startupPathItemProducer) {
    livenessPathItemProducer.produce(
            new KubernetesHealthLivenessPathBuildItem(
                    nonApplicationRootPathBuildItem.resolveNestedPath(healthConfig.rootPath, healthConfig.livenessPath)));
    readinessPathItemProducer.produce(
            new KubernetesHealthReadinessPathBuildItem(
                    nonApplicationRootPathBuildItem.resolveNestedPath(healthConfig.rootPath, healthConfig.readinessPath)));
    startupPathItemProducer.produce(
            new KubernetesHealthStartupPathBuildItem(
                    nonApplicationRootPathBuildItem.resolveNestedPath(healthConfig.rootPath, healthConfig.startupPath)));
}
/**
 * Registers a shutdown listener that participates in readiness reporting while the
 * application shuts down.
 */
@BuildStep
ShutdownListenerBuildItem shutdownListener() {
    ShutdownReadinessListener listener = new ShutdownReadinessListener();
    return new ShutdownListenerBuildItem(listener);
}
/**
 * Gives health checks that carry no scope annotation (directly or via a stereotype
 * with a default scope) an explicit {@code @Singleton} scope, so they become CDI
 * beans without the user having to annotate them.
 */
@BuildStep
AnnotationsTransformerBuildItem annotationTransformer(BeanArchiveIndexBuildItem beanArchiveIndex,
        CustomScopeAnnotationsBuildItem scopes) {
    // Transform health checks that are not annotated with a scope or a stereotype with a default scope
    // First collect every stereotype annotation that itself declares a default scope;
    // classes annotated with one of these must NOT be re-scoped below.
    Set<DotName> stereotypeAnnotations = new HashSet<>();
    for (AnnotationInstance annotation : beanArchiveIndex.getIndex().getAnnotations(DotNames.STEREOTYPE)) {
        ClassInfo annotationClass = beanArchiveIndex.getIndex().getClassByName(annotation.name());
        if (annotationClass != null && scopes.isScopeIn(annotationClass.classAnnotations())) {
            // Stereotype annotation with a default scope
            stereotypeAnnotations.add(annotationClass.name());
        }
    }
    // All annotations that mark a class/method as a health check.
    List<DotName> healthAnnotations = new ArrayList<>(5);
    healthAnnotations.add(LIVENESS);
    healthAnnotations.add(READINESS);
    healthAnnotations.add(STARTUP);
    healthAnnotations.add(HEALTH_GROUP);
    healthAnnotations.add(HEALTH_GROUPS);
    healthAnnotations.add(WELLNESS);
    return new AnnotationsTransformerBuildItem(new AnnotationsTransformer() {
        @Override
        public boolean appliesTo(Kind kind) {
            // Health annotations can sit on either the class or a producer method.
            return kind == Kind.CLASS || kind == Kind.METHOD;
        }
        @Override
        public void transform(TransformationContext ctx) {
            if (ctx.getAnnotations().isEmpty()) {
                return;
            }
            Collection<AnnotationInstance> annotations;
            if (ctx.isClass()) {
                annotations = ctx.getAnnotations();
                // A stereotype with a default scope already provides scoping.
                if (containsAny(annotations, stereotypeAnnotations)) {
                    return;
                }
            } else {
                // For methods, only look at annotations targeting the method itself.
                annotations = getAnnotations(Kind.METHOD, ctx.getAnnotations());
            }
            // An explicit scope annotation always wins; do not override it.
            if (scopes.isScopeIn(annotations)) {
                return;
            }
            // Unscoped health check: default it to @Singleton.
            if (containsAny(annotations, healthAnnotations)) {
                ctx.transform().add(BuiltinScope.SINGLETON.getName()).done();
            }
        }
    });
}
// UI
/**
 * Copies the Health UI webjar into place and rewrites its static files so the UI
 * calls the application's actual health endpoint.
 * <p>
 * In dev/test the resources are copied to a temp directory (kept hot-reloadable for
 * branding changes); in production they are baked into the application as generated
 * resources (and registered for native image inclusion).
 *
 * @throws ConfigurationException if the UI root path is configured as {@code "/"}
 */
@BuildStep
void registerUiExtension(
        BuildProducer<GeneratedResourceBuildItem> generatedResourceProducer,
        BuildProducer<NativeImageResourceBuildItem> nativeImageResourceProducer,
        BuildProducer<SmallRyeHealthBuildItem> smallRyeHealthBuildProducer,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        SmallRyeHealthConfig healthConfig,
        CurateOutcomeBuildItem curateOutcomeBuildItem,
        LaunchModeBuildItem launchMode,
        LiveReloadBuildItem liveReloadBuildItem) throws Exception {
    if (shouldInclude(launchMode, healthConfig)) {
        // Mounting the UI on "/" would shadow every other route of the application.
        if ("/".equals(healthConfig.ui.rootPath)) {
            throw new ConfigurationException(
                    "quarkus.smallrye-health.root-path-ui was set to \"/\", this is not allowed as it blocks the application from serving anything else.",
                    Set.of("quarkus.smallrye-health.root-path-ui"));
        }
        String healthPath = nonApplicationRootPathBuildItem.resolvePath(healthConfig.rootPath);
        String healthUiPath = nonApplicationRootPathBuildItem.resolvePath(healthConfig.ui.rootPath);
        ResolvedDependency artifact = WebJarUtil.getAppArtifact(curateOutcomeBuildItem, HEALTH_UI_WEBJAR_GROUP_ID,
                HEALTH_UI_WEBJAR_ARTIFACT_ID);
        if (launchMode.getLaunchMode().isDevOrTest()) {
            Path tempPath = WebJarUtil.copyResourcesForDevOrTest(liveReloadBuildItem, curateOutcomeBuildItem, launchMode,
                    artifact,
                    HEALTH_UI_WEBJAR_PREFIX);
            // FIX: the previous check was launchMode.equals(LaunchMode.DEVELOPMENT),
            // which compared a LaunchModeBuildItem against a LaunchMode enum constant
            // and was therefore always false — the static files were never rewritten,
            // so the Health UI kept calling the default "/health" URL in dev mode.
            if (launchMode.getLaunchMode() == LaunchMode.DEVELOPMENT) {
                updateApiUrl(tempPath.resolve(JS_FILE_TO_UPDATE), healthPath);
                updateApiUrl(tempPath.resolve(INDEX_FILE_TO_UPDATE), healthPath);
            }
            smallRyeHealthBuildProducer
                    .produce(new SmallRyeHealthBuildItem(tempPath.toAbsolutePath().toString(), healthUiPath));
            // Handle live reload of branding files
            if (liveReloadBuildItem.isLiveReload() && !liveReloadBuildItem.getChangedResources().isEmpty()) {
                WebJarUtil.hotReloadBrandingChanges(curateOutcomeBuildItem, launchMode, artifact,
                        liveReloadBuildItem.getChangedResources());
            }
        } else {
            // Production: embed the (rewritten) UI files directly into the build output.
            Map<String, byte[]> files = WebJarUtil.copyResourcesForProduction(curateOutcomeBuildItem, artifact,
                    HEALTH_UI_WEBJAR_PREFIX);
            for (Map.Entry<String, byte[]> file : files.entrySet()) {
                String fileName = file.getKey();
                byte[] content = file.getValue();
                if (fileName.endsWith(JS_FILE_TO_UPDATE) || fileName.endsWith(INDEX_FILE_TO_UPDATE)) {
                    content = updateApiUrl(new String(content, StandardCharsets.UTF_8), healthPath)
                            .getBytes(StandardCharsets.UTF_8);
                }
                fileName = HEALTH_UI_FINAL_DESTINATION + "/" + fileName;
                generatedResourceProducer.produce(new GeneratedResourceBuildItem(fileName, content));
                nativeImageResourceProducer.produce(new NativeImageResourceBuildItem(fileName));
            }
            smallRyeHealthBuildProducer.produce(new SmallRyeHealthBuildItem(HEALTH_UI_FINAL_DESTINATION, healthUiPath));
        }
    }
}
/**
 * Registers the Vert.x routes that serve the Health UI: the root route (shown on the
 * not-found page and tied to its config key) and a wildcard route for static assets.
 */
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
void registerHealthUiHandler(
        BuildProducer<RouteBuildItem> routeProducer,
        SmallRyeHealthRecorder recorder,
        SmallRyeHealthRuntimeConfig runtimeConfig,
        SmallRyeHealthBuildItem smallRyeHealthBuildItem,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        LaunchModeBuildItem launchMode,
        SmallRyeHealthConfig healthConfig) {
    // Guard clause: nothing to register when the UI is excluded from this build.
    if (!shouldInclude(launchMode, healthConfig)) {
        return;
    }
    Handler<RoutingContext> uiHandler = recorder.uiHandler(smallRyeHealthBuildItem.getHealthUiFinalDestination(),
            smallRyeHealthBuildItem.getHealthUiPath(), runtimeConfig);
    // Root route for the UI itself.
    routeProducer.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .route(healthConfig.ui.rootPath)
            .displayOnNotFoundPage("Health UI")
            .routeConfigKey("quarkus.smallrye-health.ui.root-path")
            .handler(uiHandler)
            .build());
    // Wildcard route serving everything underneath the root path.
    routeProducer.produce(nonApplicationRootPathBuildItem.routeBuilder()
            .route(healthConfig.ui.rootPath + "*")
            .handler(uiHandler)
            .build());
}
/**
 * Rewrites the health URL inside a single copied static file (dev/test mode).
 *
 * @param fileToUpdate the copied UI file to patch in place
 * @param healthPath   the resolved health endpoint path to substitute
 * @throws IOException if the file cannot be read or written
 */
private void updateApiUrl(Path fileToUpdate, String healthPath) throws IOException {
    String content = new String(Files.readAllBytes(fileToUpdate), StandardCharsets.UTF_8);
    String result = updateApiUrl(content, healthPath);
    // Only write when a replacement actually changed something: an unconditional
    // write would touch the file's timestamp even for no-op updates.
    if (result != null && !result.equals(content)) {
        Files.write(fileToUpdate, result.getBytes(StandardCharsets.UTF_8));
    }
}
// Replace health URL in static files
// Substitutes the default "/health" endpoint in both the JS assignment and the
// HTML placeholder attribute with the resolved health path.
public String updateApiUrl(String original, String healthPath) {
    String withJsUrl = original.replace("url = \"/health\";", "url = \"" + healthPath + "\";");
    return withJsUrl.replace("placeholder=\"/health\"", "placeholder=\"" + healthPath + "\"");
}
// The Health UI ships automatically in dev/test; in other modes only when the
// user explicitly opts in via quarkus.smallrye-health.ui.always-include.
private static boolean shouldInclude(LaunchModeBuildItem launchMode, SmallRyeHealthConfig healthConfig) {
    if (launchMode.getLaunchMode().isDevOrTest()) {
        return true;
    }
    return healthConfig.ui.alwaysInclude;
}
}
|
Health-UI calls the wrong URL for health in dev mode
|
extensions/smallrye-health/deployment/src/main/java/io/quarkus/smallrye/health/deployment/SmallRyeHealthProcessor.java
|
Health-UI calls the wrong URL for health in dev mode
|
|
Java
|
apache-2.0
|
05242b269662cd24387f77310919925b23b121af
| 0
|
lucafavatella/intellij-community,signed/intellij-community,signed/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,holmes/intellij-community,vvv1559/intellij-community,caot/intellij-community,kool79/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,akosyakov/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,kool79/intellij-community,wreckJ/intellij-community,caot/intellij-community,suncycheng/intellij-community,akosyakov/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,FHannes/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,supersven/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,fengbaicanhe/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,kool79/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,holmes/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,da1z/intellij-community,Lekanich/intellij-community,vladmm/inte
llij-community,jagguli/intellij-community,robovm/robovm-studio,adedayo/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,jagguli/intellij-community,semonte/intellij-community,nicolargo/intellij-community,slisson/intellij-community,consulo/consulo,vladmm/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,hurricup/intellij-community,jagguli/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,apixandru/intellij-community,ryano144/intellij-community,kdwink/intellij-community,fitermay/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,FHannes/intellij-community,tmpgit/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,izonder/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,semonte/intellij-community,apixandru/intellij-community,ernestp/consulo,diorcety/intellij-community,supersven/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,lucafavatella/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,samthor/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,petteyg/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,semonte/intellij-community,holmes/intellij-community,youdonghai/intellij-community,holmes/intellij-community,hurricup/intellij-communi
ty,FHannes/intellij-community,MER-GROUP/intellij-community,orekyuu/intellij-community,FHannes/intellij-community,ibinti/intellij-community,izonder/intellij-community,ernestp/consulo,ibinti/intellij-community,ryano144/intellij-community,diorcety/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,ibinti/intellij-community,consulo/consulo,semonte/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,kool79/intellij-community,caot/intellij-community,clumsy/intellij-community,retomerz/intellij-community,signed/intellij-community,dslomov/intellij-community,fitermay/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,hurricup/intellij-community,fnouama/intellij-community,slisson/intellij-community,holmes/intellij-community,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,consulo/consulo,kdwink/intellij-community,jagguli/intellij-community,michaelgallacher/intellij-community,kool79/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,clumsy/intellij-community,caot/intellij-community,ol-loginov/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,blademainer/intellij-community,consulo/consulo,fengbaicanhe/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,alphafoobar/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,caot/intellij-community,Distro
tech/intellij-community,fitermay/intellij-community,fnouama/intellij-community,slisson/intellij-community,petteyg/intellij-community,kdwink/intellij-community,robovm/robovm-studio,petteyg/intellij-community,signed/intellij-community,blademainer/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,dslomov/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,gnuhub/intellij-community,izonder/intellij-community,fnouama/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,signed/intellij-community,dslomov/intellij-community,izonder/intellij-community,clumsy/intellij-community,da1z/intellij-community,fnouama/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,signed/intellij-community,ol-loginov/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,caot/intellij-community,MichaelNedzelsky/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,ahb0327/intellij-community,izonder/intellij-community,samthor/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,pwoodworth/intellij-community,amith01994/intellij-community,robovm/robovm-studio,adedayo/intellij-community,amith01994/intellij-community,signed/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,supersven/intellij-community,FHannes/intellij-community,holmes/intellij-community,gnuhub/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,robovm/robovm-studio,adedayo/intellij-community,tmpgit/intellij-community,ftom
assetti/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,supersven/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,tmpgit/intellij-community,slisson/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,signed/intellij-community,slisson/intellij-community,da1z/intellij-community,signed/intellij-community,vvv1559/intellij-community,consulo/consulo,amith01994/intellij-community,caot/intellij-community,amith01994/intellij-community,ryano144/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,signed/intellij-community,holmes/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,adedayo/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,caot/intellij-community,allotria/intellij-community,petteyg/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,fnouama/intellij-community,allotria/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,ernestp/consulo,da1z/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,slisson/intellij-community,vladmm/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,fengbaicanhe/intellij-community,ibint
i/intellij-community,retomerz/intellij-community,ryano144/intellij-community,apixandru/intellij-community,FHannes/intellij-community,petteyg/intellij-community,amith01994/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,semonte/intellij-community,clumsy/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,samthor/intellij-community,ryano144/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,semonte/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,izonder/intellij-community,adedayo/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,asedunov/intellij-community,allotria/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,hurricup/intellij-community,vladmm/intellij-community,blademainer/intellij-community,youdonghai/intellij-community,caot/intellij-community,da1z/intellij-com
munity,wreckJ/intellij-community,xfournet/intellij-community,vladmm/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,diorcety/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,holmes/intellij-community,nicolargo/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,allotria/intellij-community,retomerz/intellij-community,petteyg/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,dslomov/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,amith01994/intellij-community,vladmm/intellij-community,slisson/intellij-community,robovm/robovm-studio,semonte/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,clumsy/intellij-community,akosyakov/intellij-community,kool79/intellij-community,ibinti/intellij-community,da1z/intellij-community,blademainer/intellij-community,da1z/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,kool79/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,blademainer/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,samthor/intelli
j-community,alphafoobar/intellij-community,vladmm/intellij-community,ibinti/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,akosyakov/intellij-community,akosyakov/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,apixandru/intellij-community,signed/intellij-community,tmpgit/intellij-community,izonder/intellij-community,petteyg/intellij-community,supersven/intellij-community,diorcety/intellij-community,fnouama/intellij-community,apixandru/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,allotria/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,TangHao1987/intellij-community,ryano144/intellij-community,samthor/intellij-community,holmes/intellij-community,jagguli/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,dslomov/intellij-community,allotria/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,da1z/intellij-community,youdonghai/intellij-community,holmes/intellij-community,blademainer/intellij-community,michaelgallacher/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,ide
a4bsd/idea4bsd,ivan-fedorov/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,caot/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,izonder/intellij-community,samthor/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,asedunov/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,TangHao1987/intellij-community,ryano144/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,fnouama/intellij-community,MER-GROUP/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,izonder/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,ernestp/consulo,supersven/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,ahb0327/intellij-community,ernestp/consulo,clumsy/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,fitermay/intellij-community,da1z/intellij-community,allotria/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,consulo/consulo,kool79/intellij-community,gnuhub/intellij-community,SerCeMan/int
ellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,slisson/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,ernestp/consulo,wreckJ/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,diorcety/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,ibinti/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,kdwink/intellij-community,apixandru/intellij-community,xfournet/intellij-community,samthor/intellij-community,clumsy/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,TangHao1987/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,signed/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,kdwink/intellij-community,diorcety/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,blademainer/intellij-community,xfournet/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,hurricup/intellij-community,ThiagoGa
rciaAlves/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,signed/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,robovm/robovm-studio,ibinti/intellij-community,kool79/intellij-community,ahb0327/intellij-community,samthor/intellij-community,fitermay/intellij-community,allotria/intellij-community,caot/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,supersven/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,kdwink/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,retomerz/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,ivan-fedorov/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,samthor/intellij-community,ol-loginov/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,izonder/intellij-community,izonder/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,FHannes/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community
|
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.platform.templates;
import com.intellij.ide.fileTemplates.impl.UrlUtil;
import com.intellij.ide.plugins.IdeaPluginDescriptor;
import com.intellij.ide.plugins.PluginManager;
import com.intellij.ide.util.projectWizard.WizardContext;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.ClearableLazyValue;
import com.intellij.openapi.util.Pair;
import com.intellij.platform.ProjectTemplate;
import com.intellij.platform.ProjectTemplatesFactory;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
/**
* @author Dmitry Avdeev
* @since 10/1/12
*/
public class ArchivedTemplatesFactory extends ProjectTemplatesFactory {
private static final String ZIP = ".zip";
private final ClearableLazyValue<MultiMap<String, Pair<URL, ClassLoader>>> myGroups = new ClearableLazyValue<MultiMap<String, Pair<URL, ClassLoader>>>() {
@NotNull
@Override
protected MultiMap<String, Pair<URL, ClassLoader>> compute() {
MultiMap<String, Pair<URL, ClassLoader>> map = new MultiMap<String, Pair<URL, ClassLoader>>();
IdeaPluginDescriptor[] plugins = PluginManager.getPlugins();
Map<URL, ClassLoader> urls = new HashMap<URL, ClassLoader>();
for (IdeaPluginDescriptor plugin : plugins) {
if (!plugin.isEnabled()) continue;
try {
ClassLoader loader = plugin.getPluginClassLoader();
Enumeration<URL> resources = loader.getResources("resources/projectTemplates");
ArrayList<URL> list = Collections.list(resources);
for (URL url : list) {
urls.put(url, loader);
}
}
catch (IOException e) {
LOG.error(e);
}
}
URL configURL = getCustomTemplatesURL();
if (configURL != null) {
urls.put(configURL, ClassLoader.getSystemClassLoader());
}
for (Map.Entry<URL, ClassLoader> url : urls.entrySet()) {
try {
List<String> children = UrlUtil.getChildrenRelativePaths(url.getKey());
if (configURL == url.getKey() && !children.isEmpty()) {
map.putValue(CUSTOM_GROUP, Pair.create(url.getKey(), url.getValue()));
continue;
}
for (String child : children) {
int index = child.indexOf('/');
if (index != -1) {
child = child.substring(0, index);
}
map.putValue(child.replace('_', ' '), Pair.create(new URL(url.getKey().toExternalForm() + "/" + child), url.getValue()));
}
}
catch (IOException e) {
LOG.error(e);
}
}
return map;
}
};
private static URL getCustomTemplatesURL() {
String path = getCustomTemplatesPath();
try {
return new File(path).toURI().toURL();
}
catch (MalformedURLException e) {
throw new RuntimeException(e);
}
}
static String getCustomTemplatesPath() {
return PathManager.getConfigPath() + "/projectTemplates";
}
public static File getTemplateFile(String name) {
String configURL = getCustomTemplatesPath();
return new File(configURL + "/" + name + ".zip");
}
@NotNull
@Override
public String[] getGroups() {
myGroups.drop();
Set<String> groups = myGroups.getValue().keySet();
return ArrayUtil.toStringArray(groups);
}
/**
 * Creates the templates for one group by scanning each of the group's root
 * URLs for *.zip children. IO failures on a root are logged and that root is
 * skipped; the remaining roots are still processed.
 */
@NotNull
@Override
public ProjectTemplate[] createTemplates(String group, WizardContext context) {
Collection<Pair<URL, ClassLoader>> urls = myGroups.getValue().get(group);
List<ProjectTemplate> templates = new ArrayList<ProjectTemplate>();
for (Pair<URL, ClassLoader> url : urls) {
try {
List<String> children = UrlUtil.getChildrenRelativePaths(url.first);
for (String child : children) {
// Only ZIP archives are treated as templates.
if (child.endsWith(ZIP)) {
URL templateUrl = new URL(url.first.toExternalForm() + "/" + child);
// Display name: file name minus extension, underscores shown as spaces.
String name = child.substring(0, child.length() - ZIP.length()).replace('_', ' ');
templates.add(new LocalArchivedTemplate(name, templateUrl, url.second));
}
}
}
catch (IOException e) {
LOG.error(e);
}
}
return templates.toArray(new ProjectTemplate[templates.size()]);
}
@Override
public int getGroupWeight(String group) {
  // The user's own templates group sorts ahead of plugin-provided groups.
  if (CUSTOM_GROUP.equals(group)) {
    return -2;
  }
  return 0;
}
private final static Logger LOG = Logger.getInstance(ArchivedTemplatesFactory.class);
}
|
java/idea-ui/src/com/intellij/platform/templates/ArchivedTemplatesFactory.java
|
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.platform.templates;
import com.intellij.ide.fileTemplates.impl.UrlUtil;
import com.intellij.ide.plugins.IdeaPluginDescriptor;
import com.intellij.ide.plugins.PluginManager;
import com.intellij.ide.util.projectWizard.WizardContext;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.ClearableLazyValue;
import com.intellij.openapi.util.Pair;
import com.intellij.platform.ProjectTemplate;
import com.intellij.platform.ProjectTemplatesFactory;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
/**
* @author Dmitry Avdeev
* @since 10/1/12
*/
/**
 * Provides project templates packaged as ZIP archives. Template roots come
 * from two places: every plugin's {@code resources/projectTemplates} resource
 * directory, and the user's custom-templates directory under the IDE config
 * path. Each immediate child directory of a root becomes a template group.
 */
public class ArchivedTemplatesFactory extends ProjectTemplatesFactory {
// File extension that identifies an archived template.
private static final String ZIP = ".zip";
// Lazily computed map: group name -> (root URL, class loader able to read it).
// Recomputed after getGroups() calls drop().
private final ClearableLazyValue<MultiMap<String, Pair<URL, ClassLoader>>> myGroups = new ClearableLazyValue<MultiMap<String, Pair<URL, ClassLoader>>>() {
@NotNull
@Override
protected MultiMap<String, Pair<URL, ClassLoader>> compute() {
MultiMap<String, Pair<URL, ClassLoader>> map = new MultiMap<String, Pair<URL, ClassLoader>>();
IdeaPluginDescriptor[] plugins = PluginManager.getPlugins();
// Collect candidate roots; keyed by URL so duplicates collapse.
Map<URL, ClassLoader> urls = new HashMap<URL, ClassLoader>();
for (IdeaPluginDescriptor plugin : plugins) {
try {
ClassLoader loader = plugin.getPluginClassLoader();
Enumeration<URL> resources = loader.getResources("resources/projectTemplates");
ArrayList<URL> list = Collections.list(resources);
for (URL url : list) {
urls.put(url, loader);
}
}
catch (IOException e) {
LOG.error(e);
}
}
// Add the user's custom-templates directory as an extra root.
URL configURL = getCustomTemplatesURL();
if (configURL != null) {
urls.put(configURL, ClassLoader.getSystemClassLoader());
}
for (Map.Entry<URL, ClassLoader> url : urls.entrySet()) {
try {
List<String> children = UrlUtil.getChildrenRelativePaths(url.getKey());
// The custom-templates root is one flat group; don't split by subdirectory.
// Identity comparison is intentional: configURL is the exact key inserted above.
if (configURL == url.getKey() && !children.isEmpty()) {
map.putValue(CUSTOM_GROUP, Pair.create(url.getKey(), url.getValue()));
continue;
}
for (String child : children) {
// Keep only the first path segment: the group directory name.
int index = child.indexOf('/');
if (index != -1) {
child = child.substring(0, index);
}
// Group display name uses spaces where the directory uses underscores.
map.putValue(child.replace('_', ' '), Pair.create(new URL(url.getKey().toExternalForm() + "/" + child), url.getValue()));
}
}
catch (IOException e) {
LOG.error(e);
}
}
return map;
}
};
/** Converts the custom-templates directory path into a file:// URL. */
private static URL getCustomTemplatesURL() {
String path = getCustomTemplatesPath();
try {
return new File(path).toURI().toURL();
}
catch (MalformedURLException e) {
// A URL built from a File URI should always be well-formed.
throw new RuntimeException(e);
}
}
/** Absolute path of the directory holding user-created project templates. */
static String getCustomTemplatesPath() {
return PathManager.getConfigPath() + "/projectTemplates";
}
/** File where the archive for the named custom template is (or should be) stored. */
public static File getTemplateFile(String name) {
String configURL = getCustomTemplatesPath();
return new File(configURL + "/" + name + ".zip");
}
@NotNull
@Override
public String[] getGroups() {
// Drop the cache so newly added/removed templates are re-discovered.
myGroups.drop();
Set<String> groups = myGroups.getValue().keySet();
return ArrayUtil.toStringArray(groups);
}
/**
 * Creates the templates for one group by scanning each of the group's root
 * URLs for *.zip children. IO failures are logged and the root is skipped.
 */
@NotNull
@Override
public ProjectTemplate[] createTemplates(String group, WizardContext context) {
Collection<Pair<URL, ClassLoader>> urls = myGroups.getValue().get(group);
List<ProjectTemplate> templates = new ArrayList<ProjectTemplate>();
for (Pair<URL, ClassLoader> url : urls) {
try {
List<String> children = UrlUtil.getChildrenRelativePaths(url.first);
for (String child : children) {
if (child.endsWith(ZIP)) {
URL templateUrl = new URL(url.first.toExternalForm() + "/" + child);
// Display name: file name minus extension, underscores shown as spaces.
String name = child.substring(0, child.length() - ZIP.length()).replace('_', ' ');
templates.add(new LocalArchivedTemplate(name, templateUrl, url.second));
}
}
}
catch (IOException e) {
LOG.error(e);
}
}
return templates.toArray(new ProjectTemplate[templates.size()]);
}
@Override
public int getGroupWeight(String group) {
// The user's own templates group sorts ahead of plugin-provided groups.
return CUSTOM_GROUP.equals(group) ? -2 : 0;
}
private final static Logger LOG = Logger.getInstance(ArchivedTemplatesFactory.class);
}
|
IDEA-106723 Project Templates provided by disabled plugins still appear in wizard [nik]
|
java/idea-ui/src/com/intellij/platform/templates/ArchivedTemplatesFactory.java
|
IDEA-106723 Project Templates provided by disabled plugins still appear in wizard [nik]
|
|
Java
|
apache-2.0
|
dd01362d783fc4da74d3bdf05d9ba47649033cc5
| 0
|
SlapTurtle/SoftwareProject
|
package Tests;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.junit.*;
import java.util.Calendar;
import java.util.GregorianCalendar;
import project.*;
/**
 * Smoke test for DateServer: checks that it reports the current calendar and
 * the current week.
 */
public class TestDateServer extends TestBasis {
@Test
public void testDateServer(){
// Create the object under test.
DateServer ds = new DateServer();
// getCalendar() should match a calendar created "now".
// NOTE(review): this compares two GregorianCalendar instances created at
// slightly different moments; Calendar.equals compares the millisecond time,
// so this assertion is timing-sensitive and may be flaky — confirm intent.
Calendar cal = new GregorianCalendar();
assertEquals(ds.getCalendar(), cal);
// getToday() should compare equal to the current year/week.
Week w = new Week(cal.get(Calendar.YEAR), cal.get(Calendar.WEEK_OF_YEAR));
assertEquals(ds.getToday().compareTo(w), 0);
}
}
|
SE1/src/Tests/TestDateServer.java
|
package Tests;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.junit.*;
import java.util.Calendar;
import java.util.GregorianCalendar;
import project.*;
/**
 * Smoke test for DateServer: checks that it reports the current calendar and
 * the current week.
 */
public class TestDateServer extends TestBasis {
@Test
public void testDateServer(){
// Create the object under test.
DateServer ds = new DateServer();
// getCalendar() should match a calendar created "now".
// NOTE(review): this compares two GregorianCalendar instances created at
// slightly different moments; Calendar.equals compares the millisecond time,
// so this assertion is timing-sensitive and may be flaky — confirm intent.
Calendar cal = new GregorianCalendar();
assertEquals(ds.getCalendar(), cal);
// getToday() should compare equal to the current year/week.
Week w = new Week(cal.get(Calendar.YEAR), cal.get(Calendar.WEEK_OF_YEAR));
assertEquals(ds.getToday().compareTo(w), 0);
}
}
|
Virker det så nu1!?
|
SE1/src/Tests/TestDateServer.java
|
Virker det så nu1!?
|
|
Java
|
bsd-3-clause
|
4fe47c52d0fa91c004d5cf846621be3a0800827d
| 0
|
BayesianLogic/blog,BayesianLogic/blog,BayesianLogic/blog,BayesianLogic/blog,BayesianLogic/blog
|
/*
* Copyright (c) 2005, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of the University of California, Berkeley nor
* the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package blog.distrib;
import java.io.Serializable;
import java.io.IOException;
import java.util.List;
import java.util.Random;
import blog.common.numerical.JamaMatrixLib;
import blog.common.numerical.MatrixLib;
import blog.common.Util;
import blog.distrib.AbstractCondProbDistrib;
import blog.model.MatrixSpec;
import blog.model.Type;
/**
* Multinomial distribution.
*
* See https://en.wikipedia.org/wiki/Multinomial_distribution
*/
/**
 * Multinomial distribution over count vectors: numTrials independent trials,
 * each falling into one of pi.length buckets with probability pi[i].
 *
 * See https://en.wikipedia.org/wiki/Multinomial_distribution
 */
public class Multinomial extends AbstractCondProbDistrib {
  /**
   * Validates and stores the parameters; pi is defensively copied and
   * normalized so the stored probabilities sum to 1.
   *
   * @throws IllegalArgumentException if any pi[i] is negative or the entries
   *         sum to (approximately) zero
   */
  private void init(int numTrials, double[] pi) {
    this.numTrials = numTrials;
    this.pi = (double[]) pi.clone();
    double sum = 0;
    for (int i = 0; i < pi.length; i++) {
      if (pi[i] < 0) {
        throw new IllegalArgumentException("Probability " + pi[i]
            + " for element " + i + " is negative.");
      }
      sum += pi[i];
    }
    if (sum < 1e-9) {
      throw new IllegalArgumentException("Probabilities sum to approx zero");
    }
    // Normalize in place on the private copy.
    for (int i = 0; i < pi.length; i++) {
      this.pi[i] /= sum;
    }
  }

  /**
   * Creates a Multinomial object with probabilities specified by the given
   * array.
   *
   * @throws IllegalArgumentException
   *           if pi does not define a probability distribution
   */
  public Multinomial(int numTrials, double[] pi) {
    init(numTrials, pi);
  }

  /**
   * Creates a Multinomial from model parameters: [numTrials, pi-column-vector].
   *
   * @throws IllegalArgumentException if the parameter list does not contain an
   *         Integer trial count followed by a real-valued column vector
   */
  public Multinomial(List params) {
    if (params.size() != 2) {
      throw new IllegalArgumentException("expected numTrials and pi");
    }
    if (!(params.get(0) instanceof Integer)) {
      throw new IllegalArgumentException("expected first arg to be integer numTrials");
    }
    int numTrials = (Integer) params.get(0);
    Object objectPi = params.get(1);
    // A MatrixSpec may wrap a fixed matrix; unwrap it if so.
    if (objectPi instanceof MatrixSpec) {
      objectPi = ((MatrixSpec) objectPi).getValueIfNonRandom();
    }
    if (!(objectPi instanceof MatrixLib)) {
      throw new IllegalArgumentException(
          "expected second arg to be array of reals; got " + objectPi +
          " instead, which is of type " + objectPi.getClass().getName());
    }
    MatrixLib pi = (MatrixLib) objectPi;
    if (pi.colLen() != 1) {
      throw new IllegalArgumentException("expected second arg to be column vector");
    }
    double[] nativePi = new double[pi.rowLen()];
    for (int i = 0; i < pi.rowLen(); i++) {
      nativePi[i] = pi.elementAt(i, 0);
    }
    init(numTrials, nativePi);
  }

  /**
   * Returns the probability of the given count vector (one entry per bucket).
   * Returns 0 if the counts do not sum to numTrials.
   */
  public double getProb(List args, Object value) {
    if (!(value instanceof MatrixLib)) {
      throw new IllegalArgumentException("expected vector value");
    }
    final int numBuckets = pi.length;
    MatrixLib valueVector = (MatrixLib) value;
    if (valueVector.rowLen() != numBuckets || valueVector.colLen() != 1) {
      throw new IllegalArgumentException("value has wrong dimension");
    }
    int sum = 0;
    for (int i = 0; i < numBuckets; i++) {
      // Compound assignment narrows the double entry to int (truncation);
      // entries are expected to hold whole counts.
      sum += valueVector.elementAt(i, 0);
    }
    if (sum != numTrials) {
      return 0;
    }
    double prob = Util.factorial(numTrials);
    for (int i = 0; i < numBuckets; i++) {
      prob *= Math.pow(pi[i], valueVector.elementAt(i, 0));
      prob /= Util.factorial((int) Math.round(valueVector.elementAt(i, 0)));
      // FIXME: It would be better if we could take the param as an array
      // of ints, so we don't have to worry about rounding.
    }
    return prob;
  }

  /**
   * Returns the log probability of the given count vector.
   */
  public double getLogProb(List args, Object value) {
    if (!(value instanceof MatrixLib)) {
      throw new IllegalArgumentException("expected vector value");
    }
    // Bug fix: the value vector has one entry per bucket (pi.length), not one
    // per trial; the previous code compared and looped against numTrials.
    final int numBuckets = pi.length;
    MatrixLib valueVector = (MatrixLib) value;
    if (valueVector.rowLen() != numBuckets || valueVector.colLen() != 1) {
      throw new IllegalArgumentException("value has wrong dimension");
    }
    int sum = 0;
    for (int i = 0; i < numBuckets; i++) {
      sum += valueVector.elementAt(i, 0);
    }
    if (sum != numTrials) {
      // Impossible outcome has probability 0, whose log is -infinity.
      // (The previous code returned 0, i.e. log of probability 1.)
      return Double.NEGATIVE_INFINITY;
    }
    double logProb = Util.logFactorial(numTrials);
    for (int i = 0; i < numBuckets; i++) {
      int count = (int) Math.round(valueVector.elementAt(i, 0));
      if (count > 0) {
        // Skipping count == 0 avoids 0 * log(0) = NaN for zero-prob buckets.
        logProb += count * Math.log(pi[i]);
        logProb -= Util.logFactorial(count);
      }
    }
    return logProb;
  }

  /**
   * Returns a count vector sampled from this distribution, as a column matrix.
   */
  public MatrixLib sampleVal(List args, Type childType) {
    final int numBuckets = pi.length;
    // Build the cumulative distribution once, then sample each trial by
    // inverse-CDF lookup.
    double[] cdf = new double[numBuckets];
    cdf[0] = pi[0];
    for (int i = 1; i < numBuckets; i++) {
      cdf[i] = cdf[i - 1] + pi[i];
    }
    int[] result = new int[numBuckets];
    for (int i = 0; i < numBuckets; i++) {
      result[i] = 0;
    }
    // NOTE(review): an unseeded Random per call makes runs non-reproducible;
    // confirm whether this should use the engine's shared RNG.
    Random rng = new java.util.Random();
    for (int trial = 0; trial < numTrials; trial++) {
      double val = rng.nextDouble();
      int bucket;
      for (bucket = 0; bucket < numBuckets; bucket++) {
        if (val <= cdf[bucket]) {
          break;
        }
      }
      // Guard against floating-point rounding leaving cdf[numBuckets-1]
      // slightly below 1.0, which would let bucket run past the last index.
      if (bucket == numBuckets) {
        bucket = numBuckets - 1;
      }
      result[bucket] += 1;
    }
    // Convert to Jama (nasty).
    double[][] doubleResult = new double[numBuckets][1];
    for (int i = 0; i < numBuckets; i++) {
      doubleResult[i][0] = result[i];
    }
    return new JamaMatrixLib(doubleResult);
  }

  // Number of independent trials.
  private int numTrials;
  // Normalized bucket probabilities (sums to 1).
  private double[] pi;
}
|
src/blog/distrib/Multinomial.java
|
/*
* Copyright (c) 2005, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of the University of California, Berkeley nor
* the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package blog.distrib;
import java.io.Serializable;
import java.io.IOException;
import java.util.List;
import java.util.Random;
import blog.common.numerical.JamaMatrixLib;
import blog.common.numerical.MatrixLib;
import blog.common.Util;
import blog.distrib.AbstractCondProbDistrib;
import blog.model.MatrixSpec;
import blog.model.Type;
/**
* Multinomial distribution.
*
* See https://en.wikipedia.org/wiki/Multinomial_distribution
*/
/**
 * Multinomial distribution over count vectors: numTrials independent trials,
 * each falling into one of pi.length buckets with probability pi[i].
 *
 * See https://en.wikipedia.org/wiki/Multinomial_distribution
 */
public class Multinomial extends AbstractCondProbDistrib {
  /**
   * Validates and stores the parameters; pi is defensively copied and
   * normalized so the stored probabilities sum to 1.
   *
   * @throws IllegalArgumentException if any pi[i] is negative or the entries
   *         sum to (approximately) zero
   */
  private void init(int numTrials, double[] pi) {
    this.numTrials = numTrials;
    this.pi = (double[]) pi.clone();
    double sum = 0;
    for (int i = 0; i < pi.length; i++) {
      if (pi[i] < 0) {
        throw new IllegalArgumentException("Probability " + pi[i]
            + " for element " + i + " is negative.");
      }
      sum += pi[i];
    }
    if (sum < 1e-9) {
      throw new IllegalArgumentException("Probabilities sum to approx zero");
    }
    // Normalize in place on the private copy.
    for (int i = 0; i < pi.length; i++) {
      this.pi[i] /= sum;
    }
  }

  /**
   * Creates a Multinomial object with probabilities specified by the given
   * array.
   *
   * @throws IllegalArgumentException
   *           if pi does not define a probability distribution
   */
  public Multinomial(int numTrials, double[] pi) {
    init(numTrials, pi);
  }

  /**
   * Creates a Multinomial from model parameters: [numTrials, pi-column-vector].
   *
   * @throws IllegalArgumentException if the parameter list does not contain an
   *         Integer trial count followed by a real-valued column vector
   */
  public Multinomial(List params) {
    if (params.size() != 2) {
      throw new IllegalArgumentException("expected numTrials and pi");
    }
    if (!(params.get(0) instanceof Integer)) {
      throw new IllegalArgumentException("expected first arg to be integer numTrials");
    }
    // Explicit (Integer) cast then auto-unboxing: the bare "(int)" cast of an
    // Object is an unboxing cast that does not compile on Java 6.
    int numTrials = (Integer) params.get(0);
    Object objectPi = params.get(1);
    // A MatrixSpec may wrap a fixed matrix; unwrap it if so.
    if (objectPi instanceof MatrixSpec) {
      objectPi = ((MatrixSpec) objectPi).getValueIfNonRandom();
    }
    if (!(objectPi instanceof MatrixLib)) {
      throw new IllegalArgumentException(
          "expected second arg to be array of reals; got " + objectPi +
          " instead, which is of type " + objectPi.getClass().getName());
    }
    MatrixLib pi = (MatrixLib) objectPi;
    if (pi.colLen() != 1) {
      throw new IllegalArgumentException("expected second arg to be column vector");
    }
    double[] nativePi = new double[pi.rowLen()];
    for (int i = 0; i < pi.rowLen(); i++) {
      nativePi[i] = pi.elementAt(i, 0);
    }
    init(numTrials, nativePi);
  }

  /**
   * Returns the probability of the given count vector (one entry per bucket).
   * Returns 0 if the counts do not sum to numTrials.
   */
  public double getProb(List args, Object value) {
    if (!(value instanceof MatrixLib)) {
      throw new IllegalArgumentException("expected vector value");
    }
    final int numBuckets = pi.length;
    MatrixLib valueVector = (MatrixLib) value;
    if (valueVector.rowLen() != numBuckets || valueVector.colLen() != 1) {
      throw new IllegalArgumentException("value has wrong dimension");
    }
    int sum = 0;
    for (int i = 0; i < numBuckets; i++) {
      // Compound assignment narrows the double entry to int (truncation);
      // entries are expected to hold whole counts.
      sum += valueVector.elementAt(i, 0);
    }
    if (sum != numTrials) {
      return 0;
    }
    double prob = Util.factorial(numTrials);
    for (int i = 0; i < numBuckets; i++) {
      prob *= Math.pow(pi[i], valueVector.elementAt(i, 0));
      prob /= Util.factorial((int) Math.round(valueVector.elementAt(i, 0)));
      // FIXME: It would be better if we could take the param as an array
      // of ints, so we don't have to worry about rounding.
    }
    return prob;
  }

  /**
   * Returns the log probability of the given count vector.
   */
  public double getLogProb(List args, Object value) {
    if (!(value instanceof MatrixLib)) {
      throw new IllegalArgumentException("expected vector value");
    }
    // Bug fix: the value vector has one entry per bucket (pi.length), not one
    // per trial; the previous code compared and looped against numTrials.
    final int numBuckets = pi.length;
    MatrixLib valueVector = (MatrixLib) value;
    if (valueVector.rowLen() != numBuckets || valueVector.colLen() != 1) {
      throw new IllegalArgumentException("value has wrong dimension");
    }
    int sum = 0;
    for (int i = 0; i < numBuckets; i++) {
      sum += valueVector.elementAt(i, 0);
    }
    if (sum != numTrials) {
      // Impossible outcome has probability 0, whose log is -infinity.
      // (The previous code returned 0, i.e. log of probability 1.)
      return Double.NEGATIVE_INFINITY;
    }
    double logProb = Util.logFactorial(numTrials);
    for (int i = 0; i < numBuckets; i++) {
      int count = (int) Math.round(valueVector.elementAt(i, 0));
      if (count > 0) {
        // Skipping count == 0 avoids 0 * log(0) = NaN for zero-prob buckets.
        logProb += count * Math.log(pi[i]);
        logProb -= Util.logFactorial(count);
      }
    }
    return logProb;
  }

  /**
   * Returns a count vector sampled from this distribution, as a column matrix.
   */
  public MatrixLib sampleVal(List args, Type childType) {
    final int numBuckets = pi.length;
    // Build the cumulative distribution once, then sample each trial by
    // inverse-CDF lookup.
    double[] cdf = new double[numBuckets];
    cdf[0] = pi[0];
    for (int i = 1; i < numBuckets; i++) {
      cdf[i] = cdf[i - 1] + pi[i];
    }
    int[] result = new int[numBuckets];
    for (int i = 0; i < numBuckets; i++) {
      result[i] = 0;
    }
    // NOTE(review): an unseeded Random per call makes runs non-reproducible;
    // confirm whether this should use the engine's shared RNG.
    Random rng = new java.util.Random();
    for (int trial = 0; trial < numTrials; trial++) {
      double val = rng.nextDouble();
      int bucket;
      for (bucket = 0; bucket < numBuckets; bucket++) {
        if (val <= cdf[bucket]) {
          break;
        }
      }
      // Guard against floating-point rounding leaving cdf[numBuckets-1]
      // slightly below 1.0, which would let bucket run past the last index.
      if (bucket == numBuckets) {
        bucket = numBuckets - 1;
      }
      result[bucket] += 1;
    }
    // Convert to Jama (nasty).
    double[][] doubleResult = new double[numBuckets][1];
    for (int i = 0; i < numBuckets; i++) {
      doubleResult[i][0] = result[i];
    }
    return new JamaMatrixLib(doubleResult);
  }

  // Number of independent trials.
  private int numTrials;
  // Normalized bucket probabilities (sums to 1).
  private double[] pi;
}
|
make backwards compatible with java6
|
src/blog/distrib/Multinomial.java
|
make backwards compatible with java6
|
|
Java
|
mit
|
fca2e006c8e8e4386effd3b5f4ab4d4bce071531
| 0
|
Team254/FRC-2015,Team254/FRC-2015,Team254/FRC-2015,Team254/FRC-2015
|
package com.team254.frc2015.behavior;
import com.team254.frc2015.Constants;
import com.team254.frc2015.ElevatorSafety;
import com.team254.frc2015.HardwareAdaptor;
import com.team254.frc2015.behavior.routines.*;
import com.team254.frc2015.subsystems.BottomCarriage;
import com.team254.frc2015.subsystems.Drive;
import com.team254.frc2015.subsystems.Intake;
import com.team254.frc2015.subsystems.TopCarriage;
import com.team254.lib.util.StateHolder;
import com.team254.lib.util.Tappable;
import java.util.Optional;
public class BehaviorManager implements Tappable {
public boolean isZero(double val) {
return val < 0.0001 && val > -0.0001;
}
protected Drive drive = HardwareAdaptor.kDrive;
protected TopCarriage top_carriage = HardwareAdaptor.kTopCarriage;
protected BottomCarriage bottom_carriage = HardwareAdaptor.kBottomCarriage;
protected Intake intake = HardwareAdaptor.kIntake;
private ElevatorSafety.Setpoints m_elevator_setpoints = new ElevatorSafety.Setpoints();
private boolean m_top_jogging = false;
private boolean m_bottom_jogging = false;
private Routine m_cur_routine = null;
private RobotSetpoints m_setpoints;
private ManualRoutine m_manual_routine = new ManualRoutine();
public static SimplePresetRoutine rammingModePresetRoutine = new SimplePresetRoutine() {
@Override
public String getName() {
return "Ramming";
}
@Override
public void setPresets() {
m_bottom_height_setpoint = Optional.of(0.0);
m_top_height_setpoint = Optional.of(6.0);
m_preset_setpoints.intake_action = RobotSetpoints.IntakeAction.OPEN;
}
};
/*public static SimplePresetRoutine coopPresetRoutine = new SimplePresetRoutine() {
@Override
public void setPresets() {
m_bottom_height_setpoint = Optional.of(40.0);
m_top_height_setpoint = Optional.of(Constants.kTopCarriageMaxPositionInches);
m_preset_setpoints.roller_action = RobotSetpoints.RollerAction.EXHAUST;
m_preset_setpoints.intake_action = RobotSetpoints.IntakeAction.OPEN;
}
};*/
private void setNewRoutine(Routine new_routine) {
boolean needs_cancel = new_routine != m_cur_routine && m_cur_routine != null;
boolean needs_reset = new_routine != m_cur_routine && new_routine != null;
if (needs_cancel) {
m_cur_routine.cancel();
}
m_cur_routine = new_routine;
if (needs_reset) {
m_cur_routine.reset();
}
}
public void reset() {
setNewRoutine(null);
}
public BehaviorManager() {
m_setpoints = new RobotSetpoints();
m_setpoints.reset();
}
public void update(Commands commands) {
m_setpoints.reset();
if (m_cur_routine != null && m_cur_routine.isFinished()) {
setNewRoutine(null);
}
if (commands.cancel_current_routine) {
setNewRoutine(null);
} else if (commands.can_grabber_request == Commands.CanGrabberRequests.DO_STAGE && !(m_cur_routine instanceof CanGrabRoutine)) {
setNewRoutine(new CanGrabRoutine());
} else if (commands.human_player_mode && !(m_cur_routine instanceof HumanLoadRoutine)) {
setNewRoutine(new HumanLoadRoutine());
} else if (!commands.human_player_mode && m_cur_routine instanceof HumanLoadRoutine) {
setNewRoutine(null);
} else if (commands.preset_request == Commands.PresetRequest.RAMMING) {
setNewRoutine(rammingModePresetRoutine);
} else if (commands.preset_request == Commands.PresetRequest.COOP && !(m_cur_routine instanceof CoopRoutine)) {
setNewRoutine(new CoopRoutine());
}
if (m_cur_routine != null) {
m_setpoints = m_cur_routine.update(commands, m_setpoints);
}
// Get manual m_setpoints
m_setpoints = m_manual_routine.update(commands, m_setpoints);
boolean can_close_intake = true;
boolean can_control_top_carriage_pivot = true;
boolean can_control_top_carriage_grabber = true;
boolean can_control_bottom_carriage = true;
double bottom_jog_speed = 0.0;
double top_jog_speed = 0.0;
// Set elevator m_setpoints and jog
if (bottom_carriage.isInitialized()) {
if (m_setpoints.bottom_open_loop_jog.isPresent()) {
bottom_jog_speed = m_setpoints.bottom_open_loop_jog.get();
m_bottom_jogging = true;
} else if (m_setpoints.m_elevator_setpoints.bottom_setpoint.isPresent()) {
bottom_carriage.setPositionSetpoint(m_setpoints.m_elevator_setpoints.bottom_setpoint.get(), true);
m_bottom_jogging = false;
} else if (m_bottom_jogging && !m_setpoints.bottom_open_loop_jog.isPresent()) {
bottom_carriage.setOpenLoop(0, true);
m_bottom_jogging = false;
}
}
if (top_carriage.isInitialized()) {
if (m_setpoints.top_open_loop_jog.isPresent()) {
top_jog_speed = m_setpoints.top_open_loop_jog.get();
m_top_jogging = true;
} else if (m_setpoints.m_elevator_setpoints.top_setpoint.isPresent()) {
top_carriage.setPositionSetpoint(m_setpoints.m_elevator_setpoints.top_setpoint.get(), true);
m_top_jogging = false;
} else if (m_top_jogging && !m_setpoints.top_open_loop_jog.isPresent()) {
top_carriage.setOpenLoop(0, true);
m_top_jogging = false;
}
}
if (m_bottom_jogging || m_top_jogging) {
bottom_carriage.setOpenLoop(bottom_jog_speed, isZero(bottom_jog_speed));
top_carriage.setOpenLoop(top_jog_speed, isZero(top_jog_speed));
}
m_elevator_setpoints = ElevatorSafety.generateSafeSetpoints(m_setpoints.m_elevator_setpoints);
if (m_elevator_setpoints.top_setpoint.isPresent() && !m_top_jogging && !m_bottom_jogging) {
top_carriage.setPositionSetpoint(m_elevator_setpoints.top_setpoint.get(), true);
}
if (m_elevator_setpoints.bottom_setpoint.isPresent() && !m_bottom_jogging && !m_bottom_jogging) {
bottom_carriage.setPositionSetpoint(m_elevator_setpoints.bottom_setpoint.get(), true);
}
// can_close_intake = ElevatorSafety.canCloseIntake();
// Top carriage actions.
if (can_control_top_carriage_grabber
&& m_setpoints.claw_action == RobotSetpoints.TopCarriageClawAction.OPEN) {
top_carriage.setGrabberOpen(true);
can_control_top_carriage_pivot = false;
} else if (can_control_top_carriage_grabber
&& m_setpoints.claw_action == RobotSetpoints.TopCarriageClawAction.CLOSE) {
top_carriage.setGrabberOpen(false);
}
if (can_control_top_carriage_pivot
&& m_setpoints.pivot_action == RobotSetpoints.TopCarriagePivotAction.PIVOT_DOWN) {
top_carriage.setPivotDown(true);
} else if (can_control_top_carriage_pivot
&& m_setpoints.pivot_action == RobotSetpoints.TopCarriagePivotAction.PIVOT_UP) {
top_carriage.setPivotDown(false);
}
// Bottom carriage actions.
if (can_control_bottom_carriage
&& m_setpoints.flapper_action == RobotSetpoints.BottomCarriageFlapperAction.OPEN) {
bottom_carriage.setFlapperOpen(true);
} else if (can_control_bottom_carriage
&& m_setpoints.flapper_action == RobotSetpoints.BottomCarriageFlapperAction.CLOSE) {
bottom_carriage.setFlapperOpen(false);
}
/*
if (can_control_bottom_carriage
&& m_setpoints.bottom_carriage_pusher_action == Commands.BottomCarriagePusherRequest.EXTEND) {
bottom_carriage.setPusherExtended(true);
} else if (can_control_bottom_carriage
&& m_setpoints.bottom_carriage_pusher_action == Commands.BottomCarriagePusherRequest.RETRACT) {
bottom_carriage.setPusherExtended(false);
}
*/
// Intake actions.
if (!can_close_intake || m_setpoints.intake_action == RobotSetpoints.IntakeAction.OPEN || m_setpoints.intake_action == RobotSetpoints.IntakeAction.PREFER_OPEN) {
// Open intake
intake.open();
} else if (m_setpoints.intake_action == RobotSetpoints.IntakeAction.CLOSE || m_setpoints.intake_action == RobotSetpoints.IntakeAction.PREFER_CLOSE) {
// Close intake
intake.close();
} else if (m_setpoints.intake_action == RobotSetpoints.IntakeAction.NEUTRAL) {
// Neutral intake
intake.neutral();
}
// Roller actions.
if (m_setpoints.roller_action == RobotSetpoints.RollerAction.INTAKE) {
// Run intake inwards.
intake.setSpeed(Constants.kManualIntakeSpeed);
} else if (m_setpoints.roller_action == RobotSetpoints.RollerAction.EXHAUST) {
// Run intake outwards.
intake.setSpeed(-Constants.kManualIntakeSpeed);
} else if (m_setpoints.roller_action == RobotSetpoints.RollerAction.EXHAUST_COOP) {
// Run intake outwards.
intake.setSpeed(-Constants.kCoopIntakeSpeed);
} else if (m_setpoints.roller_action == RobotSetpoints.RollerAction.EXHAUST_COOP_SLOW) {
// Run intake outwards.
intake.setSpeed(-Constants.kCoopSlowIntakeSpeed);
} else {
// Stop intake.
intake.setSpeed(0.0);
}
}
@Override
public void getState(StateHolder states) {
states.put("mode", m_cur_routine != null ? m_cur_routine.getName() : "---" );
}
@Override
public String getName() {
return "behaviors";
}
}
|
src/com/team254/frc2015/behavior/BehaviorManager.java
|
package com.team254.frc2015.behavior;
import com.team254.frc2015.Constants;
import com.team254.frc2015.ElevatorSafety;
import com.team254.frc2015.HardwareAdaptor;
import com.team254.frc2015.behavior.routines.*;
import com.team254.frc2015.subsystems.BottomCarriage;
import com.team254.frc2015.subsystems.Drive;
import com.team254.frc2015.subsystems.Intake;
import com.team254.frc2015.subsystems.TopCarriage;
import com.team254.lib.util.StateHolder;
import com.team254.lib.util.Tappable;
import java.util.Optional;
public class BehaviorManager implements Tappable {
public boolean isZero(double val) {
return val < 0.0001 && val > -0.0001;
}
protected Drive drive = HardwareAdaptor.kDrive;
protected TopCarriage top_carriage = HardwareAdaptor.kTopCarriage;
protected BottomCarriage bottom_carriage = HardwareAdaptor.kBottomCarriage;
protected Intake intake = HardwareAdaptor.kIntake;
private ElevatorSafety.Setpoints m_elevator_setpoints = new ElevatorSafety.Setpoints();
private boolean m_top_jogging = false;
private boolean m_bottom_jogging = false;
private Routine m_cur_routine = null;
private RobotSetpoints m_setpoints;
private ManualRoutine m_manual_routine = new ManualRoutine();
public static SimplePresetRoutine rammingModePresetRoutine = new SimplePresetRoutine() {
@Override
public String getName() {
return "Ramming";
}
@Override
public void setPresets() {
m_bottom_height_setpoint = Optional.of(0.0);
m_top_height_setpoint = Optional.of(6.0);
m_preset_setpoints.intake_action = RobotSetpoints.IntakeAction.OPEN;
}
};
/*public static SimplePresetRoutine coopPresetRoutine = new SimplePresetRoutine() {
@Override
public void setPresets() {
m_bottom_height_setpoint = Optional.of(40.0);
m_top_height_setpoint = Optional.of(Constants.kTopCarriageMaxPositionInches);
m_preset_setpoints.roller_action = RobotSetpoints.RollerAction.EXHAUST;
m_preset_setpoints.intake_action = RobotSetpoints.IntakeAction.OPEN;
}
};*/
private void setNewRoutine(Routine new_routine) {
boolean needs_cancel = new_routine != m_cur_routine && m_cur_routine != null;
boolean needs_reset = new_routine != m_cur_routine && new_routine != null;
if (needs_cancel) {
m_cur_routine.cancel();
}
m_cur_routine = new_routine;
if (needs_reset) {
m_cur_routine.reset();
}
}
public void reset() {
setNewRoutine(null);
}
public BehaviorManager() {
m_setpoints = new RobotSetpoints();
m_setpoints.reset();
}
/**
 * Runs one behavior-arbitration cycle: selects the active routine from operator commands, merges the
 * routine's setpoints with manual overrides, then pushes the resulting setpoints and actuator
 * actions to the carriages, intake, and rollers.
 *
 * @param commands the operator commands for this control cycle
 */
public void update(Commands commands) {
    m_setpoints.reset();
    // Drop a routine that has run to completion.
    if (m_cur_routine != null && m_cur_routine.isFinished()) {
        setNewRoutine(null);
    }
    // Routine arbitration: explicit cancel wins, then can-grab, human-load, and preset requests.
    if (commands.cancel_current_routine) {
        setNewRoutine(null);
    } else if (commands.can_grabber_request == Commands.CanGrabberRequests.DO_STAGE && !(m_cur_routine instanceof CanGrabRoutine)) {
        setNewRoutine(new CanGrabRoutine());
    } else if (commands.human_player_mode && !(m_cur_routine instanceof HumanLoadRoutine)) {
        setNewRoutine(new HumanLoadRoutine());
    } else if (!commands.human_player_mode && m_cur_routine instanceof HumanLoadRoutine) {
        // Leaving human-player mode cancels the load routine.
        setNewRoutine(null);
    } else if (commands.preset_request == Commands.PresetRequest.RAMMING) {
        setNewRoutine(rammingModePresetRoutine);
    } else if (commands.preset_request == Commands.PresetRequest.COOP && !(m_cur_routine instanceof CoopRoutine)) {
        setNewRoutine(new CoopRoutine());
    }
    if (m_cur_routine != null) {
        m_setpoints = m_cur_routine.update(commands, m_setpoints);
    }
    // Manual setpoints are applied last so they override the routine's requests.
    m_setpoints = m_manual_routine.update(commands, m_setpoints);
    boolean can_close_intake = true;
    boolean can_control_top_carriage_pivot = true;
    boolean can_control_top_carriage_grabber = true;
    boolean can_control_bottom_carriage = true;
    // Bottom carriage: an open-loop jog always wins over a position setpoint. When a jog ends with no
    // new setpoint, command zero power once so the carriage does not coast at its last jog speed.
    if (bottom_carriage.isInitialized()) {
        if (m_setpoints.bottom_open_loop_jog.isPresent()) {
            double jog = m_setpoints.bottom_open_loop_jog.get();
            bottom_carriage.setOpenLoop(jog, isZero(jog));
            m_bottom_jogging = true;
        } else if (m_setpoints.m_elevator_setpoints.bottom_setpoint.isPresent()) {
            bottom_carriage.setPositionSetpoint(m_setpoints.m_elevator_setpoints.bottom_setpoint.get(), true);
            m_bottom_jogging = false;
        } else if (m_bottom_jogging && !m_setpoints.bottom_open_loop_jog.isPresent()) {
            bottom_carriage.setOpenLoop(0, true);
            m_bottom_jogging = false;
        }
    }
    // Top carriage: same jog-over-setpoint policy as the bottom carriage.
    if (top_carriage.isInitialized()) {
        if (m_setpoints.top_open_loop_jog.isPresent()) {
            double jog = m_setpoints.top_open_loop_jog.get();
            top_carriage.setOpenLoop(jog, isZero(jog));
            m_top_jogging = true;
        } else if (m_setpoints.m_elevator_setpoints.top_setpoint.isPresent()) {
            top_carriage.setPositionSetpoint(m_setpoints.m_elevator_setpoints.top_setpoint.get(), true);
            m_top_jogging = false;
        } else if (m_top_jogging && !m_setpoints.top_open_loop_jog.isPresent()) {
            top_carriage.setOpenLoop(0, true);
            m_top_jogging = false;
        }
    }
    // Run the requested positions through the safety filter, and only apply the filtered setpoints
    // when NEITHER carriage is under manual jog control.
    m_elevator_setpoints = ElevatorSafety.generateSafeSetpoints(m_setpoints.m_elevator_setpoints);
    if (m_elevator_setpoints.top_setpoint.isPresent() && !m_top_jogging && !m_bottom_jogging) {
        top_carriage.setPositionSetpoint(m_elevator_setpoints.top_setpoint.get(), true);
    }
    // BUGFIX: this guard previously tested !m_bottom_jogging twice, so jogging the top carriage did
    // not release the bottom position controller. Both jog flags must be clear, mirroring the top
    // carriage guard above ("give up controllers when any jog button is hit").
    if (m_elevator_setpoints.bottom_setpoint.isPresent() && !m_top_jogging && !m_bottom_jogging) {
        bottom_carriage.setPositionSetpoint(m_elevator_setpoints.bottom_setpoint.get(), true);
    }
    // can_close_intake = ElevatorSafety.canCloseIntake();
    // Top carriage actions.
    if (can_control_top_carriage_grabber
            && m_setpoints.claw_action == RobotSetpoints.TopCarriageClawAction.OPEN) {
        top_carriage.setGrabberOpen(true);
        // Opening the claw locks out pivot control for the rest of this cycle.
        can_control_top_carriage_pivot = false;
    } else if (can_control_top_carriage_grabber
            && m_setpoints.claw_action == RobotSetpoints.TopCarriageClawAction.CLOSE) {
        top_carriage.setGrabberOpen(false);
    }
    if (can_control_top_carriage_pivot
            && m_setpoints.pivot_action == RobotSetpoints.TopCarriagePivotAction.PIVOT_DOWN) {
        top_carriage.setPivotDown(true);
    } else if (can_control_top_carriage_pivot
            && m_setpoints.pivot_action == RobotSetpoints.TopCarriagePivotAction.PIVOT_UP) {
        top_carriage.setPivotDown(false);
    }
    // Bottom carriage actions.
    if (can_control_bottom_carriage
            && m_setpoints.flapper_action == RobotSetpoints.BottomCarriageFlapperAction.OPEN) {
        bottom_carriage.setFlapperOpen(true);
    } else if (can_control_bottom_carriage
            && m_setpoints.flapper_action == RobotSetpoints.BottomCarriageFlapperAction.CLOSE) {
        bottom_carriage.setFlapperOpen(false);
    }
    /*
    if (can_control_bottom_carriage
            && m_setpoints.bottom_carriage_pusher_action == Commands.BottomCarriagePusherRequest.EXTEND) {
        bottom_carriage.setPusherExtended(true);
    } else if (can_control_bottom_carriage
            && m_setpoints.bottom_carriage_pusher_action == Commands.BottomCarriagePusherRequest.RETRACT) {
        bottom_carriage.setPusherExtended(false);
    }
    */
    // Intake actions. A forced-open condition (!can_close_intake) overrides any close request.
    if (!can_close_intake || m_setpoints.intake_action == RobotSetpoints.IntakeAction.OPEN || m_setpoints.intake_action == RobotSetpoints.IntakeAction.PREFER_OPEN) {
        // Open intake
        intake.open();
    } else if (m_setpoints.intake_action == RobotSetpoints.IntakeAction.CLOSE || m_setpoints.intake_action == RobotSetpoints.IntakeAction.PREFER_CLOSE) {
        // Close intake
        intake.close();
    } else if (m_setpoints.intake_action == RobotSetpoints.IntakeAction.NEUTRAL) {
        // Neutral intake
        intake.neutral();
    }
    // Roller actions.
    if (m_setpoints.roller_action == RobotSetpoints.RollerAction.INTAKE) {
        // Run intake inwards.
        intake.setSpeed(Constants.kManualIntakeSpeed);
    } else if (m_setpoints.roller_action == RobotSetpoints.RollerAction.EXHAUST) {
        // Run intake outwards.
        intake.setSpeed(-Constants.kManualIntakeSpeed);
    } else if (m_setpoints.roller_action == RobotSetpoints.RollerAction.EXHAUST_COOP) {
        // Run intake outwards at coop speed.
        intake.setSpeed(-Constants.kCoopIntakeSpeed);
    } else if (m_setpoints.roller_action == RobotSetpoints.RollerAction.EXHAUST_COOP_SLOW) {
        // Run intake outwards slowly for coop stacking.
        intake.setSpeed(-Constants.kCoopSlowIntakeSpeed);
    } else {
        // Stop intake.
        intake.setSpeed(0.0);
    }
}
/**
 * Reports the name of the currently-running routine (or {@code "---"} when idle) under the
 * {@code "mode"} key.
 */
@Override
public void getState(StateHolder states) {
    String mode;
    if (m_cur_routine == null) {
        mode = "---";
    } else {
        mode = m_cur_routine.getName();
    }
    states.put("mode", mode);
}
/**
 * Returns the fixed identifier for this behavior manager.
 */
@Override
public String getName() {
    return "behaviors";
}
}
|
Give up controllers when any jog button is hit
|
src/com/team254/frc2015/behavior/BehaviorManager.java
|
Give up controllers when any jog button is hit
|
|
Java
|
mit
|
43e823b5add084d8424865bf022e5559a7ade6b1
| 0
|
mhogrefe/wheels
|
package mho.wheels.iterables;
import mho.wheels.structures.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.*;
import java.util.function.Function;
import java.util.function.Predicate;
import static mho.wheels.ordering.Ordering.*;
/**
* Methods for generating and manipulating {@link Iterable}s. The equivalents of every function in Haskell's
* {@code Data.List} module may be found here (except for {@code permutations} and {@code subsequences}, which are in
* {@link mho.wheels.math.Combinatorics}).
*/
public final class IterableUtils {
/**
 * Disallow instantiation: every member of this class is static.
 */
private IterableUtils() {}
/**
 * Appends every element of an {@code Iterable} to a {@link Collection}, preserving iteration order.
 * Only terminates for finite {@code Iterable}s.
 *
 * <ul>
 *  <li>{@code xs} must be finite.</li>
 *  <li>{@code collection} must be non-null.</li>
 *  <li>{@code collection} must be able to hold every element of {@code xs}.</li>
 * </ul>
 *
 * @param xs the source {@code Iterable}
 * @param collection the destination {@code Collection}
 * @param <T> the element type
 */
public static <T> void addTo(@NotNull Iterable<T> xs, @NotNull Collection<T> collection) {
    Iterator<T> it = xs.iterator();
    while (it.hasNext()) {
        collection.add(it.next());
    }
}
/**
 * Appends every character of a {@code String} to a {@code Collection}, preserving character order.
 *
 * <ul>
 *  <li>{@code s} must be non-null.</li>
 *  <li>{@code collection} must be non-null.</li>
 *  <li>{@code collection} must be able to hold every character of {@code s}.</li>
 * </ul>
 *
 * @param s the source string
 * @param collection the destination collection
 */
public static void addTo(@NotNull String s, @NotNull Collection<Character> collection) {
    for (char c : s.toCharArray()) {
        collection.add(c);
    }
}
/**
 * Copies an {@code Iterable} into a fresh, modifiable {@link List}. Only works for finite
 * {@code Iterable}s; modifying the result never affects the source.
 *
 * <ul>
 *  <li>{@code xs} must be finite.</li>
 *  <li>The result is non-null.</li>
 * </ul>
 *
 * @param xs the source {@code Iterable}
 * @param <T> the element type
 * @return a {@code List} holding the elements of {@code xs} in their original order
 */
public static @NotNull <T> List<T> toList(@NotNull Iterable<T> xs) {
    List<T> result = new ArrayList<>();
    for (T x : xs) {
        result.add(x);
    }
    return result;
}
/**
 * Copies a {@code String}'s characters into a fresh, modifiable {@code List}.
 *
 * <ul>
 *  <li>{@code s} may be any {@code String}.</li>
 *  <li>The result is non-null.</li>
 * </ul>
 *
 * @param s the source {@code String}
 * @return a {@code List} holding the characters of {@code s} in their original order
 */
public static @NotNull List<Character> toList(@NotNull String s) {
    // Presize to the exact character count to avoid intermediate growth.
    List<Character> result = new ArrayList<>(s.length());
    for (int i = 0; i < s.length(); i++) {
        result.add(s.charAt(i));
    }
    return result;
}
/**
 * Renders {@code xs} as a comma-separated list in square brackets, each element converted via its own
 * {@code toString}. Only works for finite {@code Iterable}s.
 *
 * <ul>
 *  <li>{@code xs} must be finite.</li>
 *  <li>The result begins with {@code '['} and ends with {@code ']'}.</li>
 * </ul>
 *
 * @param xs the {@code Iterable} to render
 * @param <T> the element type
 * @return a {@code String} representation of {@code xs}
 */
public static @NotNull <T> String toString(@NotNull Iterable<T> xs) {
    // Collect into a List and delegate to AbstractCollection.toString for the "[a, b, c]" format.
    List<T> elements = new ArrayList<>();
    for (T x : xs) {
        elements.add(x);
    }
    return elements.toString();
}
/**
 * Creates a {@code String} representation of {@code xs}, displaying at most {@code size} elements. The first
 * {@code size} elements are converted to a {@code String} and those {@code String}s are placed in a
 * comma-separated list surrounded by square brackets. If the {@code Iterable} contains more than {@code size}
 * elements, an ellipsis ({@code ...}) is added at the end of the list.
 *
 * <ul>
 *  <li>{@code size} must be non-negative.</li>
 *  <li>{@code xs} may be any {@code Iterable}.</li>
 *  <li>The result begins with {@code '['} and ends with {@code ']'}.</li>
 * </ul>
 *
 * @param size the maximum number of elements displayed
 * @param xs the {@code Iterable}
 * @param <T> the {@code Iterable}'s element type
 * @return a {@code String} representation of {@code xs}
 */
public static @NotNull <T> String toString(int size, @NotNull Iterable<T> xs) {
    if (size < 0)
        throw new IllegalArgumentException("size cannot be negative");
    if (size == 0) {
        // With no elements shown, the only question is whether there was anything to elide.
        return isEmpty(xs) ? "[]" : "[...]";
    }
    // Take size + 1 elements so truncation can be detected without evaluating more of a
    // (possibly infinite) Iterable than necessary.
    List<T> list = toList(take(size + 1, xs));
    String listString = toList(take(size, list)).toString();
    if (list.size() > size) {
        // init() drops the trailing ']'; re-close the list with an ellipsis marker.
        listString = init(listString) + ", ...]";
    }
    return listString;
}
/**
 * Views a {@code String} as an {@code Iterable} of {@code Character}s, in order. Uses O(1) additional
 * memory; the returned {@code Iterable} does not support removal.
 *
 * <ul>
 *  <li>{@code s} must be non-null.</li>
 *  <li>The result is finite and does not contain any nulls.</li>
 * </ul>
 *
 * @param s the backing {@code String}
 * @return an {@code Iterable} over the characters of {@code s} in their original order
 */
public static @NotNull Iterable<Character> fromString(@NotNull String s) {
    return () -> new Iterator<Character>() {
        private int position = 0;

        @Override
        public boolean hasNext() {
            return position < s.length();
        }

        @Override
        public Character next() {
            // Advance before the charAt call, matching post-increment argument evaluation.
            int index = position;
            position++;
            return s.charAt(index);
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
/**
 * Concatenates an {@code Iterable} of {@code Character}s into a {@code String}, preserving order. Only
 * works for finite {@code Iterable}s.
 *
 * <ul>
 *  <li>{@code cs} must be finite and cannot contain nulls.</li>
 *  <li>The result is non-null.</li>
 * </ul>
 *
 * @param cs the {@code Iterable} of {@code Character}s
 * @return the {@code String} built from the characters of {@code cs} in their original order
 */
public static @NotNull String charsToString(@NotNull Iterable<Character> cs) {
    StringBuilder builder = new StringBuilder();
    for (Character c : cs) {
        // Explicit unbox: appends the primitive char and throws NPE on a null element.
        builder.append((char) c);
    }
    return builder.toString();
}
/**
 * All {@code Byte}s greater than or equal to {@code a}, ascending; stops at {@code Byte.MAX_VALUE}
 * without wrapping. Removal is unsupported. Length is 2<sup>7</sup>–{@code a}.
 *
 * @param a the inclusive starting value
 * @return an arithmetic progression with increment 1, from {@code a} to {@code Byte.MAX_VALUE}
 */
public static @NotNull Iterable<Byte> range(byte a) {
    return range(a, Byte.MAX_VALUE);
}

/**
 * All {@code Short}s greater than or equal to {@code a}, ascending; stops at {@code Short.MAX_VALUE}
 * without wrapping. Removal is unsupported. Length is 2<sup>15</sup>–{@code a}.
 *
 * @param a the inclusive starting value
 * @return an arithmetic progression with increment 1, from {@code a} to {@code Short.MAX_VALUE}
 */
public static @NotNull Iterable<Short> range(short a) {
    return range(a, Short.MAX_VALUE);
}

/**
 * All {@code Integer}s greater than or equal to {@code a}, ascending; stops at
 * {@code Integer.MAX_VALUE} without wrapping. Removal is unsupported. Length is 2<sup>31</sup>–{@code a}.
 *
 * @param a the inclusive starting value
 * @return an arithmetic progression with increment 1, from {@code a} to {@code Integer.MAX_VALUE}
 */
public static @NotNull Iterable<Integer> range(int a) {
    return range(a, Integer.MAX_VALUE);
}

/**
 * All {@code Long}s greater than or equal to {@code a}, ascending; stops at {@code Long.MAX_VALUE}
 * without wrapping. Removal is unsupported. Length is 2<sup>63</sup>–{@code a}.
 *
 * @param a the inclusive starting value
 * @return an arithmetic progression with increment 1, from {@code a} to {@code Long.MAX_VALUE}
 */
public static @NotNull Iterable<Long> range(long a) {
    return range(a, Long.MAX_VALUE);
}

/**
 * All {@code BigInteger}s greater than or equal to {@code a}, ascending. Infinite; removal is
 * unsupported.
 *
 * @param a the inclusive starting value; must be non-null
 * @return an infinite arithmetic progression with increment 1, starting at {@code a}
 */
public static @NotNull Iterable<BigInteger> range(@NotNull BigInteger a) {
    return iterate(i -> i.add(BigInteger.ONE), a);
}

/**
 * All {@code BigDecimal}s of the form {@code a}+n for non-negative integer n, ascending. Infinite;
 * removal is unsupported.
 *
 * @param a the inclusive starting value; must be non-null
 * @return an infinite arithmetic progression with increment 1, starting at {@code a}
 */
public static @NotNull Iterable<BigDecimal> range(@NotNull BigDecimal a) {
    return iterate(i -> i.add(BigDecimal.ONE), a);
}

/**
 * All {@code Character}s greater than or equal to {@code a}, ascending; stops at
 * {@code Character.MAX_VALUE} ({@code '\uffff'}) without wrapping. Removal is unsupported. Length is
 * 2<sup>16</sup>–{@code a}.
 *
 * @param a the inclusive starting character
 * @return consecutive {@code Character}s from {@code a} to {@code '\uffff'}
 */
public static @NotNull Iterable<Character> range(char a) {
    return range(a, Character.MAX_VALUE);
}
/**
 * All {@code float}s roughly of the form {@code a}+n for non-negative integer n, ascending. {@code a}
 * is widened to a {@code BigDecimal} internally to minimize rounding error; large {@code a} may still
 * yield runs of identical {@code float}s. {@code -Infinity} repeats forever; {@code +Infinity} yields a
 * single element; a negative-zero start is preserved as the first element. {@code NaN} is rejected.
 * Removal is unsupported.
 *
 * @param a the inclusive starting value; cannot be {@code NaN}
 * @return an arithmetic progression with increment 1, starting at {@code a}
 */
public static @NotNull Iterable<Float> range(float a) {
    if (Float.isNaN(a)) {
        throw new IllegalArgumentException("cannot begin a range with NaN");
    }
    if (Float.isInfinite(a)) {
        if (a < 0) {
            return cycle(Arrays.asList(Float.NEGATIVE_INFINITY));
        } else {
            return Arrays.asList(Float.POSITIVE_INFINITY);
        }
    }
    // Float.toString round-trips the exact shortest decimal representation of a.
    Iterable<Float> steps = map(BigDecimal::floatValue, range(new BigDecimal(Float.toString(a))));
    if (Float.valueOf(a).equals(-0.0f)) {
        // BigDecimal has no negative zero, so splice it back in as the first element.
        return cons(-0.0f, tail(steps));
    }
    return steps;
}

/**
 * All {@code double}s roughly of the form {@code a}+n for non-negative integer n, ascending. {@code a}
 * is widened to a {@code BigDecimal} internally to minimize rounding error; large {@code a} may still
 * yield runs of identical {@code double}s. {@code -Infinity} repeats forever; {@code +Infinity} yields
 * a single element; a negative-zero start is preserved as the first element. {@code NaN} is rejected.
 * Removal is unsupported.
 *
 * @param a the inclusive starting value; cannot be {@code NaN}
 * @return an arithmetic progression with increment 1, starting at {@code a}
 */
public static @NotNull Iterable<Double> range(double a) {
    if (Double.isNaN(a)) {
        throw new IllegalArgumentException("cannot begin a range with NaN");
    }
    if (Double.isInfinite(a)) {
        if (a < 0) {
            return cycle(Arrays.asList(Double.NEGATIVE_INFINITY));
        } else {
            return Arrays.asList(Double.POSITIVE_INFINITY);
        }
    }
    Iterable<Double> steps = map(BigDecimal::doubleValue, range(BigDecimal.valueOf(a)));
    if (Double.valueOf(a).equals(-0.0)) {
        // BigDecimal has no negative zero, so splice it back in as the first element.
        return cons(-0.0, tail(steps));
    }
    return steps;
}
/**
 * All {@code Byte}s from {@code a} through {@code b} inclusive, ascending; empty when
 * {@code a > b}. Removal is unsupported. Length is max({@code b}–{@code a}+1, 0).
 *
 * @param a the inclusive lower bound
 * @param b the inclusive upper bound
 * @return consecutive {@code Byte}s from {@code a} to {@code b}
 */
public static @NotNull Iterable<Byte> range(byte a, byte b) {
    if (a > b) return new ArrayList<>();
    return () -> new Iterator<Byte>() {
        private byte current = a;
        private boolean done = false;

        @Override
        public boolean hasNext() {
            return !done;
        }

        @Override
        public Byte next() {
            byte value = current;
            // Mark exhaustion when the upper bound is yielded; the increment below may wrap,
            // but hasNext() already reports false by then.
            done = value == b;
            current++;
            return value;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}

/**
 * All {@code Short}s from {@code a} through {@code b} inclusive, ascending; empty when
 * {@code a > b}. Removal is unsupported. Length is max({@code b}–{@code a}+1, 0).
 *
 * @param a the inclusive lower bound
 * @param b the inclusive upper bound
 * @return consecutive {@code Short}s from {@code a} to {@code b}
 */
public static @NotNull Iterable<Short> range(short a, short b) {
    if (a > b) return new ArrayList<>();
    return () -> new Iterator<Short>() {
        private short current = a;
        private boolean done = false;

        @Override
        public boolean hasNext() {
            return !done;
        }

        @Override
        public Short next() {
            short value = current;
            done = value == b;
            current++;
            return value;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}

/**
 * All {@code Integer}s from {@code a} through {@code b} inclusive, ascending; empty when
 * {@code a > b}. Removal is unsupported. Length is max({@code b}–{@code a}+1, 0).
 *
 * @param a the inclusive lower bound
 * @param b the inclusive upper bound
 * @return consecutive {@code Integer}s from {@code a} to {@code b}
 */
public static @NotNull Iterable<Integer> range(int a, int b) {
    if (a > b) return new ArrayList<>();
    return () -> new Iterator<Integer>() {
        private int current = a;
        private boolean done = false;

        @Override
        public boolean hasNext() {
            return !done;
        }

        @Override
        public Integer next() {
            int value = current;
            done = value == b;
            current++;
            return value;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}

/**
 * All {@code Long}s from {@code a} through {@code b} inclusive, ascending; empty when
 * {@code a > b}. Removal is unsupported. Length is max({@code b}–{@code a}+1, 0).
 *
 * @param a the inclusive lower bound
 * @param b the inclusive upper bound
 * @return consecutive {@code Long}s from {@code a} to {@code b}
 */
public static @NotNull Iterable<Long> range(long a, long b) {
    if (a > b) return new ArrayList<>();
    return () -> new Iterator<Long>() {
        private long current = a;
        private boolean done = false;

        @Override
        public boolean hasNext() {
            return !done;
        }

        @Override
        public Long next() {
            long value = current;
            done = value == b;
            current++;
            return value;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
/**
 * All {@code BigInteger}s from {@code a} through {@code b} inclusive, ascending; empty when
 * {@code a > b}. Removal is unsupported. Length is max({@code b}–{@code a}+1, 0).
 *
 * @param a the inclusive lower bound; must be non-null
 * @param b the inclusive upper bound; must be non-null
 * @return consecutive {@code BigInteger}s from {@code a} to {@code b}
 */
public static @NotNull Iterable<BigInteger> range(@NotNull BigInteger a, @NotNull BigInteger b) {
    if (gt(a, b)) return new ArrayList<>();
    return () -> new Iterator<BigInteger>() {
        private BigInteger current = a;
        private boolean done = false;

        @Override
        public boolean hasNext() {
            return !done;
        }

        @Override
        public BigInteger next() {
            BigInteger value = current;
            // Exhausted once the upper bound has been yielded.
            done = value.equals(b);
            current = current.add(BigInteger.ONE);
            return value;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}

/**
 * All {@code BigDecimal}s of the form {@code a}+n (integer n ≥ 0) that are at most {@code b},
 * ascending; empty when {@code a > b}. Removal is unsupported. Length is
 * max(⌊{@code b}–{@code a}⌋+1, 0).
 *
 * @param a the inclusive lower bound; must be non-null
 * @param b the inclusive upper bound; must be non-null
 * @return an arithmetic progression with increment 1, from {@code a} up to at most {@code b}
 */
public static @NotNull Iterable<BigDecimal> range(@NotNull BigDecimal a, @NotNull BigDecimal b) {
    if (gt(a, b)) return new ArrayList<>();
    return () -> new Iterator<BigDecimal>() {
        private BigDecimal current = a;
        private boolean done = false;

        @Override
        public boolean hasNext() {
            return !done;
        }

        @Override
        public BigDecimal next() {
            BigDecimal value = current;
            current = current.add(BigDecimal.ONE);
            // Exhausted once the next step would exceed the upper bound.
            done = gt(current, b);
            return value;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
/**
 * All {@code Character}s from {@code a} through {@code b} inclusive, ascending; empty when
 * {@code a > b}. Removal is unsupported. Length is max({@code b}–{@code a}+1, 0).
 *
 * @param a the inclusive starting character
 * @param b the inclusive ending character
 * @return consecutive {@code Character}s from {@code a} to {@code b}
 */
public static @NotNull Iterable<Character> range(char a, char b) {
    if (a > b) return new ArrayList<>();
    return () -> new Iterator<Character>() {
        private char current = a;
        private boolean done = false;

        @Override
        public boolean hasNext() {
            return !done;
        }

        @Override
        public Character next() {
            char value = current;
            done = value == b;
            current++;
            return value;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
/**
 * All {@code float}s between {@code a} and {@code b} inclusive, roughly of the form {@code a}+n for
 * non-negative integer n, ascending. Conversion through {@code BigDecimal} minimizes rounding error;
 * large values may still yield runs of identical {@code float}s. Empty when {@code a > b}; a single
 * element when {@code a == b}; {@code -Infinity} start with a larger bound repeats forever; a
 * negative-zero start with a nonnegative bound is preserved as the first element. {@code NaN} is
 * rejected for either argument. Removal is unsupported.
 *
 * @param a the inclusive starting value; cannot be {@code NaN}
 * @param b the inclusive upper bound; cannot be {@code NaN}
 * @return an arithmetic progression with increment 1 from {@code a} up to the largest value an
 *         integer away from {@code a} that is at most {@code b}
 */
public static @NotNull Iterable<Float> range(float a, float b) {
    if (Float.isNaN(a) || Float.isNaN(b)) {
        throw new IllegalArgumentException("cannot begin or end a range with NaN");
    }
    if (a == b) return Arrays.asList(a);
    if (a > b) return new ArrayList<>();
    if (Float.isInfinite(a)) {
        if (a < 0) {
            return cycle(Arrays.asList(Float.NEGATIVE_INFINITY));
        } else {
            return Arrays.asList(Float.POSITIVE_INFINITY);
        }
    }
    if (Float.isInfinite(b)) {
        // An unbounded top degenerates to the single-argument range.
        return range(a);
    }
    Iterable<Float> steps = map(
            BigDecimal::floatValue,
            range(new BigDecimal(Float.toString(a)), new BigDecimal(Float.toString(b)))
    );
    if (Float.valueOf(a).equals(-0.0f)) {
        // BigDecimal has no negative zero, so splice it back in as the first element.
        return cons(-0.0f, tail(steps));
    }
    return steps;
}

/**
 * All {@code double}s between {@code a} and {@code b} inclusive, roughly of the form {@code a}+n for
 * non-negative integer n, ascending. Conversion through {@code BigDecimal} minimizes rounding error;
 * large values may still yield runs of identical {@code double}s. Empty when {@code a > b}; a single
 * element when {@code a == b}; {@code -Infinity} start with a larger bound repeats forever; a
 * negative-zero start with a nonnegative bound is preserved as the first element. {@code NaN} is
 * rejected for either argument. Removal is unsupported.
 *
 * @param a the inclusive starting value; cannot be {@code NaN}
 * @param b the inclusive upper bound; cannot be {@code NaN}
 * @return an arithmetic progression with increment 1 from {@code a} up to the largest value an
 *         integer away from {@code a} that is at most {@code b}
 */
public static @NotNull Iterable<Double> range(double a, double b) {
    if (Double.isNaN(a) || Double.isNaN(b)) {
        throw new IllegalArgumentException("cannot begin or end a range with NaN");
    }
    if (a == b) return Arrays.asList(a);
    if (a > b) return new ArrayList<>();
    if (Double.isInfinite(a)) {
        if (a < 0) {
            return cycle(Arrays.asList(Double.NEGATIVE_INFINITY));
        } else {
            return Arrays.asList(Double.POSITIVE_INFINITY);
        }
    }
    if (Double.isInfinite(b)) {
        // An unbounded top degenerates to the single-argument range.
        return range(a);
    }
    Iterable<Double> steps = map(
            BigDecimal::doubleValue,
            range(new BigDecimal(Double.toString(a)), new BigDecimal(Double.toString(b)))
    );
    if (Double.valueOf(a).equals(-0.0)) {
        // BigDecimal has no negative zero, so splice it back in as the first element.
        return cons(-0.0, tail(steps));
    }
    return steps;
}
public static @NotNull Iterable<Byte> rangeBy(byte a, byte i) {
return () -> new Iterator<Byte>() {
private byte x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Byte next() {
byte oldX = x;
x += i;
reachedEnd = i > 0 ? x < a : x > a;
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull Iterable<Short> rangeBy(short a, short i) {
return () -> new Iterator<Short>() {
private short x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Short next() {
short oldX = x;
x += i;
reachedEnd = i > 0 ? x < a : x > a;
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull Iterable<Integer> rangeBy(int a, int i) {
return () -> new Iterator<Integer>() {
private int x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Integer next() {
int oldX = x;
x += i;
reachedEnd = i > 0 ? x < a : x > a;
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull Iterable<Long> rangeBy(long a, long i) {
return () -> new Iterator<Long>() {
private long x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Long next() {
long oldX = x;
x += i;
reachedEnd = i > 0 ? x < a : x > a;
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull Iterable<BigInteger> rangeBy(@NotNull BigInteger a, @NotNull BigInteger i) {
return () -> new Iterator<BigInteger>() {
private BigInteger x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public BigInteger next() {
BigInteger oldX = x;
x = x.add(i);
reachedEnd = i.signum() == 1 ? lt(x, a) : gt(x, a);
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull Iterable<BigDecimal> rangeBy(@NotNull BigDecimal a, @NotNull BigDecimal i) {
return () -> new Iterator<BigDecimal>() {
private BigDecimal x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public BigDecimal next() {
BigDecimal oldX = x;
x = x.add(i);
reachedEnd = i.signum() == 1 ? lt(x, a) : gt(x, a);
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull Iterable<Character> rangeBy(char a, int i) {
return () -> new Iterator<Character>() {
private char x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Character next() {
char oldX = x;
x += i;
reachedEnd = i > 0 ? x < a : x > a;
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static Iterable<Byte> rangeBy(byte a, byte i, byte b) {
if (i > 0 ? a > b : b > a) return new ArrayList<>();
return () -> new Iterator<Byte>() {
private byte x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Byte next() {
byte oldX = x;
x += i;
reachedEnd = i > 0 ? (x > b || x < a) : (x < b || x > a);
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static Iterable<Short> rangeBy(short a, short i, short b) {
if (i > 0 ? a > b : b > a) return new ArrayList<>();
return () -> new Iterator<Short>() {
private short x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Short next() {
short oldX = x;
x += i;
reachedEnd = i > 0 ? (x > b || x < a) : (x < b || x > a);
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static Iterable<Integer> rangeBy(int a, int i, int b) {
if (i > 0 ? a > b : b > a) return new ArrayList<>();
return () -> new Iterator<Integer>() {
private int x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Integer next() {
int oldX = x;
x += i;
reachedEnd = i > 0 ? (x > b || x < a) : (x < b || x > a);
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static Iterable<Long> rangeBy(long a, long i, long b) {
if (i > 0 ? a > b : b > a) return new ArrayList<>();
return () -> new Iterator<Long>() {
private long x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Long next() {
long oldX = x;
x += i;
reachedEnd = i > 0 ? (x > b || x < a) : (x < b || x > a);
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static Iterable<BigInteger> rangeBy(BigInteger a, BigInteger i, BigInteger b) {
if (i.signum() == 1 ? gt(a, b) : gt(b, a)) return new ArrayList<>();
return () -> new Iterator<BigInteger>() {
private BigInteger x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public BigInteger next() {
BigInteger oldX = x;
x = x.add(i);
reachedEnd = i.signum() == 1 ? gt(x, b) : lt(x, b);
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static Iterable<BigDecimal> rangeBy(BigDecimal a, BigDecimal i, BigDecimal b) {
if (i.signum() == 1 ? gt(a, b) : gt(b, a)) return new ArrayList<>();
return () -> new Iterator<BigDecimal>() {
private BigDecimal x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public BigDecimal next() {
BigDecimal oldX = x;
x = x.add(i);
reachedEnd = i.signum() == 1 ? gt(x, b) : lt(x, b);
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static Iterable<Character> rangeBy(char a, int i, char b) {
if (i > 0 ? a > b : b > a) return new ArrayList<>();
return () -> new Iterator<Character>() {
private char x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Character next() {
char oldX = x;
x += i;
reachedEnd = i > 0 ? (x > b || x < a) : (x < b || x > a);
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Equivalent of Haskell's {@code (:)} list constructor. Creates an {@code Iterable} whose first element is
* {@code x} and whose remaining elements are given by {@code xs}. {@code xs} may be infinite, in which case the
* result is also infinite. Uses O(1) additional memory. The {@code Iterable} produced does not support removing
* elements.
*
* <ul>
* <li>{@code x} can be anything.</li>
* <li>{@code xs} must be non-null.</li>
* <li>The result is a non-empty {@code Iterable}.</li>
* </ul>
*
* Result length is |{@code xs}|+1
*
* @param x the first element of the {@code Iterable} to be created
* @param xs the second-through-last elements of the {@code Iterable} to be created
* @param <T> the element type of the {@code Iterable} to be created
* @return the {@code Iterable} to be created
*/
public static @NotNull <T> Iterable<T> cons(@Nullable T x, @NotNull Iterable<T> xs) {
return () -> new Iterator<T>() {
private boolean readHead = false;
private final Iterator<T> xsi = xs.iterator();
@Override
public boolean hasNext() {
return !readHead || xsi.hasNext();
}
@Override
public T next() {
if (readHead) {
return xsi.next();
} else {
readHead = true;
return x;
}
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Equivalent of Haskell's {@code (:)} list constructor. Creates a {@code String} whose first character is
* {@code c} and whose remaining characters are given by {@code cs}. Uses O(n) additional memory, where n is the
* length of cs.
*
* <ul>
* <li>{@code c} can be anything.</li>
* <li>{@code cs} must be non-null.</li>
* <li>The result is a non-empty {@code String}.</li>
* </ul>
*
* Result length is |{@code cs}|+1
*
* @param c the first character of the {@code String} to be created
* @param cs the second-through-last characters of the {@code String} to be created
* @return the {@code String} to be created
*/
public static @NotNull String cons(char c, @NotNull String cs) {
return Character.toString(c) + cs;
}
/**
* Equivalent of Haskell's {@code (++)} operator. Creates an {@code Iterable} consisting of {@code xs}'s
* elements followed by {@code ys}'s elements. {@code xs} may be infinite, in which case the result will be equal
* to {@code xs}. {@code ys} may be infinite, in which case the result will also be infinite. Uses O(1)
* additional memory. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code xs} must be non-null.</li>
* <li>{@code ys} must be non-null.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code xs}|+|{@code ys}|
*
* @param xs an {@code Iterable}
* @param ys another {@code Iterable}
* @param <T> the element type of the {@code Iterable} to be created
* @return {@code xs} concatenated with {@code ys}
*/
public static @NotNull <T> Iterable<T> concat(@NotNull Iterable<T> xs, @NotNull Iterable<T> ys) {
return () -> new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
private final Iterator<T> ysi = ys.iterator();
@Override
public boolean hasNext() {
return xsi.hasNext() || ysi.hasNext();
}
@Override
public T next() {
return (xsi.hasNext() ? xsi : ysi).next();
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Equivalent of Haskell's {@code (++)} operator. Creates a {@code String} consisting of {@code s}'s characters
* followed by {@code t}'s characters. Uses O(n+m) additional memory, where n is the length of {@code s} and m is
* the length of {@code t}.
*
* <ul>
* <li>{@code s} must be non-null.</li>
* <li>{@code t} must be non-null.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code s}|+|{@code t}|
*
* @param s a {@code String}
* @param t a {@code String}
* @return {@code s} concatenated with {@code t}
*/
public static @NotNull String concat(@NotNull String s, @NotNull String t) {
return s + t;
}
/**
* Equivalent of Haskell's {@code head} function. Returns the first element of an {@code Iterable}. Works on
* infinite {@code Iterable}s. Uses O(1) additional memory.
*
* <ul>
* <li>{@code xs} must be non-empty.</li>
* <li>The result may be anything.</li>
* </ul>
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return the {@code Iterable}'s first element
*/
public static @Nullable <T> T head(@NotNull Iterable<T> xs) {
return xs.iterator().next();
}
/**
* Equivalent of Haskell's {@code head} function. Returns the first element of a {@code List}. Uses O(1)
* additional memory.
*
* <ul>
* <li>{@code xs} must be non-empty.</li>
* <li>The result may be anything.</li>
* </ul>
*
* @param xs a {@code List}
* @param <T> the {@code List}'s element type
* @return the {@code List}'s first element
*/
public static @Nullable <T> T head(@NotNull List<T> xs) {
return xs.get(0);
}
/**
* Equivalent of Haskell's {@code head} function. Returns the first element of a {@code SortedSet}. Uses O(1)
* additional memory.
*
* <ul>
* <li>{@code xs} must be non-empty.</li>
* <li>The result may be anything.</li>
* </ul>
*
* @param xs a {@code SortedSet}
* @param <T> the {@code SortedSet}'s element type
* @return the {@code SortedSet}'s first element
*/
public static @Nullable <T> T head(@NotNull SortedSet<T> xs) {
return xs.first();
}
/**
* Equivalent of Haskell's {@code head} function. Returns the first character of a {@code String}. Uses O(1)
* additional memory.
*
* <ul>
* <li>{@code s} must be non-empty.</li>
* <li>The result may be any {@code char}.</li>
* </ul>
*
* @param s a {@code String}
* @return the {@code String}'s first character
*/
public static char head(@NotNull String s) {
return s.charAt(0);
}
/**
* Equivalent of Haskell's {@code last} function. Returns the last element of an {@code Iterable}. Only works on
* finite {@code Iterable}s. Uses O(1) additional memory.
*
* <ul>
* <li>{@code xs} must be non-empty and finite.</li>
* <li>The result may be anything.</li>
* </ul>
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return the {@code Iterable}'s last element
*/
public static @Nullable <T> T last(@NotNull Iterable<T> xs) {
T previous = null;
boolean empty = true;
for (T x : xs) {
empty = false;
previous = x;
}
if (empty)
throw new NoSuchElementException();
return previous;
}
/**
* Equivalent of Haskell's {@code last} function. Returns the last element of a {@code List}. Uses O(1)
* additional memory.
*
* <ul>
* <li>{@code xs} must be non-empty.</li>
* <li>The result may be anything.</li>
* </ul>
*
* @param xs a {@code List}
* @param <T> the {@code List}'s element type
* @return the {@code List}'s last element
*/
public static @Nullable <T> T last(@NotNull List<T> xs) {
return xs.get(xs.size() - 1);
}
/**
* Equivalent of Haskell's {@code last} function. Returns the last element of a {@code SortedSet}. Uses O(1)
* additional memory.
*
* <ul>
* <li>{@code xs} must be non-empty.</li>
* <li>The result may be anything.</li>
* </ul>
*
* @param xs a {@code SortedSet}
* @param <T> the {@code SortedSet}'s element type
* @return the {@code SortedSet}'s last element
*/
public static @Nullable <T> T last(@NotNull SortedSet<T> xs) {
return xs.last();
}
/**
* Equivalent of Haskell's {@code last} function. Returns the last character of a {@code String}. Uses O(1)
* additional memory.
*
* <ul>
* <li>{@code s} must be non-empty.</li>
* <li>The result may be any {@code char}.</li>
* </ul>
*
* @param s a {@code String}
* @return the {@code String}'s last character
*/
public static char last(@NotNull String s) {
return s.charAt(s.length() - 1);
}
/**
* Equivalent of Haskell's {@code tail} function. Returns all elements of an {@code Iterable} but the first.
* {@code xs} may be infinite, in which the result will also be infinite. Uses O(1) additional memory. The
* {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code xs} must be non-empty.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code xs}|–1
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return an {@code Iterable} containing all elements of {@code xs} but the first
*/
public static @NotNull <T> Iterable<T> tail(@NotNull Iterable<T> xs) {
if (isEmpty(xs))
throw new NoSuchElementException();
return () -> new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
{
xsi.next();
}
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public T next() {
return xsi.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Equivalent of Haskell's {@code tail} function. Given a {@code String}, returns a {@code String} containing
* all of its characters but the first. Uses O(n) additional memory, where n is the length of {@code s}.
*
* <ul>
* <li>{@code s} must be non-empty.</li>
* <li>The result may be any {@code char}.</li>
* </ul>
*
* Result length is |{@code s}|–1
*
* @param s a {@code String}
* @return a {@code String} containing all characters of {@code s} but the first
*/
public static @NotNull String tail(@NotNull String s) {
return s.substring(1);
}
/**
* Equivalent of Haskell's {@code init} function. Returns all elements of an {@code Iterable} but the last.
* {@code xs} may be infinite, in which the result will be {@code xs}. Uses O(1) additional memory. The
* {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code xs} must be non-empty.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code xs}|–1
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return an {@code Iterable} containing all elements of {@code xs} but the last
*/
public static @NotNull <T> Iterable<T> init(@NotNull Iterable<T> xs) {
if (isEmpty(xs))
throw new NoSuchElementException();
return new Iterable<T>() {
@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
private T next = xsi.next();
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public T next() {
T oldNext = next;
next = xsi.next();
return oldNext;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
};
}
/**
* Equivalent of Haskell's {@code tail} function. Given a {@code String}, returns a {@code String} containing
* all of its characters but the last. Uses O(n) additional memory, where n is the length of {@code s}.
*
* <ul>
* <li>{@code s} must be non-empty.</li>
* <li>The result may be any {@code char}.</li>
* </ul>
*
* Result length is |{@code s}|–1
*
* @param s a {@code String}
* @return a {@code String} containing all characters of {@code s} but the last
*/
public static @NotNull String init(@NotNull String s) {
return s.substring(0, s.length() - 1);
}
/**
* Equivalent of Haskell's {@code null} function. Tests whether an {@code Iterable} contains no elements.
* {@code xs} may be infinite. Uses O(1) additional space.
*
* <ul>
* <li>{@code xs} must be non-null.</li>
* <li>The result may be either {@code boolean}.</li>
* </ul>
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return whether {@code xs} is empty
*/
public static <T> boolean isEmpty(@NotNull Iterable<T> xs) {
return !xs.iterator().hasNext();
}
/**
* Equivalent of Haskell's {@code null} function. Tests whether a {@code Collection} contains no elements. Uses
* O(1) additional space.
*
* <ul>
* <li>{@code xs} must be non-null.</li>
* <li>The result may be either {@code boolean}.</li>
* </ul>
*
* @param xs a {@code Collection}
* @param <T> the {@code Collection}'s element type
* @return whether {@code xs} is empty
*/
public static <T> boolean isEmpty(@NotNull Collection<T> xs) {
return xs.isEmpty();
}
/**
* Equivalent of Haskell's {@code null} function. Tests whether a {@code String} contains no characters. Uses
* O(1) additional space.
*
* <ul>
* <li>{@code xs} must be non-null.</li>
* <li>The result may be either {@code boolean}.</li>
* </ul>
*
* @param s a {@code String}
* @return whether {@code s} is empty
*/
public static boolean isEmpty(@NotNull String s) {
return s.isEmpty();
}
/**
* Equivalent of Haskell's {@code length} function. Returns the number of elements in an {@code Iterable}. Only
* works on finite {@code Iterable}s. Uses O(1) additional space.
*
* <ul>
* <li>{@code xs} must be finite.</li>
* <li>The result is non-negative.</li>
* </ul>
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return the {@code Iterable}'s length
*/
public static <T> int length(@NotNull Iterable<T> xs) {
int i = 0;
for (T x : xs) {
i++;
}
return i;
}
/**
* Equivalent of Haskell's {@code length} function. Returns the number of elements in an {@code Iterable}. Only
* works on finite {@code Iterable}s. Uses O(log(n)) additional space, where n is {@code xs}'s length; but it's
* effectively constant space.
*
* <ul>
* <li>{@code xs} must be finite.</li>
* <li>The result is non-negative.</li>
* </ul>
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return the {@code Iterable}'s length
*/
public static @NotNull <T> BigInteger bigIntegerLength(@NotNull Iterable<T> xs) {
BigInteger i = BigInteger.ZERO;
for (T x : xs) {
i = i.add(BigInteger.ONE);
}
return i;
}
/**
* Equivalent of Haskell's {@code length} function. Returns the number of elements in a {@code Collection}. Uses
* O(1) additional space.
*
* <ul>
* <li>{@code xs} must be non-null.</li>
* <li>The result is non-negative.</li>
* </ul>
*
* @param xs a {@code Collection}
* @param <T> the {@code Collection}'s element type
* @return the {@code Collection}'s length
*/
public static <T> int length(@NotNull Collection<T> xs) {
return xs.size();
}
/**
* Equivalent of Haskell's {@code length} function. Returns the number of characters in a {@code String}. Uses
* O(1) additional space.
*
* <ul>
* <li>{@code xs} must be non-null.</li>
* <li>The result is non-negative.</li>
* </ul>
*
* @param s a {@code String}
* @return the {@code String}'s length
*/
public static int length(@NotNull String s) {
return s.length();
}
//todo docs
public static <T> boolean lengthAtLeast(int length, @NotNull Iterable<T> xs) {
int i = 0;
for (T x : xs) {
i++;
if (i >= length) return true;
}
return false;
}
    /**
     * Tests whether a {@code Collection} contains at least {@code length} elements. Uses O(1) additional
     * space.
     *
     * @param length the minimum number of elements
     * @param xs a {@code Collection}
     * @param <T> the {@code Collection}'s element type
     * @return whether |{@code xs}| ≥ {@code length}
     */
    public static <T> boolean lengthAtLeast(int length, @NotNull Collection<T> xs) {
        return xs.size() >= length;
    }
    /**
     * Tests whether a {@code String} contains at least {@code length} characters. Uses O(1) additional space.
     *
     * @param length the minimum number of characters
     * @param s a {@code String}
     * @param <T> unused type parameter (kept for signature compatibility)
     * @return whether |{@code s}| ≥ {@code length}
     */
    public static <T> boolean lengthAtLeast(int length, @NotNull String s) {
        return s.length() >= length;
    }
/**
* Equivalent of Haskell's {@code map} function. Transforms one {@code Iterable} into another by applying a
* function to each element. {@code xs} may be infinite, in which case the result is also infinite. Uses O(1)
* additional memory. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code f} must be non-null.</li>
* <li>{@code xs} must be non-null.</li>
* <li>{@code xs} must only contain elements that are valid inputs for {@code f}.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code xs}|
*
* @param f the function that transforms each element in the {@code Iterable}
* @param xs the {@code Iterable}
* @param <A> the type of the original {@code Iterable}'s elements
* @param <B> the type of the output {@code Iterable}'s elements
* @return an {@code Iterable} containing the elements of {@code xs} transformed by {@code f}
*/
public static @NotNull <A, B> Iterable<B> map(@NotNull Function<A, B> f, @NotNull Iterable<A> xs) {
return () -> new Iterator<B>() {
private final Iterator<A> xsi = xs.iterator();
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public B next() {
return f.apply(xsi.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Equivalent of Haskell's {@code map} function. Transforms one {@code String} into another by applying a
* function to each character. Uses O(n) additional memory, where n is the length of the input string.
*
* <ul>
* <li>{@code f} must be non-null.</li>
* <li>{@code xs} must be non-null.</li>
* <li>{@code xs} must only contain characters that are valid inputs for {@code f}.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code s}|
*
* @param f the function that transforms each character in the {@code String}
* @param s the {@code String}
* @return a {@code String} containing the characters of {@code s} transformed by {@code f}
*/
public static @NotNull String map(@NotNull Function<Character, Character> f, @NotNull String s) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
sb.append(f.apply(s.charAt(i)));
}
return sb.toString();
}
/**
* Equivalent of Haskell's {@code reverse} function. Reverses an {@code Iterable}. {@code xs} must be finite.
* Uses O(n) additional memory, where n is the length of {@code xs}. The resulting list may be modified, but the
* modifications will not affect the original {@code Iterable}.
*
* <ul>
* <li>{@code xs} must be finite.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code xs}|
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return a {@code List} containing {@code xs}'s elements in reverse order
*/
public static @NotNull <T> List<T> reverse(@NotNull Iterable<T> xs) {
List<T> list = toList(xs);
Collections.reverse(list);
return list;
}
/**
* Equivalent of Haskell's {@code reverse} function. Reverses a {@code String}. Uses O(n) additional memory,
* where n is the length of {@code s}.
*
* <ul>
* <li>{@code s} must be non-null.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code s}|
*
* @param s a {@code String}
* @return a {@code String} containing {@code s}'s characters in reverse order
*/
public static @NotNull String reverse(@NotNull String s) {
char[] reversed = new char[s.length()];
for (int i = 0; i < s.length() / 2; i++) {
int j = s.length() - i - 1;
reversed[i] = s.charAt(j);
reversed[j] = s.charAt(i);
}
if ((s.length() & 1) == 1) {
int i = s.length() / 2;
reversed[i] = s.charAt(i);
}
return new String(reversed);
}
/**
* Equivalent of Haskell's {@code intersperse} function. Given an {@code Iterable} {@code xs} and a seperator
* {@code sep}, returns an {@code Iterable} consisting of the elements of {@code xs} with {@code sep} between
* every adjacent pair. {@code xs} may be infinite, in which case the result is also infinite. Uses O(1)
* additional memory. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code sep} may be anything.</li>
* <li>{@code xs} must be non-null.</li>
* <li>The result is an {@code Iterable} whose odd-indexed (using 0-based indexing) elements are identical.</li>
* </ul>
*
* Result length is 0 when |{@code xs}|=0, 2|{@code xs}|–1 otherwise
*
* @param sep a separator
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return an {@code Iterable} consisting of the elements of {@code xs} interspersed with {@code sep}
*/
public static @NotNull <T> Iterable<T> intersperse(@Nullable T sep, @NotNull Iterable<T> xs) {
return () -> new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
private boolean separating = false;
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public T next() {
if (separating) {
separating = false;
return sep;
} else {
separating = true;
return xsi.next();
}
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Equivalent of Haskell's {@code intersperse} function. Given a {@code String} {@code s} and a seperator
* {@code sep}, returns a {@code String} consisting of the characters of {@code s} with {@code sep} between
* every adjacent pair. Uses O(n) additional memory, where n is the length of {@code s}.
*
* <ul>
* <li>{@code sep} may be any {@code char}.</li>
* <li>{@code s} must be non-null.</li>
* <li>The result is a {@code String} whose odd-indexed (using 0-based indexing) characters are identical.</li>
* </ul>
*
* Result length is 0 when |{@code s}|=0, 2|{@code s}|–1 otherwise
*
* @param sep a separator
* @param s a {@code String}
* @return a {@code String} consisting of the characters of {@code s} interspersed with {@code sep}
*/
public static @NotNull String intersperse(char sep, @NotNull String s) {
if (s.isEmpty()) return "";
StringBuilder sb = new StringBuilder();
sb.append(s.charAt(0));
for (int i = 1; i < s.length(); i++) {
sb.append(sep);
sb.append(s.charAt(i));
}
return sb.toString();
}
/**
* Equivalent of Haskell's {@code intercalate} function. Inserts an {@code Iterable} between every two adjacent
* {@code Iterable}s in an {@code Iterable} of {@code Iterable}s, flattening the result. {@code xss}, any
* element of {@code xss}, or {@code xs} may be infinite, in which case the result is also infinite. Uses O(1)
* additional memory. The {@code Iterable} produced does not support removing elements.
*
* Result length is the sum of the lengths of {@code xs}'s elements and (0 if |{@code xss}|=0,
* |{@code xss}|(|{@code xs}|–1) otherwise)
*
* <ul>
* <li>{@code xs} must be non-null.</li>
* <li>{@code xss} must be non-null.</li>
* <li>The result is non-null.</li>
* </ul>
*
* @param xs the separating {@code Iterable}
* @param xss the separated {@code Iterable}
* @param <T> the resulting {@code Iterable}'s element type
* @return {@code xss} intercalated by {@code xs}
*/
public static @NotNull <T> Iterable<T> intercalate(@NotNull Iterable<T> xs, @NotNull Iterable<Iterable<T>> xss) {
return concat(intersperse(xs, xss));
}
/**
* Equivalent of Haskell's {@code intercalate} function. Inserts a {@code String} between every two adjacent
* {@code String}s in an {@code Iterable} of {@code String}s, flattening the result. Uses O(abc) additional
* memory, where a is the length of {@code strings}, b is the maximum length of any string in {@code strings},
* and c is the length of {@code sep}.
* The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code sep} must be non-null.</li>
* <li>{@code strings} must be finite.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is the sum of the lengths of {@code xs}'s elements and (0 if |{@code strings}|=0,
* |{@code strings}|(|{@code sep}|–1) otherwise)
*
* @param sep the separating {@code String}
* @param strings the separated {@code String}s
* @return {@code strings} intercalated by {@code sep}
*/
public static @NotNull String intercalate(@NotNull String sep, @NotNull Iterable<String> strings) {
return concatStrings(intersperse(sep, strings));
}
/**
* Equivalent of Haskell's {@code transpose} function. Swaps rows and columns of an {@code Iterable} of
* {@code Iterables}. If the rows have different lengths, then the "overhanging" elements still end up in the
* result. See test cases for examples. Any element of {@code xss} may be infinite, in which case the result will
* be infinite. Uses O(nm) additional memory, where n is then length of {@code xss} and m is the largest amount of
* memory used by any {@code Iterable} in {@code xss}. The {@code Iterable} produced does not support removing
* elements.
*
* <ul>
* <li>{@code xss} must be finite.</li>
* <li>The lengths of the result's elements are finite, non-increasing, and never 0.</li>
* </ul>
*
* Result length is the maximum length of {@code xss}'s elements
*
* @param xss an {@code Iterable} of {@code Iterable}s
* @param <T> the {@code Iterable}'s elements' element type
* @return {@code xss}, transposed
*/
public static @NotNull <T> Iterable<List<T>> transpose(@NotNull Iterable<Iterable<T>> xss) {
return () -> new Iterator<List<T>>() {
private final List<Iterator<T>> iterators = toList(map(Iterable::iterator, xss));
@Override
public boolean hasNext() {
return any(Iterator::hasNext, iterators);
}
@Override
public List<T> next() {
List<T> nextList = new ArrayList<>();
for (Iterator<T> iterator : iterators) {
if (iterator.hasNext()) {
nextList.add(iterator.next());
}
}
return nextList;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
    /**
     * Equivalent of Haskell's {@code transpose} function. Swaps rows and columns of an {@code Iterable} of
     * {@code String}s. If the rows have different lengths, then the "overhanging" characters still end up in the
     * result. See test cases for examples. Uses O(nm) additional memory, where n is then length of {@code xss} and m
     * is the length of the longest {@code String} in {@code xss}. The {@code Iterable} produced does not support
     * removing elements.
     *
     * <ul>
     *  <li>{@code strings} must be non-null.</li>
     *  <li>The lengths of the result's elements are non-increasing and never 0.</li>
     * </ul>
     *
     * Result length is the maximum length of {@code strings}'s elements
     *
     * @param strings an {@code Iterable} of {@code String}s
     * @return {@code strings}, transposed
     */
    public static @NotNull Iterable<String> transposeStrings(@NotNull Iterable<String> strings) {
        // Explode each String into its characters, transpose the rows, then glue each transposed row back
        // into a String. (fromString/charsToString are defined elsewhere in this class; presumably the
        // String <-> Iterable<Character> converters implied by the javadoc above — verify their overloads.)
        return map(
                IterableUtils::charsToString,
                transpose(map(s -> fromString(s), strings))
        );
    }
/**
* Equivalent of Haskell's {@code transpose} function. Swaps rows and columns of an {@code Iterable} of
* {@code Iterables}. If the rows have different lengths, then the "overhanging" elements will be truncated; the
* result's rows will all have equal lengths. See test cases for examples. Any element of {@code xss} may be
* infinite, in which case the result will be infinite. Uses O(nm) additional memory, where n is then length of
* {@code xss} and m is the largest amount of memory used by any {@code Iterable} in {@code xss}. The
* {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code xss} must be finite.</li>
* <li>The lengths of the result's elements are finite and equal.</li>
* </ul>
*
* Result length is the minimum length of {@code xss}'s elements
*
* @param xss an {@code Iterable} of {@code Iterable}s
* @param <T> the {@code Iterable}'s elements' element type
* @return {@code xss}, transposed
*/
public static @NotNull <T> Iterable<List<T>> transposeTruncating(@NotNull Iterable<Iterable<T>> xss) {
return () -> new Iterator<List<T>>() {
private final List<Iterator<T>> iterators = toList(map(Iterable::iterator, xss));
@Override
public boolean hasNext() {
return !iterators.isEmpty() && all(Iterator::hasNext, iterators);
}
@Override
public List<T> next() {
List<T> nextList = new ArrayList<>();
for (Iterator<T> iterator : iterators) {
nextList.add(iterator.next());
}
return nextList;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Equivalent of Haskell's {@code transpose} function. Swaps rows and columns of an {@code Iterable} of
* {@code String}s. If the rows have different lengths, then the "overhanging" characters will be truncated. See
* test cases for examples. Uses O(nm) additional memory, where n is then length of {@code xss} and m is the
* length of the longest {@code String} in {@code xss}. The {@code Iterable} produced does not support removing
* elements.
*
* <ul>
* <li>{@code strings} must be non-null.</li>
* <li>The lengths of the result's elements are equal.</li>
* </ul>
*
* Result length is the minimum length of {@code strings}'s elements
*
* @param strings an {@code Iterable} of {@code String}s
* @return {@code strings}, transposed
*/
public static @NotNull Iterable<String> transposeStringsTruncating(@NotNull Iterable<String> strings) {
return map(
IterableUtils::charsToString,
transposeTruncating(map(s -> fromString(s), strings))
);
}
/**
* Equivalent of Haskell's {@code transpose} function. Swaps rows and columns of an {@code Iterable} of
* {@code Iterables}. If the rows have different lengths, then the gaps will be padded; the result's rows will all
* have equal lengths. See test cases for examples. Any element of {@code xss} may be infinite, in which case the
* result will be infinite. Uses O(nm) additional memory, where n is then length of {@code xss} and m is the
* largest amount of memory used by any {@code Iterable} in {@code xss}. The {@code Iterable} produced does not
* support removing elements.
*
* <ul>
* <li>{@code xss} must be finite.</li>
* <li>The lengths of the result's elements are equal.</li>
* </ul>
*
* Result length is the maximum length of {@code xss}'s elements
*
* @param xss an {@code Iterable} of {@code Iterable}s
* @param pad the padding
* @param <T> the {@code Iterable}'s elements' element type
* @return {@code xss}, transposed
*/
public static @NotNull <T> Iterable<Iterable<T>> transposePadded(
@Nullable T pad,
@NotNull Iterable<Iterable<T>> xss
) {
return () -> new Iterator<Iterable<T>>() {
private final List<Iterator<T>> iterators = toList(map(Iterable::iterator, xss));
@Override
public boolean hasNext() {
return any(Iterator::hasNext, iterators);
}
@Override
public Iterable<T> next() {
List<T> nextList = new ArrayList<>();
for (Iterator<T> iterator : iterators) {
nextList.add(iterator.hasNext() ? iterator.next() : pad);
}
return nextList;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Equivalent of Haskell's {@code transpose} function. Swaps rows and columns of an {@code Iterable} of
* {@code String}s. If the rows have different lengths, then the gaps will be padded; the result's rows will all
     * have equal lengths. Uses O(nm) additional memory, where n is the length of {@code strings} and m is the length
* the longest {@code String} in {@code xss}. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code strings} must be non-null.</li>
* <li>The lengths of the result's elements are equal.</li>
* </ul>
*
* Result length is the maximum length of {@code strings}'s elements
*
* @param strings an {@code Iterable} of {@code String}s
* @param pad the padding
* @return {@code strings}, transposed
*/
public static @NotNull Iterable<String> transposeStringsPadded(char pad, @NotNull Iterable<String> strings) {
return map(
IterableUtils::charsToString,
transposePadded(pad, map(s -> fromString(s), strings))
);
}
public static @Nullable <A, B> B foldl(
@NotNull Function<Pair<B, A>, B> f,
@Nullable B z,
@NotNull Iterable<A> xs
) {
B result = z;
for (A x : xs) {
result = f.apply(new Pair<B, A>(result, x));
}
return result;
}
public static @Nullable <A> A foldl1(@NotNull Function<Pair<A, A>, A> f, @NotNull Iterable<A> xs) {
A result = null;
boolean started = false;
for (A x : xs) {
if (started) {
result = f.apply(new Pair<A, A>(result, x));
} else {
result = x;
started = true;
}
}
return result;
}
public static @Nullable <A, B> B foldr(
@NotNull Function<Pair<A, B>, B> f,
@Nullable B z,
@NotNull Iterable<A> xs
) {
return foldl(p -> f.apply(new Pair<>(p.b, p.a)), z, reverse(xs));
}
public static @Nullable <A> A foldr1(@NotNull Function<Pair<A, A>, A> f, @NotNull Iterable<A> xs) {
return foldl1(p -> f.apply(new Pair<>(p.b, p.a)), reverse(xs));
}
    /**
     * Lazily concatenates an {@code Iterable} of {@code Iterable}s into a single {@code Iterable}, in order.
     * {@code xss} may be infinite, and so may any of its elements. The {@code Iterable} produced does not
     * support removing elements.
     *
     * @param xss the {@code Iterable}s to concatenate
     * @param <T> the element type
     * @return the concatenation of {@code xss}'s elements
     */
    public static @NotNull <T> Iterable<T> concat(@NotNull Iterable<Iterable<T>> xss) {
        return () -> new Iterator<T>() {
            final Iterator<Iterable<T>> xssi = xss.iterator();
            // Iterator over the current inner Iterable; null only when xss is empty.
            Iterator<T> xsi = xssi.hasNext() ? xssi.next().iterator() : null;
            @Override
            public boolean hasNext() {
                if (xsi == null) return false;
                // Skip past exhausted (possibly empty) inner Iterables until one
                // with a remaining element is found.
                while (!xsi.hasNext()) {
                    if (!xssi.hasNext()) return false;
                    xsi = xssi.next().iterator();
                }
                return true;
            }
            @Override
            public T next() {
                // Called for its side effect: advances xsi past exhausted inner
                // Iterables before the element is pulled.
                hasNext();
                return xsi.next();
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
public static @NotNull String concatStrings(@NotNull Iterable<String> strings) {
StringBuilder sb = new StringBuilder();
for (String s : strings) {
sb.append(s);
}
return sb.toString();
}
public static @NotNull <A, B> Iterable<B> concatMap(@NotNull Function<A, Iterable<B>> f, @NotNull Iterable<A> xs) {
return concat(map(f, xs));
}
public static boolean and(@NotNull Iterable<Boolean> xs) {
for (boolean x : xs) {
if (!x) return false;
}
return true;
}
public static boolean or(@NotNull Iterable<Boolean> xs) {
for (boolean x : xs) {
if (x) return true;
}
return false;
}
public static <T> boolean any(@NotNull Predicate<T> predicate, @NotNull Iterable<T> xs) {
for (T x : xs) {
if (predicate.test(x)) return true;
}
return false;
}
public static <T> boolean all(@NotNull Predicate<T> predicate, @NotNull Iterable<T> xs) {
for (T x : xs) {
if (!predicate.test(x)) return false;
}
return true;
}
public static byte sumByte(@NotNull Iterable<Byte> xs) {
return foldl(p -> (byte) (p.a + p.b), (byte) 0, xs);
}
public static short sumShort(@NotNull Iterable<Short> xs) {
return foldl(p -> (short) (p.a + p.b), (short) 0, xs);
}
public static int sumInteger(@NotNull Iterable<Integer> xs) {
return foldl(p -> p.a + p.b, 0, xs);
}
public static long sumLong(@NotNull Iterable<Long> xs) {
return foldl(p -> p.a + p.b, 0L, xs);
}
public static float sumFloat(@NotNull Iterable<Float> xs) {
return foldl(p -> p.a + p.b, 0.0f, xs);
}
public static double sumDouble(Iterable<Double> xs) {
return foldl(p -> p.a + p.b, 0.0, xs);
}
public static @NotNull BigInteger sumBigInteger(@NotNull Iterable<BigInteger> xs) {
return foldl(p -> p.a.add(p.b), BigInteger.ZERO, xs);
}
public static @NotNull BigDecimal sumBigDecimal(@NotNull Iterable<BigDecimal> xs) {
return foldl(p -> p.a.add(p.b), BigDecimal.ZERO, xs);
}
public static byte productByte(@NotNull Iterable<Byte> xs) {
return foldl(p -> (byte) (p.a * p.b), (byte) 1, xs);
}
public static short productShort(@NotNull Iterable<Short> xs) {
return foldl(p -> (short) (p.a * p.b), (short) 1, xs);
}
public static int productInteger(@NotNull Iterable<Integer> xs) {
return foldl(p -> p.a * p.b, 1, xs);
}
public static long productLong(@NotNull Iterable<Long> xs) {
return foldl(p -> p.a * p.b, 1L, xs);
}
public static float productFloat(@NotNull Iterable<Float> xs) {
return foldl(p -> p.a * p.b, 1.0f, xs);
}
public static double productDouble(@NotNull Iterable<Double> xs) {
return foldl(p -> p.a * p.b, 1.0, xs);
}
public static @NotNull BigInteger productBigInteger(Iterable<BigInteger> xs) {
return foldl(p -> p.a.multiply(p.b), BigInteger.ONE, xs);
}
public static @NotNull BigDecimal productBigDecimal(@NotNull Iterable<BigDecimal> xs) {
return foldl(p -> p.a.multiply(p.b), BigDecimal.ONE, xs);
}
public static @NotNull <T extends Comparable<T>> T maximum(@NotNull Iterable<T> xs) {
return foldl1(p -> max(p.a, p.b), xs);
}
public static char maximum(@NotNull String s) {
return foldl1(p -> max(p.a, p.b), fromString(s));
}
public static @NotNull <T extends Comparable<T>> T minimum(@NotNull Iterable<T> xs) {
return foldl1(p -> min(p.a, p.b), xs);
}
public static char minimum(@NotNull String s) {
return foldl1(p -> min(p.a, p.b), fromString(s));
}
public static @NotNull <A, B> Iterable<B> scanl(
@NotNull Function<Pair<B, A>, B> f,
@Nullable B z,
@NotNull Iterable<A> xs
) {
return () -> new Iterator<B>() {
private final Iterator<A> xsi = xs.iterator();
private B result = z;
private boolean firstTime = true;
@Override
public boolean hasNext() {
return firstTime || xsi.hasNext();
}
@Override
public B next() {
if (firstTime) {
firstTime = false;
return result;
} else {
result = f.apply(new Pair<B, A>(result, xsi.next()));
return result;
}
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull <A> Iterable<A> scanl1(@NotNull Function<Pair<A, A>, A> f, @NotNull Iterable<A> xs) {
return scanl(f, head(xs), tail(xs));
}
public static @NotNull <A, B> Iterable<B> scanr(
@NotNull Function<Pair<A, B>, B> f,
@NotNull B z,
@NotNull Iterable<A> xs
) {
return scanl(p -> f.apply(new Pair<A, B>(p.b, p.a)), z, reverse(xs));
}
public static @NotNull <A> Iterable<A> scanr1(@NotNull Function<Pair<A, A>, A> f, @NotNull Iterable<A> xs) {
return scanl1(p -> f.apply(new Pair<A, A>(p.b, p.a)), reverse(xs));
}
public static @NotNull <X, Y, ACC> Pair<ACC, List<Y>> mapAccumL(
@NotNull Function<Pair<ACC, X>, Pair<ACC, Y>> f,
@Nullable ACC s,
@NotNull Iterable<X> xs
) {
List<Y> ys = new ArrayList<Y>();
for (X x : xs) {
Pair<ACC, Y> p = f.apply(new Pair<ACC, X>(s, x));
s = p.a;
ys.add(p.b);
}
return new Pair<>(s, ys);
}
public static @NotNull <X, Y, ACC> Pair<ACC, List<Y>> mapAccumR(
@NotNull Function<Pair<ACC, X>, Pair<ACC, Y>> f,
@Nullable ACC s,
@NotNull Iterable<X> xs) {
return mapAccumL(f, s, reverse(xs));
}
public static @NotNull <T> Iterable<T> iterate(@NotNull Function<T, T> f, @Nullable T x) {
return () -> new Iterator<T>() {
private T current = x;
private boolean firstTime = true;
@Override
public boolean hasNext() {
return true;
}
@Override
public T next() {
if (firstTime) {
firstTime = false;
} else {
current = f.apply(current);
}
return current;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull <T> Iterable<T> repeat(@Nullable T x) {
return () -> new Iterator<T>() {
@Override
public boolean hasNext() {
return true;
}
@Override
public T next() {
return x;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull <T> Iterable<T> replicate(int n, @Nullable T x) {
return () -> new Iterator<T>() {
private int i = 0;
@Override
public boolean hasNext() {
return i < n;
}
@Override
public T next() {
i++;
return x;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull <T> Iterable<T> replicate(@NotNull BigInteger n, @Nullable T x) {
return () -> new Iterator<T>() {
private BigInteger i = BigInteger.ZERO;
@Override
public boolean hasNext() {
return lt(i, n);
}
@Override
public T next() {
i = i.add(BigInteger.ONE);
return x;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull String replicate(int n, char c) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < n; i++) {
sb.append(c);
}
return sb.toString();
}
public static @NotNull String replicate(@NotNull BigInteger n, char c) {
StringBuilder sb = new StringBuilder();
for (BigInteger i : range(BigInteger.ONE, n)) {
sb.append(c);
}
return sb.toString();
}
public static @NotNull <T> Iterable<T> cycle(@NotNull Iterable<T> xs) {
if (isEmpty(xs)) return xs;
return () -> new Iterator<T>() {
private Iterator<T> xsi = xs.iterator();
@Override
public boolean hasNext() {
return true;
}
@Override
public T next() {
if (!xsi.hasNext()) xsi = xs.iterator();
return xsi.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
    /**
     * Equivalent of Haskell's {@code unfoldr}: builds an {@code Iterable} from a seed. At each step {@code f}
     * is applied to the current seed; an empty {@code Optional} ends the sequence, and a present
     * {@code Pair(a, b)} emits {@code a} and continues with seed {@code b}. The result may be infinite. The
     * {@code Iterable} produced does not support removing elements.
     *
     * @param f the step function
     * @param x the initial seed
     * @param <A> the element type
     * @param <B> the seed type
     * @return the unfolded {@code Iterable}
     */
    public static @NotNull <A, B> Iterable<A> unfoldr(@NotNull Function<B, Optional<Pair<A, B>>> f, @NotNull B x) {
        return new Iterable<A>() {
            @Override
            public Iterator<A> iterator() {
                return new Iterator<A>() {
                    private boolean hasNext = true;
                    // The element to hand out on the next call to next().
                    private A next;
                    private B seed = x;
                    {
                        // Prime the iterator so hasNext() is accurate before the first next().
                        advance();
                    }
                    @Override
                    public boolean hasNext() {
                        return hasNext;
                    }
                    @Override
                    public A next() {
                        A oldNext = next;
                        advance();
                        return oldNext;
                    }
                    // Applies f to the seed, caching the next element or marking exhaustion.
                    private void advance() {
                        Optional<Pair<A, B>> p = f.apply(seed);
                        if (p.isPresent()) {
                            next = p.get().a;
                            seed = p.get().b;
                        } else {
                            hasNext = false;
                        }
                    }
                    @Override
                    public void remove() {
                        throw new UnsupportedOperationException("cannot remove from this iterator");
                    }
                };
            }
        };
    }
public static @NotNull <T> Iterable<T> take(int n, @NotNull Iterable<T> xs) {
return () -> new Iterator<T>() {
private int i = 0;
private final Iterator<T> xsi = xs.iterator();
@Override
public boolean hasNext() {
return i < n && xsi.hasNext();
}
@Override
public T next() {
i++;
return xsi.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull <T> Iterable<T> take(@NotNull BigInteger n, @NotNull Iterable<T> xs) {
return () -> new Iterator<T>() {
private BigInteger i = BigInteger.ZERO;
private final Iterator<T> xsi = xs.iterator();
@Override
public boolean hasNext() {
return lt(i, n) && xsi.hasNext();
}
@Override
public T next() {
i = i.add(BigInteger.ONE);
return xsi.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull String take(int n, @NotNull String s) {
return s.substring(0, n);
}
public static @NotNull String take(@NotNull BigInteger n, @NotNull String s) {
return s.substring(0, n.intValueExact());
}
public static @NotNull <T> Iterable<T> drop(int n, @NotNull Iterable<T> xs) {
return () -> new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
{
int i = n;
while (xsi.hasNext()) {
if (i <= 0) break;
xsi.next();
i--;
}
}
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public T next() {
return xsi.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull <T> Iterable<T> drop(@NotNull BigInteger n, @NotNull Iterable<T> xs) {
return () -> new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
{
BigInteger i = n;
while (xsi.hasNext()) {
if (le(i, BigInteger.ZERO)) break;
xsi.next();
i = i.subtract(BigInteger.ONE);
}
}
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public T next() {
return xsi.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull String drop(int n, @NotNull String s) {
return s.substring(n);
}
public static @NotNull String drop(@NotNull BigInteger n, @NotNull String s) {
return s.substring(n.intValueExact());
}
public static @NotNull <T> Iterable<T> pad(@NotNull T pad, int length, @NotNull Iterable<T> xs) {
if (length < 0)
throw new IllegalArgumentException("cannot pad with a negative length");
return take(length, concat(xs, repeat(pad)));
}
public static @NotNull <T> Iterable<T> pad(@NotNull T pad, @NotNull BigInteger length, @NotNull Iterable<T> xs) {
if (length.signum() == -1)
throw new IllegalArgumentException("cannot pad with a negative length");
return take(length, (Iterable<T>) concat(xs, repeat(pad)));
}
public static @NotNull String pad(char pad, int length, @NotNull String s) {
if (s.length() == length) return s;
if (s.length() > length) return take(length, s);
return s + replicate(length - s.length(), pad);
}
public static @NotNull String pad(char pad, @NotNull BigInteger length, @NotNull String s) {
if (s.length() == length.intValueExact()) return s;
if (s.length() > length.intValueExact()) return take(length, s);
return s + replicate(length.intValueExact() - s.length(), pad);
}
public static @NotNull <T> Pair<Iterable<T>, Iterable<T>> splitAt(int n, @NotNull Iterable<T> xs) {
return new Pair<>(take(n, xs), drop(n, xs));
}
public static @NotNull <T> Pair<Iterable<T>, Iterable<T>> splitAt(@NotNull BigInteger n, @NotNull Iterable<T> xs) {
return new Pair<>(take(n, xs), drop(n, xs));
}
public static @NotNull Pair<String, String> splitAt(int n, @NotNull String s) {
return new Pair<>(s.substring(0, n), s.substring(n));
}
public static @NotNull Pair<String, String> splitAt(@NotNull BigInteger i, @NotNull String s) {
return splitAt(i.intValueExact(), s);
}
public static @NotNull <T> Iterable<T> takeWhile(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
return new Iterable<T>() {
@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
private T next;
private boolean hasNext;
{
advance();
}
@Override
public boolean hasNext() {
return hasNext;
}
@Override
public T next() {
T current = next;
advance();
return current;
}
private void advance() {
if (xsi.hasNext()) {
next = xsi.next();
hasNext = p.test(next);
} else {
hasNext = false;
}
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
};
}
public static @NotNull String takeWhile(@NotNull Predicate<Character> p, @NotNull String s) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if (!p.test(c)) break;
sb.append(c);
}
return sb.toString();
}
    /**
     * Returns the prefix of {@code xs} up to and including the first element satisfying {@code p}; if no
     * element satisfies {@code p}, all of {@code xs} is returned. The {@code Iterable} produced does not
     * support removing elements.
     *
     * <p>NOTE(review): a null element in {@code xs} is never tested against {@code p} (see the
     * {@code next != null} guard in {@code advance()}), so a null can never stop the sequence — confirm
     * this is intended.</p>
     *
     * @param p the stopping predicate
     * @param xs the source
     * @param <T> the element type
     * @return the shortest prefix of {@code xs} ending with an element satisfying {@code p}
     */
    public static @NotNull <T> Iterable<T> stopAt(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
        return new Iterable<T>() {
            @Override
            public Iterator<T> iterator() {
                return new Iterator<T>() {
                    private final Iterator<T> xsi = xs.iterator();
                    // The element to hand out on the next call to next().
                    private T next;
                    private boolean hasNext;
                    {
                        advance();
                    }
                    @Override
                    public boolean hasNext() {
                        return hasNext;
                    }
                    @Override
                    public T next() {
                        T current = next;
                        advance();
                        return current;
                    }
                    private void advance() {
                        // If the element just handed out satisfied p, stop; otherwise
                        // pull the next element (if any) from the source.
                        if (next != null && p.test(next)) {
                            hasNext = false;
                        } else {
                            hasNext = xsi.hasNext();
                            if (xsi.hasNext()) {
                                next = xsi.next();
                            }
                        }
                    }
                    @Override
                    public void remove() {
                        throw new UnsupportedOperationException("cannot remove from this iterator");
                    }
                };
            }
        };
    }
public static @NotNull String stopAt(@NotNull Predicate<Character> p, @NotNull String s) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
sb.append(c);
if (p.test(c)) break;
}
return sb.toString();
}
    /**
     * Equivalent of Haskell's {@code dropWhile}: returns {@code xs} without its longest prefix of elements
     * satisfying {@code p}. The {@code Iterable} produced does not support removing elements.
     *
     * <p>NOTE(review): the dropped prefix is consumed eagerly as soon as {@code iterator()} is called, not
     * lazily on the first {@code next()}.</p>
     *
     * @param p the predicate
     * @param xs the source
     * @param <T> the element type
     * @return {@code xs} with its {@code p}-satisfying prefix removed
     */
    public static @NotNull <T> Iterable<T> dropWhile(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
        return () -> new Iterator<T>() {
            private Iterator<T> xsi = xs.iterator();
            // The first element that fails p, held until the first next() call.
            private T x;
            // True while x holds a pending element not yet handed out.
            private boolean first = false;
            {
                // Consume and discard the prefix satisfying p; stash the first survivor.
                while (xsi.hasNext()) {
                    x = xsi.next();
                    if (!p.test(x)) {
                        first = true;
                        break;
                    }
                }
            }
            @Override
            public boolean hasNext() {
                return first || xsi.hasNext();
            }
            @Override
            public T next() {
                if (first) {
                    first = false;
                    return x;
                } else {
                    return xsi.next();
                }
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
public static @NotNull String dropWhile(@NotNull Predicate<Character> p, @NotNull String s) {
int startIndex = -1;
for (int i = 0; i < s.length(); i++) {
if (p.test(s.charAt(i))) {
startIndex = i;
break;
}
}
return startIndex == -1 ? "" : s.substring(startIndex);
}
public static @NotNull <T> Iterable<T> dropWhileEnd(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
List<T> list = toList(xs);
int index = -1;
for (int i = list.size() - 1; i >= 0; i--) {
if (!p.test(list.get(i))) {
index = i;
break;
}
}
return take(index + 1, list);
}
public static @NotNull String dropWhileEnd(@NotNull Predicate<Character> p, @NotNull String s) {
int index = -1;
for (int i = s.length() - 1; i >= 0; i--) {
if (!p.test(s.charAt(i))) {
index = i;
break;
}
}
return take(index + 1, s);
}
public static @NotNull <T> Iterable<List<T>> chunk(int size, @NotNull Iterable<T> xs) {
return () -> new Iterator<List<T>>() {
private final Iterator<T> xsi = xs.iterator();
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public List<T> next() {
List<T> chunk = new ArrayList<>();
for (int i = 0; i < size; i++) {
if (!xsi.hasNext()) break;
chunk.add(xsi.next());
}
return chunk;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull Iterable<String> chunk(int size, @NotNull String s) {
return () -> new Iterator<String>() {
private int i = 0;
@Override
public boolean hasNext() {
return i != s.length();
}
@Override
public String next() {
StringBuilder sb = new StringBuilder();
for (int j = 0; j < size; j++) {
if (i == s.length()) break;
sb.append(s.charAt(i++));
}
return sb.toString();
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull <T> Iterable<List<T>> chunkPadded(@Nullable T pad, int size, @NotNull Iterable<T> xs) {
return () -> new Iterator<List<T>>() {
private final Iterator<T> xsi = xs.iterator();
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public List<T> next() {
List<T> chunk = new ArrayList<>();
for (int i = 0; i < size; i++) {
chunk.add(xsi.hasNext() ? xsi.next() : pad);
}
return chunk;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static @NotNull <T> Pair<Iterable<T>, Iterable<T>> span(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
return new Pair<>(takeWhile(p, xs), dropWhile(p, xs));
}
public static @NotNull Pair<String, String> span(@NotNull Predicate<Character> p, @NotNull String s) {
return new Pair<>(takeWhile(p, s), dropWhile(p, s));
}
public static @NotNull <T> Pair<Iterable<T>, Iterable<T>> breakIterable(Predicate<T> p, Iterable<T> xs) {
return span(p.negate(), xs);
}
public static @NotNull Pair<String, String> breakString(@NotNull Predicate<Character> p, @NotNull String s) {
return span(p.negate(), s);
}
public static @NotNull <T> Optional<Iterable<T>> stripPrefix(Iterable<T> prefix, Iterable<T> xs) {
return isPrefixOf(prefix, xs) ? Optional.of(take(length(prefix), xs)) : Optional.<Iterable<T>>empty();
}
    /**
     * Run-length encodes {@code xs}: produces one {@code Pair} per maximal run of adjacent equal elements
     * (compared with {@code Objects.equals}), containing the element and the run's length. {@code xs} may be
     * infinite. The {@code Iterable} produced does not support removing elements.
     *
     * @param xs the source
     * @param <T> the element type
     * @return the runs of {@code xs} paired with their lengths
     */
    public static @NotNull <T> Iterable<Pair<T, Integer>> countAdjacent(@NotNull Iterable<T> xs) {
        return new Iterable<Pair<T, Integer>>() {
            @Override
            public Iterator<Pair<T, Integer>> iterator() {
                return new Iterator<Pair<T, Integer>>() {
                    private Iterator<T> xsi = xs.iterator();
                    private boolean hasNext = xsi.hasNext();
                    // True once the source is exhausted; the run cached in next is then the last one.
                    private boolean isLast = false;
                    // First element of the run currently being counted.
                    private T nextX = null;
                    // The fully-counted run to hand out on the next call to next().
                    private Pair<T, Integer> next = null;
                    {
                        if (hasNext) {
                            nextX = xsi.next();
                        }
                        // For empty input this caches a meaningless pair, but hasNext is
                        // already false so it is never returned.
                        advance();
                    }
                    @Override
                    public boolean hasNext() {
                        return hasNext;
                    }
                    @Override
                    public Pair<T, Integer> next() {
                        if (isLast) {
                            // Hand out the final cached run and mark exhaustion.
                            hasNext = false;
                            return next;
                        } else {
                            Pair<T, Integer> oldNext = next;
                            advance();
                            return oldNext;
                        }
                    }
                    // Counts the run starting at nextX, leaving nextX at the first element
                    // of the following run (or setting isLast when the source runs out).
                    private void advance() {
                        T original = nextX;
                        int count = 0;
                        do {
                            count++;
                            if (!xsi.hasNext()) {
                                isLast = true;
                                break;
                            }
                            nextX = xsi.next();
                        } while (Objects.equals(original, nextX));
                        next = new Pair<>(original, count);
                    }
                    @Override
                    public void remove() {
                        throw new UnsupportedOperationException("cannot remove from this iterator");
                    }
                };
            }
        };
    }
public static @NotNull Iterable<Pair<Character, Integer>> countAdjacent(@NotNull String s) {
return countAdjacent(fromString(s));
}
public static @NotNull <T> Iterable<List<T>> group(@NotNull Iterable<T> xs) {
return group(p -> Objects.equals(p.a, p.b), xs);
}
public static @NotNull <T> Iterable<String> group(@NotNull String s) {
return group(p -> p.a == p.b, s);
}
public static @NotNull <T> Iterable<List<T>> inits(@NotNull Iterable<T> xs) {
return cons(new ArrayList<T>(), ()-> new Iterator<List<T>>() {
private Iterator<T> xsi = xs.iterator();
private List<T> currentList = new ArrayList<>();
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public List<T> next() {
List<T> nextList = new ArrayList<>();
nextList.addAll(currentList);
nextList.add(xsi.next());
currentList = nextList;
return currentList;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
});
}
public static @NotNull Iterable<String> inits(@NotNull String s) {
return map(i -> s.substring(0, i), range(0, s.length()));
}
public static @NotNull <T> Iterable<List<T>> tails(@NotNull Iterable<T> xs) {
List<T> list = toList(xs);
return map(
i -> {
List<T> subList = new ArrayList<T>();
for (int j = i; j < list.size(); j++) {
subList.add(list.get(j));
}
return subList;
},
range(0, list.size())
);
}
public static @NotNull Iterable<String> tails(@NotNull String s) {
return map(s::substring, range(0, s.length()));
}
public static <T> boolean isPrefixOf(@NotNull Iterable<T> xs, @NotNull Iterable<T> ys) {
Iterator<T> xsi = xs.iterator();
Iterator<T> ysi = ys.iterator();
while (xsi.hasNext()) {
if (!ysi.hasNext()) return false;
T x = xsi.next();
T y = ysi.next();
if (!Objects.equals(x, y)) return false;
}
return true;
}
public static boolean isPrefixOf(@NotNull String s, @NotNull String t) {
return s.length() <= t.length() && s.substring(0, t.length()).equals(t);
}
public static <T> boolean isSuffixOf(@NotNull Iterable<T> xs, @NotNull Iterable<T> ys) {
return isPrefixOf(reverse(xs), reverse(ys));
}
public static boolean isSuffixOf(@NotNull String s, @NotNull String t) {
return s.length() <= t.length() && s.substring(t.length() - s.length()).equals(t);
}
public static @NotNull <T> Iterable<List<T>> windows(int size, @NotNull Iterable<T> xs) {
List<T> firstWindow = toList(take(size, xs));
if (firstWindow.size() < size) return new ArrayList<>();
return cons(firstWindow, () -> new Iterator<List<T>>() {
Iterator<T> xsi = drop(size, xs).iterator();
List<T> previousWindow = firstWindow;
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public List<T> next() {
previousWindow = toList(concat(tail(previousWindow), Arrays.asList(xsi.next())));
return previousWindow;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
});
}
public static @NotNull Iterable<String> windows(int size, @NotNull String s) {
String firstWindow = take(size, s);
if (firstWindow.length() < size) return new ArrayList<>();
return cons(firstWindow, () -> new Iterator<String>() {
Iterator<Character> xsi = fromString(drop(size, s)).iterator();
String previousWindow = firstWindow;
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public String next() {
previousWindow = concat(tail(previousWindow), Character.toString(xsi.next()));
return previousWindow;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
});
}
public static <T> boolean isInfixOf(@NotNull Iterable<T> xs, @NotNull Iterable<T> ys) {
return any(zs -> equal(xs, zs), windows(length(xs), ys));
}
public static boolean isInfixOf(@NotNull String s, @NotNull String t) {
return t.contains(s);
}
public static @NotNull <T> Iterable<T> mux(@NotNull List<Iterable<T>> xss) {
return concat(map(list -> list, transpose(xss)));
}
public static @NotNull String muxStrings(@NotNull List<String> xss) {
return concatStrings(transposeStrings(xss));
}
public static @NotNull <T> List<Iterable<T>> demux(int lines, @NotNull Iterable<T> xs) {
List<Iterable<T>> demuxed = new ArrayList<>();
for (int i = 0; i < lines; i++) {
Iterable<Boolean> mask = concat(
replicate(i, false),
cycle(cons(true, (Iterable<Boolean>) replicate(lines - 1, false)))
);
demuxed.add(select(mask, xs));
}
return demuxed;
}
public static @NotNull List<String> demux(int lines, @NotNull String s) {
List<String> demuxed = new ArrayList<>();
for (int i = 0; i < lines; i++) {
Iterable<Boolean> mask = concat(
replicate(i, false),
cycle(cons(true, (Iterable<Boolean>) replicate(lines - 1, false)))
);
demuxed.add(select(mask, s));
}
return demuxed;
}
public static @NotNull <T> Optional<T> find(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
for (T x : xs) {
if (p.test(x)) return Optional.of(x);
}
return Optional.empty();
}
public static @NotNull Optional<Character> find(@NotNull Predicate<Character> p, @NotNull String s) {
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if (p.test(c)) return Optional.of(c);
}
return Optional.empty();
}
public static @NotNull <T> Iterable<T> filter(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
return new Iterable<T>() {
@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
private T next;
private boolean hasNext;
{
advance();
}
@Override
public boolean hasNext() {
return hasNext;
}
@Override
public T next() {
T current = next;
advance();
return current;
}
private void advance() {
while (xsi.hasNext()) {
next = xsi.next();
if (p.test(next)) {
hasNext = true;
return;
}
}
hasNext = false;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
};
}
public static @NotNull String filter(@NotNull Predicate<Character> p, @NotNull String s) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if (p.test(c)) sb.append(c);
}
return sb.toString();
}
public static @NotNull <T> Pair<Iterable<T>, Iterable<T>> partition(
@NotNull Predicate<T> p,
@NotNull Iterable<T> xs) {
return new Pair<>(filter(p, xs), filter(x -> !p.test(x), xs));
}
public static @NotNull Pair<String, String> partition(@NotNull Predicate<Character> p, @NotNull String s) {
StringBuilder sba = new StringBuilder();
StringBuilder sbb = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
(p.test(c) ? sba : sbb).append(c);
}
return new Pair<>(sba.toString(), sbb.toString());
}
public static <T> T get(Iterable<T> xs, int i) {
if (i < 0)
throw new IndexOutOfBoundsException();
Iterator<T> xsi = xs.iterator();
T element = null;
for (int j = 0; j <= i; j++) {
if (!xsi.hasNext())
throw new IndexOutOfBoundsException();
element = xsi.next();
}
return element;
}
public static <T> T get(Iterable<T> xs, BigInteger i) {
if (lt(i, BigInteger.ZERO))
throw new IndexOutOfBoundsException();
Iterator<T> xsi = xs.iterator();
T element = null;
for (BigInteger j : range(BigInteger.ONE, i)) {
if (!xsi.hasNext())
throw new IndexOutOfBoundsException();
element = xsi.next();
}
return element;
}
    // Random-access lookup; delegates to List.get, which throws IndexOutOfBoundsException for invalid indices.
    public static <T> T get(List<T> xs, int i) {
        return xs.get(i);
    }
    // Character lookup; delegates to String.charAt, which throws StringIndexOutOfBoundsException for invalid
    // indices.
    public static char get(String s, int i) {
        return s.charAt(i);
    }
    // Keeps the elements of xs whose corresponding flag in bs is true. Pairs beyond the shorter of the two inputs
    // are dropped, since zip truncates to the shorter argument. Lazy, like the underlying map/filter/zip.
    public static <T> Iterable<T> select(Iterable<Boolean> bs, Iterable<T> xs) {
        return map(p -> p.b, filter(p -> p.a, (Iterable<Pair<Boolean, T>>) zip(bs, xs)));
    }
    // String analogue of select: keeps the characters of s whose corresponding flag in bs is true.
    // NOTE(review): the type parameter <T> is unused — candidate for removal in a breaking-change window.
    public static <T> String select(Iterable<Boolean> bs, String s) {
        return charsToString(
                map(p -> p.b, filter(p -> p.a, (Iterable<Pair<Boolean, Character>>) zip(bs, fromString(s))))
        );
    }
public static <A, B> Iterable<Pair<A, B>> zip(Iterable<A> as, Iterable<B> bs) {
return () -> new Iterator<Pair<A, B>>() {
private final Iterator<A> asi = as.iterator();
private final Iterator<B> bsi = bs.iterator();
@Override
public boolean hasNext() {
return asi.hasNext() && bsi.hasNext();
}
@Override
public Pair<A, B> next() {
return new Pair<>(asi.next(), bsi.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static <A, B, C> Iterable<Triple<A, B, C>> zip3(Iterable<A> as, Iterable<B> bs, Iterable<C> cs) {
return () -> new Iterator<Triple<A, B, C>>() {
private final Iterator<A> asi = as.iterator();
private final Iterator<B> bsi = bs.iterator();
private final Iterator<C> csi = cs.iterator();
@Override
public boolean hasNext() {
return asi.hasNext() && bsi.hasNext() && csi.hasNext();
}
@Override
public Triple<A, B, C> next() {
return new Triple<>(asi.next(), bsi.next(), csi.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static <A, B, C, D> Iterable<Quadruple<A, B, C, D>> zip4(
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds
) {
return () -> new Iterator<Quadruple<A, B, C, D>>() {
private final Iterator<A> asi = as.iterator();
private final Iterator<B> bsi = bs.iterator();
private final Iterator<C> csi = cs.iterator();
private final Iterator<D> dsi = ds.iterator();
@Override
public boolean hasNext() {
return asi.hasNext() && bsi.hasNext() && csi.hasNext() && dsi.hasNext();
}
@Override
public Quadruple<A, B, C, D> next() {
return new Quadruple<>(asi.next(), bsi.next(), csi.next(), dsi.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static <A, B, C, D, E> Iterable<Quintuple<A, B, C, D, E>> zip5(
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds,
Iterable<E> es
) {
return () -> new Iterator<Quintuple<A, B, C, D, E>>() {
private final Iterator<A> asi = as.iterator();
private final Iterator<B> bsi = bs.iterator();
private final Iterator<C> csi = cs.iterator();
private final Iterator<D> dsi = ds.iterator();
private final Iterator<E> esi = es.iterator();
@Override
public boolean hasNext() {
return asi.hasNext() && bsi.hasNext() && csi.hasNext() && dsi.hasNext() && esi.hasNext();
}
@Override
public Quintuple<A, B, C, D, E> next() {
return new Quintuple<>(asi.next(), bsi.next(), csi.next(), dsi.next(), esi.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static <A, B, C, D, E, F> Iterable<Sextuple<A, B, C, D, E, F>> zip6(
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds,
Iterable<E> es,
Iterable<F> fs
) {
return () -> new Iterator<Sextuple<A, B, C, D, E, F>>() {
private final Iterator<A> asi = as.iterator();
private final Iterator<B> bsi = bs.iterator();
private final Iterator<C> csi = cs.iterator();
private final Iterator<D> dsi = ds.iterator();
private final Iterator<E> esi = es.iterator();
private final Iterator<F> fsi = fs.iterator();
@Override
public boolean hasNext() {
return asi.hasNext() &&
bsi.hasNext() &&
csi.hasNext() &&
dsi.hasNext() &&
esi.hasNext() &&
fsi.hasNext();
}
@Override
public Sextuple<A, B, C, D, E, F> next() {
return new Sextuple<>(asi.next(), bsi.next(), csi.next(), dsi.next(), esi.next(), fsi.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static <A, B, C, D, E, F, G> Iterable<Septuple<A, B, C, D, E, F, G>> zip7(
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds,
Iterable<E> es,
Iterable<F> fs,
Iterable<G> gs
) {
return () -> new Iterator<Septuple<A, B, C, D, E, F, G>>() {
private final Iterator<A> asi = as.iterator();
private final Iterator<B> bsi = bs.iterator();
private final Iterator<C> csi = cs.iterator();
private final Iterator<D> dsi = ds.iterator();
private final Iterator<E> esi = es.iterator();
private final Iterator<F> fsi = fs.iterator();
private final Iterator<G> gsi = gs.iterator();
@Override
public boolean hasNext() {
return asi.hasNext() &&
bsi.hasNext() &&
csi.hasNext() &&
dsi.hasNext() &&
esi.hasNext() &&
fsi.hasNext() &&
gsi.hasNext();
}
@Override
public Septuple<A, B, C, D, E, F, G> next() {
return new Septuple<>(
asi.next(),
bsi.next(),
csi.next(),
dsi.next(),
esi.next(),
fsi.next(),
gsi.next()
);
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static <A, B> Iterable<Pair<A, B>> zipPadded(A aPad, B bPad, Iterable<A> as, Iterable<B> bs) {
return () -> new Iterator<Pair<A, B>>() {
private final Iterator<A> asi = as.iterator();
private final Iterator<B> bsi = bs.iterator();
@Override
public boolean hasNext() {
return asi.hasNext() || bsi.hasNext();
}
@Override
public Pair<A, B> next() {
A a = asi.hasNext() ? asi.next() : aPad;
B b = bsi.hasNext() ? bsi.next() : bPad;
return new Pair<>(a, b);
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static <A, B, C> Iterable<Triple<A, B, C>> zip3Padded(
A aPad,
B bPad,
C cPad,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs
) {
return () -> new Iterator<Triple<A, B, C>>() {
private final Iterator<A> asi = as.iterator();
private final Iterator<B> bsi = bs.iterator();
private final Iterator<C> csi = cs.iterator();
@Override
public boolean hasNext() {
return asi.hasNext() || bsi.hasNext() || csi.hasNext();
}
@Override
public Triple<A, B, C> next() {
A a = asi.hasNext() ? asi.next() : aPad;
B b = bsi.hasNext() ? bsi.next() : bPad;
C c = csi.hasNext() ? csi.next() : cPad;
return new Triple<>(a, b, c);
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static <A, B, C, D> Iterable<Quadruple<A, B, C, D>> zip4Padded(
A aPad,
B bPad,
C cPad,
D dPad,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds
) {
return () -> new Iterator<Quadruple<A, B, C, D>>() {
private final Iterator<A> asi = as.iterator();
private final Iterator<B> bsi = bs.iterator();
private final Iterator<C> csi = cs.iterator();
private final Iterator<D> dsi = ds.iterator();
@Override
public boolean hasNext() {
return asi.hasNext() || bsi.hasNext() || csi.hasNext() || dsi.hasNext();
}
@Override
public Quadruple<A, B, C, D> next() {
A a = asi.hasNext() ? asi.next() : aPad;
B b = bsi.hasNext() ? bsi.next() : bPad;
C c = csi.hasNext() ? csi.next() : cPad;
D d = dsi.hasNext() ? dsi.next() : dPad;
return new Quadruple<>(a, b, c, d);
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static <A, B, C, D, E> Iterable<Quintuple<A, B, C, D, E>> zip5Padded(
A aPad,
B bPad,
C cPad,
D dPad,
E ePad,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds,
Iterable<E> es) {
return () -> new Iterator<Quintuple<A, B, C, D, E>>() {
private final Iterator<A> asi = as.iterator();
private final Iterator<B> bsi = bs.iterator();
private final Iterator<C> csi = cs.iterator();
private final Iterator<D> dsi = ds.iterator();
private final Iterator<E> esi = es.iterator();
@Override
public boolean hasNext() {
return asi.hasNext() || bsi.hasNext() || csi.hasNext() || dsi.hasNext() || esi.hasNext();
}
@Override
public Quintuple<A, B, C, D, E> next() {
A a = asi.hasNext() ? asi.next() : aPad;
B b = bsi.hasNext() ? bsi.next() : bPad;
C c = csi.hasNext() ? csi.next() : cPad;
D d = dsi.hasNext() ? dsi.next() : dPad;
E e = esi.hasNext() ? esi.next() : ePad;
return new Quintuple<>(a, b, c, d, e);
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static <A, B, C, D, E, F> Iterable<Sextuple<A, B, C, D, E, F>> zip6Padded(
A aPad,
B bPad,
C cPad,
D dPad,
E ePad,
F fPad,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds,
Iterable<E> es,
Iterable<F> fs) {
return () -> new Iterator<Sextuple<A, B, C, D, E, F>>() {
private final Iterator<A> asi = as.iterator();
private final Iterator<B> bsi = bs.iterator();
private final Iterator<C> csi = cs.iterator();
private final Iterator<D> dsi = ds.iterator();
private final Iterator<E> esi = es.iterator();
private final Iterator<F> fsi = fs.iterator();
@Override
public boolean hasNext() {
return asi.hasNext() ||
bsi.hasNext() ||
csi.hasNext() ||
dsi.hasNext() ||
esi.hasNext() ||
fsi.hasNext();
}
@Override
public Sextuple<A, B, C, D, E, F> next() {
A a = asi.hasNext() ? asi.next() : aPad;
B b = bsi.hasNext() ? bsi.next() : bPad;
C c = csi.hasNext() ? csi.next() : cPad;
D d = dsi.hasNext() ? dsi.next() : dPad;
E e = esi.hasNext() ? esi.next() : ePad;
F f = fsi.hasNext() ? fsi.next() : fPad;
return new Sextuple<>(a, b, c, d, e, f);
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static <A, B, C, D, E, F, G> Iterable<Septuple<A, B, C, D, E, F, G>> zip7Padded(
A aPad,
B bPad,
C cPad,
D dPad,
E ePad,
F fPad,
G gPad,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds,
Iterable<E> es,
Iterable<F> fs,
Iterable<G> gs) {
return () -> new Iterator<Septuple<A, B, C, D, E, F, G>>() {
private final Iterator<A> asi = as.iterator();
private final Iterator<B> bsi = bs.iterator();
private final Iterator<C> csi = cs.iterator();
private final Iterator<D> dsi = ds.iterator();
private final Iterator<E> esi = es.iterator();
private final Iterator<F> fsi = fs.iterator();
private final Iterator<G> gsi = gs.iterator();
@Override
public boolean hasNext() {
return asi.hasNext() ||
bsi.hasNext() ||
csi.hasNext() ||
dsi.hasNext() ||
esi.hasNext() ||
fsi.hasNext() ||
gsi.hasNext();
}
@Override
public Septuple<A, B, C, D, E, F, G> next() {
A a = asi.hasNext() ? asi.next() : aPad;
B b = bsi.hasNext() ? bsi.next() : bPad;
C c = csi.hasNext() ? csi.next() : cPad;
D d = dsi.hasNext() ? dsi.next() : dPad;
E e = esi.hasNext() ? esi.next() : ePad;
F f = fsi.hasNext() ? fsi.next() : fPad;
G g = gsi.hasNext() ? gsi.next() : gPad;
return new Septuple<>(a, b, c, d, e, f, g);
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
public static <A, B, O> Iterable<O> zipWith(
Function<Pair<A, B>, O> f,
Iterable<A> as,
Iterable<B> bs
) {
return map(f, zip(as, bs));
}
public static <A, B, C, O> Iterable<O> zipWith3(
Function<Triple<A, B, C>, O> f,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs
) {
return map(f, zip3(as, bs, cs));
}
public static <A, B, C, D, O> Iterable<O> zipWith4(
Function<Quadruple<A, B, C, D>, O> f,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds
) {
return map(f, zip4(as, bs, cs, ds));
}
public static <A, B, C, D, E, O> Iterable<O> zipWith5(
Function<Quintuple<A, B, C, D, E>, O> f,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds,
Iterable<E> es
) {
return map(f, zip5(as, bs, cs, ds, es));
}
public static <A, B, C, D, E, F, O> Iterable<O> zipWith6(
Function<Sextuple<A, B, C, D, E, F>, O> f,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds,
Iterable<E> es,
Iterable<F> fs
) {
return map(f, zip6(as, bs, cs, ds, es, fs));
}
public static <A, B, C, D, E, F, G, O> Iterable<O> zipWith6(
Function<Septuple<A, B, C, D, E, F, G>, O> f,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds,
Iterable<E> es,
Iterable<F> fs,
Iterable<G> gs
) {
return map(f, zip7(as, bs, cs, ds, es, fs, gs));
}
public static <A, B, O> Iterable<O> zipWithPadded(
Function<Pair<A, B>, O> f,
A aPad,
B bPad,
Iterable<A> as,
Iterable<B> bs
) {
return map(f, zipPadded(aPad, bPad, as, bs));
}
public static <A, B, C, O> Iterable<O> zipWith3Padded(
Function<Triple<A, B, C>, O> f,
A aPad,
B bPad,
C cPad,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs
) {
return map(f, zip3Padded(aPad, bPad, cPad, as, bs, cs));
}
public static <A, B, C, D, O> Iterable<O> zipWith4Padded(
Function<Quadruple<A, B, C, D>, O> f,
A aPad,
B bPad,
C cPad,
D dPad,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds
) {
return map(f, zip4Padded(aPad, bPad, cPad, dPad, as, bs, cs, ds));
}
public static <A, B, C, D, E, O> Iterable<O> zipWith5Padded(
Function<Quintuple<A, B, C, D, E>, O> f,
A aPad,
B bPad,
C cPad,
D dPad,
E ePad,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds,
Iterable<E> es
) {
return map(f, zip5Padded(aPad, bPad, cPad, dPad, ePad, as, bs, cs, ds, es));
}
public static <A, B, C, D, E, F, O> Iterable<O> zipWith6Padded(
Function<Sextuple<A, B, C, D, E, F>, O> f,
A aPad,
B bPad,
C cPad,
D dPad,
E ePad,
F fPad,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds,
Iterable<E> es,
Iterable<F> fs
) {
return map(f, zip6Padded(aPad, bPad, cPad, dPad, ePad, fPad, as, bs, cs, ds, es, fs));
}
public static <A, B, C, D, E, F, G, O> Iterable<O> zipWith7Padded(
Function<Septuple<A, B, C, D, E, F, G>, O> f,
A aPad,
B bPad,
C cPad,
D dPad,
E ePad,
F fPad,
G gPad,
Iterable<A> as,
Iterable<B> bs,
Iterable<C> cs,
Iterable<D> ds,
Iterable<E> es,
Iterable<F> fs,
Iterable<G> gs
) {
return map(f, zip7Padded(aPad, bPad, cPad, dPad, ePad, fPad, gPad, as, bs, cs, ds, es, fs, gs));
}
public static <A, B> Pair<Iterable<A>, Iterable<B>> unzip(Iterable<Pair<A, B>> ps) {
return new Pair<>(
map(p -> p.a, ps),
map(p -> p.b, ps)
);
}
public static <A, B, C> Triple<Iterable<A>, Iterable<B>, Iterable<C>> unzip3(Iterable<Triple<A, B, C>> ps) {
return new Triple<>(
map(p -> p.a, ps),
map(p -> p.b, ps),
map(p -> p.c, ps)
);
}
public static <A, B, C, D> Quadruple<
Iterable<A>,
Iterable<B>,
Iterable<C>,
Iterable<D>
> unzip4(Iterable<Quadruple<A, B, C, D>> ps) {
return new Quadruple<>(
map(p -> p.a, ps),
map(p -> p.b, ps),
map(p -> p.c, ps),
map(p -> p.d, ps)
);
}
public static <A, B, C, D, E> Quintuple<
Iterable<A>,
Iterable<B>,
Iterable<C>,
Iterable<D>,
Iterable<E>
> unzip5(Iterable<Quintuple<A, B, C, D, E>> ps) {
return new Quintuple<>(
map(p -> p.a, ps),
map(p -> p.b, ps),
map(p -> p.c, ps),
map(p -> p.d, ps),
map(p -> p.e, ps)
);
}
public static <A, B, C, D, E, F> Sextuple<
Iterable<A>,
Iterable<B>,
Iterable<C>,
Iterable<D>,
Iterable<E>,
Iterable<F>
> unzip6(Iterable<Sextuple<A, B, C, D, E, F>> ps) {
return new Sextuple<>(
map(p -> p.a, ps),
map(p -> p.b, ps),
map(p -> p.c, ps),
map(p -> p.d, ps),
map(p -> p.e, ps),
map(p -> p.f, ps)
);
}
public static <A, B, C, D, E, F, G> Septuple<
Iterable<A>,
Iterable<B>,
Iterable<C>,
Iterable<D>,
Iterable<E>,
Iterable<F>,
Iterable<G>
> unzip7(Iterable<Septuple<A, B, C, D, E, F, G>> ps) {
return new Septuple<>(
map(p -> p.a, ps),
map(p -> p.b, ps),
map(p -> p.c, ps),
map(p -> p.d, ps),
map(p -> p.e, ps),
map(p -> p.f, ps),
map(p -> p.g, ps)
);
}
public static @NotNull <T> Iterable<T> nub(@NotNull Iterable<T> xs) {
return new Iterable<T>() {
private Set<T> seen = new HashSet<>();
@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
private T next;
private boolean hasNext;
{
advance();
}
@Override
public boolean hasNext() {
return hasNext;
}
@Override
public T next() {
T current = next;
advance();
return current;
}
private void advance() {
while (xsi.hasNext()) {
next = xsi.next();
if (!seen.contains(next)) {
seen.add(next);
hasNext = true;
return;
}
}
hasNext = false;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
};
}
public static @NotNull String nub(@NotNull String s) {
Set<Character> seen = new HashSet<>();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if (!seen.contains(c)) {
seen.add(c);
sb.append(c);
}
}
return sb.toString();
}
public static <T> boolean isSubsetOf(@NotNull Iterable<T> xs, @NotNull Iterable<T> ys) {
HashSet<T> set = new HashSet<>();
addTo(xs, set);
for (T y : ys) {
set.remove(y);
if (set.isEmpty()) return true;
}
return false;
}
    // Character version: is every character of s also present in t?
    // NOTE(review): the type parameter <T> is unused — candidate for removal in a breaking-change window.
    public static <T> boolean isSubsetOf(@NotNull String s, @NotNull String t) {
        return isSubsetOf(fromString(s), fromString(t));
    }
public static @NotNull <T extends Comparable<T>> List<T> sort(@NotNull Iterable<T> xss) {
List<T> list = toList(xss);
Collections.sort(list);
return list;
}
public static @NotNull String sort(@NotNull String s) {
List<Character> list = toList(s);
Collections.sort(list);
return charsToString(list);
}
public static @NotNull <T> Iterable<T> nub(@NotNull Predicate<Pair<T, T>> p, @NotNull Iterable<T> xs) {
return new Iterable<T>() {
private Set<T> seen = new HashSet<>();
@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
private T next;
private boolean hasNext;
{
advance();
}
@Override
public boolean hasNext() {
return hasNext;
}
@Override
public T next() {
T current = next;
advance();
return current;
}
private void advance() {
while (xsi.hasNext()) {
next = xsi.next();
boolean good = !seen.contains(next) && !any(x -> p.test(new Pair<T, T>(next, x)), seen);
if (good) {
seen.add(next);
hasNext = true;
return;
}
}
hasNext = false;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
};
}
public static @NotNull String nub(@NotNull Predicate<Pair<Character, Character>> p, @NotNull String s) {
Set<Character> seen = new HashSet<>();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if (!seen.contains(c) && !any(x -> p.test(new Pair<Character, Character>(c, x)), seen)) {
seen.add(c);
sb.append(c);
}
}
return sb.toString();
}
    /**
     * Copies {@code xss} into a new, modifiable {@code List} sorted by {@code comparator}. The input must be finite
     * and is not modified.
     */
    // NOTE(review): the <T extends Comparable<T>> bound is unnecessary here — the comparator supplies the
    // ordering — but removing it would change the method's erasure, so it is left as-is.
    public static @NotNull <T extends Comparable<T>> List<T> sort(
            @NotNull Comparator<T> comparator,
            @NotNull Iterable<T> xss
    ) {
        List<T> list = toList(xss);
        Collections.sort(list, comparator);
        return list;
    }
    /**
     * Returns a {@code String} with the characters of {@code s} rearranged into ascending order according to
     * {@code comparator}.
     */
    public static @NotNull String sort(@NotNull Comparator<Character> comparator, @NotNull String s) {
        List<Character> list = toList(s);
        Collections.sort(list, comparator);
        return charsToString(list);
    }
    /**
     * Returns the largest element of {@code xs} according to {@code comparator}, computed as a left fold of the
     * binary {@code max}. {@code xs} must be finite and non-empty.
     */
    // NOTE(review): the Comparable bound is redundant given the explicit comparator (same situation as sort above).
    public static @NotNull <T extends Comparable<T>> T maximum(
            @NotNull Comparator<T> comparator,
            @NotNull Iterable<T> xs
    ) {
        return foldl1(p -> max(comparator, p.a, p.b), xs);
    }
    // Largest character of the non-empty String s according to comparator; folds the binary max over its
    // characters.
    public static char maximum(@NotNull Comparator<Character> comparator, @NotNull String s) {
        return foldl1(p -> max(comparator, p.a, p.b), fromString(s));
    }
    /**
     * Returns the smallest element of {@code xs} according to {@code comparator}, computed as a left fold of the
     * binary {@code min}. {@code xs} must be finite and non-empty.
     */
    // NOTE(review): the Comparable bound is redundant given the explicit comparator (same situation as sort above).
    public static @NotNull <T extends Comparable<T>> T minimum(
            @NotNull Comparator<T> comparator,
            @NotNull Iterable<T> xs
    ) {
        return foldl1(p -> min(comparator, p.a, p.b), xs);
    }
    // Smallest character of the non-empty String s according to comparator; folds the binary min over its
    // characters.
    public static char minimum(@NotNull Comparator<Character> comparator, @NotNull String s) {
        return foldl1(p -> min(comparator, p.a, p.b), fromString(s));
    }
    /**
     * Lazily breaks {@code xs} into maximal runs of elements, where every later element of a run is related by
     * {@code p} to the run's FIRST element (not to its immediate predecessor). Each run is returned as a
     * {@code List}; concatenating the runs restores {@code xs}. The iterator does not support removal.
     */
    public static @NotNull <T> Iterable<List<T>> group(
            @NotNull Predicate<Pair<T, T>> p,
            @NotNull Iterable<T> xs
    ) {
        return new Iterable<List<T>>() {
            @Override
            public Iterator<List<T>> iterator() {
                return new Iterator<List<T>>() {
                    private Iterator<T> xsi = xs.iterator();
                    private boolean hasNext = xsi.hasNext();
                    // isLast: the source was exhausted while building `next`, so `next` is the final run.
                    private boolean isLast = false;
                    // nextX: the first element of the run that advance() will build next.
                    private T nextX = null;
                    // next: the completed run waiting to be handed out by next().
                    private List<T> next = null;
                    {
                        // Prime nextX with the first source element (if any), then build the first run.
                        if (hasNext) {
                            nextX = xsi.next();
                        }
                        advance();
                    }
                    @Override
                    public boolean hasNext() {
                        return hasNext;
                    }
                    @Override
                    public List<T> next() {
                        if (isLast) {
                            // The pending run is the final one; mark the iterator exhausted after returning it.
                            hasNext = false;
                            return next;
                        } else {
                            List<T> oldNext = next;
                            advance();
                            return oldNext;
                        }
                    }
                    // Builds one run into `next`: collects elements while p relates them to the run's first
                    // element (`original`), leaving the first non-member in nextX as the seed of the next run.
                    private void advance() {
                        T original = nextX;
                        List<T> list = new ArrayList<>();
                        do {
                            list.add(nextX);
                            if (!xsi.hasNext()) {
                                isLast = true;
                                break;
                            }
                            nextX = xsi.next();
                        } while (p.test(new Pair<T, T>(original, nextX)));
                        next = list;
                    }
                    @Override
                    public void remove() {
                        throw new UnsupportedOperationException("cannot remove from this iterator");
                    }
                };
            }
        };
    }
    /**
     * String version of group: breaks {@code s} into maximal runs of characters related by {@code p} to each run's
     * first character, returning each run as a {@code String}.
     */
    public static @NotNull Iterable<String> group(
            @NotNull Predicate<Pair<Character, Character>> p,
            @NotNull String s
    ) {
        return map(IterableUtils::charsToString, group(p, fromString(s)));
    }
public static <T> boolean equal(@NotNull Iterable<T> xs, @NotNull Iterable<T> ys) {
Iterator<T> xsi = xs.iterator();
Iterator<T> ysi = ys.iterator();
while (xsi.hasNext()) {
if (!ysi.hasNext()) return false;
T x = xsi.next();
T y = ysi.next();
if (!Objects.equals(x, y)) return false;
}
return !ysi.hasNext();
}
}
|
src/main/java/mho/wheels/iterables/IterableUtils.java
|
package mho.wheels.iterables;
import mho.wheels.structures.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.*;
import java.util.function.Function;
import java.util.function.Predicate;
import static mho.wheels.ordering.Ordering.*;
/**
* Methods for generating and manipulating {@link Iterable}s. The equivalents of every function in Haskell's
* {@code Data.List} module may be found here (except for {@code permutations} and {@code subsequences}, which are in
* {@link mho.wheels.math.Combinatorics}).
*/
public final class IterableUtils {
    /**
     * Disallow instantiation
     */
    // Utility class: every member is static, so no instances are ever needed.
    private IterableUtils() {}
/**
* Adds an {@code Iterable}'s elements to a {@link Collection}, in the order that the elements appear in the
* {@code Iterable}. Only works for finite {@code Iterable}s.
*
* <ul>
* <li>{@code xs} must be finite.</li>
* <li>{@code collection} must be non-null.</li>
* <li>{@code collection} must be able to hold every element of {@code xs}.</li>
* </ul>
*
* @param xs the {@code Iterable}
* @param collection the {@code Collection} to which the {@code Iterable}'s elements are added
* @param <T> the {@code Iterable}'s element type
*/
public static <T> void addTo(@NotNull Iterable<T> xs, @NotNull Collection<T> collection) {
for (T x : xs) {
collection.add(x);
}
}
/**
* Adds a {@code String}'s characters to a {@code Collection}, in the order that the characters appear in the
* {@code String}.
*
* <ul>
* <li>{@code s} must be non-null.</li>
* <li>{@code collection} must be non-null.</li>
* <li>{@code collection} must be able to hold every character of {@code s}.</li>
* </ul>
*
* @param s the string
* @param collection the collection to which the {@code String}'s characters are added
*/
public static void addTo(@NotNull String s, @NotNull Collection<Character> collection) {
for (int i = 0; i < s.length(); i++) {
collection.add(s.charAt(i));
}
}
/**
* Converts an {@code Iterable} to a {@link List}. Only works for finite {@code Iterable}s. The resulting list may
* be modified, but the modifications will not affect the original {@code Iterable}.
*
* <ul>
* <li>{@code xs} must be finite.</li>
* <li>The result is non-null.</li>
* </ul>
*
* @param xs the {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return a {@code List} containing the elements of the {@code Iterable} in their original order
*/
public static @NotNull <T> List<T> toList(@NotNull Iterable<T> xs) {
List<T> list = new ArrayList<>();
addTo(xs, list);
return list;
}
/**
* Converts an {@code Iterable} to a {@code List}. Only works for finite {@code Iterable}s.
*
* <ul>
* <li>{@code s} may be any {@code String}.</li>
* <li>The result is non-null.</li>
* </ul>
*
* @param s the {@code String}
* @return a {@code List} containing the characters of {@code s} in their original order
*/
public static @NotNull List<Character> toList(@NotNull String s) {
List<Character> list = new ArrayList<>();
addTo(s, list);
return list;
}
/**
* Creates a {@code String} representation of {@code xs}. Each element is converted to a {@code String} and
* those {@code String}s are placed in a comma-separated list surrounded by square brackets. Only works for finite
* {@code Iterable}s.
*
* <ul>
* <li>{@code xs} must be finite.</li>
* <li>The result begins with {@code '['} and ends with {@code ']'}.</li>
* </ul>
*
* @param xs the {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return a {@code String} representation of {@code xs}
*/
public static @NotNull <T> String toString(@NotNull Iterable<T> xs) {
return toList(xs).toString();
}
    /**
     * Creates a {@code String} representation of {@code xs}, displaying at most {@code size} elements. The first
     * {@code size} elements are converted to a {@code String} and those {@code String}s are placed in a
     * comma-separated list surrounded by square brackets. If the {@code Iterable} contains more than {@code size}
     * elements, an ellipsis ({@code ...}) is added at the end of the list.
     *
     * <ul>
     *  <li>{@code size} must be non-negative.</li>
     *  <li>{@code xs} may be any {@code Iterable}.</li>
     *  <li>The result begins with {@code '['} and ends with {@code ']'}.</li>
     * </ul>
     *
     * @param size the maximum number of elements displayed
     * @param xs the {@code Iterable}
     * @param <T> the {@code Iterable}'s element type
     * @return a {@code String} representation of {@code xs}
     */
    public static @NotNull <T> String toString(int size, @NotNull Iterable<T> xs) {
        if (size < 0)
            throw new IllegalArgumentException("size cannot be negative");
        if (size == 0) {
            // Nothing to show: just indicate whether anything was omitted.
            return isEmpty(xs) ? "[]" : "[...]";
        }
        // Take one extra element so we can tell whether an ellipsis is needed, without forcing the whole Iterable.
        List<T> list = toList(take(size + 1, xs));
        String listString = toList(take(size, list)).toString();
        if (list.size() > size) {
            // init() drops the closing ']' so the ellipsis can be spliced in before a new bracket.
            listString = init(listString) + ", ...]";
        }
        return listString;
    }
/**
* Converts a {@code String} to an {@code Iterable} of {@code Character}s. The order of the characters is
* preserved. Uses O(1) additional memory. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code s} must be non-null.</li>
* <li>The result is finite and does not contain any nulls.</li>
* </ul>
*
* @param s the {@code String}
* @return an {@code Iterable} containing all the {@code String}'s characters in their original order
*/
public static @NotNull Iterable<Character> fromString(@NotNull String s) {
return () -> new Iterator<Character>() {
private int i = 0;
@Override
public boolean hasNext() {
return i < s.length();
}
@Override
public Character next() {
return s.charAt(i++);
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Creates a {@code String} from an {@code Iterable} of {@code Character}s. The order of the characters is
* preserved. Only works for finite {@code Iterable}s.
*
* <ul>
* <li>{@code cs} must be finite and cannot contain nulls.</li>
* <li>The result is non-null.</li>
* </ul>
*
* @param cs the {@code Iterable} of {@code Character}s
* @return the {@code String} containing all of {@code chars}'s characters in their original order
*/
public static @NotNull String charsToString(@NotNull Iterable<Character> cs) {
StringBuilder sb = new StringBuilder();
for (char c : cs) {
sb.append(c);
}
return sb.toString();
}
/**
* Generates all {@link Byte}s greater than or equal to {@code a}, in order. Does not wrap around after reaching
* {@code Byte.MAX_VALUE}. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code a} may be any {@code byte}.</li>
* <li>The result is an {@code Iterable} of consecutive ascending {@code Byte}s ending in 2<sup>7</sup>–1.</li>
* </ul>
*
* Length is 2<sup>7</sup>–{@code a}
*
* @param a the starting value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive)
*/
    public static @NotNull Iterable<Byte> range(byte a) {
        // Delegates to the two-argument range; the upper end is the largest byte, so there is no wraparound.
        return range(a, Byte.MAX_VALUE);
    }
/**
* Generates all {@link Short}s greater than or equal to {@code a}, in order. Does not wrap around after reaching
* {@code Short.MAX_VALUE}. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code a} may be any {@code short}.</li>
* <li>The result is an {@code Iterable} of consecutive ascending {@code Short}s ending in 2<sup>15</sup>–1.</li>
* </ul>
*
* Length is 2<sup>15</sup>–{@code a}
*
* @param a the starting value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive)
*/
    public static @NotNull Iterable<Short> range(short a) {
        // Delegates to the two-argument range; the upper end is the largest short, so there is no wraparound.
        return range(a, Short.MAX_VALUE);
    }
/**
* Generates all {@link Integer}s greater than or equal to {@code a}, in order. Does not wrap around after reaching
* {@code Integer.MAX_VALUE}. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code a} may be any {@code int}.</li>
* <li>The result is an {@code Iterable} of consecutive ascending {@code Integer}s ending in
* 2<sup>31</sup>–1.</li>
* </ul>
*
* Length is 2<sup>31</sup>–{@code a}
*
* @param a the starting value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive)
*/
    public static @NotNull Iterable<Integer> range(int a) {
        // Delegates to the two-argument range; the upper end is the largest int, so there is no wraparound.
        return range(a, Integer.MAX_VALUE);
    }
/**
* Generates all {@link Long}s greater than or equal to {@code a}, in order. Does not wrap around after reaching
* {@code Long.MAX_VALUE}. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code a} may be any {@code long}.</li>
* <li>The result is an {@code Iterable} of consecutive ascending {@code Long}s ending in 2<sup>63</sup>–1.</li>
* </ul>
*
* Length is 2<sup>63</sup>–{@code a}
*
* @param a the starting value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive)
*/
    public static @NotNull Iterable<Long> range(long a) {
        // Delegates to the two-argument range; the upper end is the largest long, so there is no wraparound.
        return range(a, Long.MAX_VALUE);
    }
/**
* Generates all {@link BigInteger}s greater than or equal to {@code a}, in order. The {@code Iterable} produced
* does not support removing elements.
*
* <ul>
* <li>{@code a} must be non-null.</li>
* <li>The result is an infinite {@code Iterable} of consecutive ascending {@code BigInteger}s.</li>
* </ul>
*
* Length is infinite
*
* @param a the starting value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive)
*/
    public static @NotNull Iterable<BigInteger> range(@NotNull BigInteger a) {
        // Infinite ascending sequence: repeatedly add one, starting from a.
        return iterate(i -> i.add(BigInteger.ONE), a);
    }
/**
* Generates all {@link BigDecimal}s of the form {@code a}+n where n is a non-negative integer, in order. The
* {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code a} must be non-null.</li>
* <li>The result is an infinite {@code Iterable} of ascending {@code BigDecimal}s differing by 1.</li>
* </ul>
*
* Length is infinite
*
* @param a the starting value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive)
*/
    public static @NotNull Iterable<BigDecimal> range(@NotNull BigDecimal a) {
        // Infinite ascending sequence: repeatedly add one, starting from a.
        return iterate(i -> i.add(BigDecimal.ONE), a);
    }
    /**
     * Generates all {@link Character}s greater than or equal to {@code a}, in order. Does not wrap around after
     * reaching {@code Character.MAX_VALUE}. The {@code Iterable} produced does not support removing elements.
     *
     * <ul>
     *  <li>{@code a} may be any {@code char}.</li>
     *  <li>The result is an {@code Iterable} of consecutive ascending {@code Character}s ending in
     *  {@code \uffff}.</li>
     * </ul>
     *
     * Length is 2<sup>16</sup>–{@code a}
     *
     * @param a the starting value of this {@code Character} sequence
     * @return a sequence of consecutive {@code Character}s, starting at {@code a} (inclusive)
     */
    public static @NotNull Iterable<Character> range(char a) {
        // Delegates to the two-argument overload; the sequence ends at Character.MAX_VALUE (inclusive).
        return range(a, Character.MAX_VALUE);
    }
/**
* Generates all {@link float}s roughly of the form {@code a}+n where n is a non-negative integer, in order.
* {@code a} is converted to a {@code BigDecimal} internally to minimize rounding errors. Nonetheless, rounding may
* produce some odd-seeming results: for example, if {@code a} is large, the result might contain runs of identical
* {@code float}s. If {@code a} is {@code -Infinity}, the result is {@code -Infinity} repeating forever. If
* {@code a} is {@code +Infinity}, the result is a single {@code +Infinity}. If {@code a} is negative zero, the
* first element of the result is also negative zero. {@code NaN} is not a legal input. The {@code Iterable}
* produced does not support removing elements.
*
* <ul>
* <li>{@code a} cannot be {@code NaN}.</li>
* <li>The result is either {@code [+Infinity]}, or an infinite non-descending {@code Iterable} of {@code float}s
* roughly differing by 1.</li>
* </ul>
*
* Length is 1 if {@code a} is {@code +Infinity}, infinite otherwise
*
* @param a the starting value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive)
*/
public static @NotNull Iterable<Float> range(float a) {
if (Float.isNaN(a))
throw new IllegalArgumentException("cannot begin a range with NaN");
if (Float.isInfinite(a)) {
return a < 0 ? cycle(Arrays.asList(Float.NEGATIVE_INFINITY)) : Arrays.asList(Float.POSITIVE_INFINITY);
}
Iterable<Float> fs = map(BigDecimal::floatValue, range(new BigDecimal(Float.toString(a))));
return Float.valueOf(a).equals(-0.0f) ? cons(-0.0f, tail(fs)): fs;
}
/**
* Generates all {@link double}s roughly of the form {@code a}+n where n is a non-negative integer, in order.
* {@code a} is converted to a {@code BigDecimal} internally to minimize rounding errors. Nonetheless, rounding may
* produce some odd-seeming results: for example, if {@code a} is large, the result might contain runs of identical
* {@code double}s. If {@code a} is {@code -Infinity}, the result is {@code -Infinity} repeating forever. If
* {@code a} is {@code +Infinity}, the result is a single {@code +Infinity}. If {@code a} is negative zero, the
* first element of the result is also negative zero. {@code NaN} is not a legal input. The {@code Iterable}
* produced does not support removing elements.
*
* <ul>
* <li>{@code a} cannot be {@code NaN}.</li>
* <li>The result is either {@code [+Infinity]}, or an infinite non-descending {@code Iterable} of {@code double}s
* roughly differing by 1.</li>
* </ul>
*
* Length is 1 if {@code a} is {@code +Infinity}, infinite otherwise
*
* @param a the starting value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive)
*/
public static @NotNull Iterable<Double> range(double a) {
if (Double.isNaN(a))
throw new IllegalArgumentException("cannot begin a range with NaN");
if (Double.isInfinite(a)) {
return a < 0 ? cycle(Arrays.asList(Double.NEGATIVE_INFINITY)) : Arrays.asList(Double.POSITIVE_INFINITY);
}
Iterable<Double> ds = map(BigDecimal::doubleValue, range(BigDecimal.valueOf(a)));
return Double.valueOf(a).equals(-0.0) ? cons(-0.0, tail(ds)) : ds;
}
/**
* Generates all {@code Byte}s greater than or equal to {@code a} and less than or equal to {@code b}, in order.
* If {@code a}{@literal >}{@code b}, an empty {@code Iterable} is returned. The {@code Iterable} produced does not
* support removing elements.
*
* <ul>
* <li>{@code a} may be any {@code byte}.</li>
* <li>{@code b} may be any {@code byte}.</li>
* <li>The result is a possibly-empty {@code Iterable} of consecutive ascending {@code Byte}s.</li>
* </ul>
*
* Length is max({@code b}–{@code a}+1, 0)
*
* @param a the starting value of this arithmetic progression
* @param b the ending value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive) and ending at
* {@code b} (inclusive)
*/
public static @NotNull Iterable<Byte> range(byte a, byte b) {
if (a > b) return new ArrayList<>();
return () -> new Iterator<Byte>() {
private byte x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Byte next() {
reachedEnd = x == b;
return x++;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Generates all {@code Short}s greater than or equal to {@code a} and less than or equal to {@code b}, in order.
* If {@code a}{@literal >}{@code b}, an empty {@code Iterable} is returned. The {@code Iterable} produced does not
* support removing elements.
*
* <ul>
* <li>{@code a} may be any {@code short}.</li>
* <li>{@code b} may be any {@code short}.</li>
* <li>The result is a possibly-empty {@code Iterable} of consecutive ascending {@code Short}s.</li>
* </ul>
*
* Length is max({@code b}–{@code a}+1, 0)
*
* @param a the starting value of this arithmetic progression
* @param b the ending value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive) and ending at
* {@code b} (inclusive)
*/
public static @NotNull Iterable<Short> range(short a, short b) {
if (a > b) return new ArrayList<>();
return () -> new Iterator<Short>() {
private short x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Short next() {
reachedEnd = x == b;
return x++;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Generates all {@code Integer}s greater than or equal to {@code a} and less than or equal to {@code b}, in order.
* If {@code a}{@literal >}{@code b}, an empty {@code Iterable} is returned. The {@code Iterable} produced does not
* support removing elements.
*
* <ul>
* <li>{@code a} may be any {@code int}.</li>
* <li>{@code b} may be any {@code int}.</li>
* <li>The result is a possibly-empty {@code Iterable} of consecutive ascending {@code Integer}s.</li>
* </ul>
*
* Length is max({@code b}–{@code a}+1, 0)
*
* @param a the starting value of this arithmetic progression
* @param b the ending value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive) and ending at
* {@code b} (inclusive)
*/
public static @NotNull Iterable<Integer> range(int a, int b) {
if (a > b) return new ArrayList<>();
return () -> new Iterator<Integer>() {
private int x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Integer next() {
reachedEnd = x == b;
return x++;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Generates all {@code Long}s greater than or equal to {@code a} and less than or equal to {@code b}, in order.
* If {@code a}{@literal >}{@code b}, an empty {@code Iterable} is returned. The {@code Iterable} produced does not
* support removing elements.
*
* <ul>
* <li>{@code a} may be any {@code long}.</li>
* <li>{@code b} may be any {@code long}.</li>
* <li>The result is a possibly-empty {@code Iterable} of consecutive ascending {@code Long}s.</li>
* </ul>
*
* Length is max({@code b}–{@code a}+1, 0)
*
* @param a the starting value of this arithmetic progression
* @param b the ending value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive) and ending at
* {@code b} (inclusive)
*/
public static @NotNull Iterable<Long> range(long a, long b) {
if (a > b) return new ArrayList<>();
return () -> new Iterator<Long>() {
private long x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Long next() {
reachedEnd = x == b;
return x++;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Generates all {@code BigInteger}s greater than or equal to {@code a} and less than or equal to {@code b}, in
* order. If {@code a}{@literal >}{@code b}, an empty {@code Iterable} is returned. The {@code Iterable} produced
* does not support removing elements.
*
* <ul>
* <li>{@code a} must be non-null.</li>
* <li>{@code b} must be non-null.</li>
* <li>The result is a possibly-empty {@code Iterable} of consecutive ascending {@code BigInteger}s.</li>
* </ul>
*
* Length is max({@code b}–{@code a}+1, 0)
*
* @param a the starting value of this arithmetic progression
* @param b the ending value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive) and ending at
* {@code b} (inclusive)
*/
public static @NotNull Iterable<BigInteger> range(@NotNull BigInteger a, @NotNull BigInteger b) {
if (gt(a, b)) return new ArrayList<>();
return () -> new Iterator<BigInteger>() {
private BigInteger x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public BigInteger next() {
reachedEnd = x.equals(b);
BigInteger oldX = x;
x = x.add(BigInteger.ONE);
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Generates all {@link BigDecimal}s greater than or equal to {@code a} and less than or equal to {@code b} of the
* form {@code a}+n where n is an integer, in order. If {@code a}{@literal >}{@code b}, an empty {@code Iterable}
* is returned. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code a} must be non-null.</li>
* <li>{@code b} must be non-null.</li>
* <li>The result is a possibly-empty {@code Iterable} of consecutive {@code BigDecimal}s differing by 1.</li>
* </ul>
*
* Length is max(⌊{@code b}–{@code a}⌋+1, 0)
*
* @param a the starting value of this arithmetic progression
* @param b the ending value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive) and ending at
* {@code b} (inclusive)
*/
public static @NotNull Iterable<BigDecimal> range(@NotNull BigDecimal a, @NotNull BigDecimal b) {
if (gt(a, b)) return new ArrayList<>();
return () -> new Iterator<BigDecimal>() {
private BigDecimal x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public BigDecimal next() {
reachedEnd = gt(x.add(BigDecimal.ONE), b);
BigDecimal oldX = x;
x = x.add(BigDecimal.ONE);
return oldX;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Generates all {@code Character}s greater than or equal to {@code a} and less than or equal to {@code b}, in
* order. If {@code a}{@literal >}{@code b}, an empty {@code Iterable} is returned. The {@code Iterable} produced
* does not support removing elements.
*
* <ul>
* <li>{@code a} may be any {@code char}.</li>
* <li>{@code b} may be any {@code char}.</li>
* <li>The result is a possibly-empty {@code Iterable} of consecutive ascending {@code Character}s.</li>
* </ul>
*
* Length is max({@code b}–{@code a}+1, 0)
*
* @param a the starting value of this {@code Character} sequence
* @param b the ending value of this {@code Character} sequence
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive) and ending at
* {@code b} (inclusive)
*/
public static @NotNull Iterable<Character> range(char a, char b) {
if (a > b) return new ArrayList<>();
return () -> new Iterator<Character>() {
private char x = a;
private boolean reachedEnd;
@Override
public boolean hasNext() {
return !reachedEnd;
}
@Override
public Character next() {
reachedEnd = x == b;
return x++;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Generates all {@code float}s greater than or equal to {@code a} and less than or equal to {@code b} roughly of
* the form {@code a}+n where n is a non-negative integer, in order. {@code a} and {@code b} are converted to
* {@code BigDecimal}s internally to minimize rounding errors. Nonetheless, rounding may produce some odd-seeming
* results: for example, if {@code a} is large, the result might contain runs of identical {@code float}s. If
* {@code a}{@literal >}{@code b}, the result is empty. If {@code a}={@code b}, an {@code Iterable} containing only
* {@code a} is returned. If {@code a} is {@code -Infinity} and {@code b} is not {@code -Infinity}, the result is
* {@code -Infinity} repeating forever. If {@code a} is negative zero and {@code b} is nonnegative, the first
* element of the result is also negative zero. Neither {@code a} nor {@code b} may be {@code NaN}. The
* {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code a} cannot be {@code NaN}.</li>
* <li>{@code b} cannot be {@code NaN}.</li>
* <li>The result is a possibly-empty non-descending {@code Iterable} of {@code float}s roughly differing by
* 1.</li>
* </ul>
*
* Length is 0 if {@code a}{@literal >}{@code b}, 1 if {@code a}={@code b}, infinite if {@code a} is
* {@code -Infinity} or {@code b} is {@code Infinity}, and ⌊{@code new BigDecimal(b)}–{@code new BigDecimal(a)}⌋+1
* otherwise
*
* @param a the starting value of this arithmetic progression
* @param b the ending value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive) and ending at the
* largest {@code float} an integer away from {@code a} and less than or equal to {@code b}.
*/
public static @NotNull Iterable<Float> range(float a, float b) {
if (Float.isNaN(a) || Float.isNaN(b))
throw new IllegalArgumentException("cannot begin or end a range with NaN");
if (a == b) return Arrays.asList(a);
if (a > b) return new ArrayList<>();
if (Float.isInfinite(a)) {
return a < 0 ? cycle(Arrays.asList(Float.NEGATIVE_INFINITY)) : Arrays.asList(Float.POSITIVE_INFINITY);
}
if (Float.isInfinite(b)) {
return range(a);
}
Iterable<Float> fs = map(
BigDecimal::floatValue,
range(new BigDecimal(Float.toString(a)), new BigDecimal(Float.toString(b)))
);
return Float.valueOf(a).equals(-0.0f) ? cons(-0.0f, tail(fs)): fs;
}
/**
* Generates all {@code double}s greater than or equal to {@code a} and less than or equal to {@code b} roughly of
* the form {@code a}+n where n is a non-negative integer, in order. {@code a} and {@code b} are converted to
* {@code BigDecimal}s internally to minimize rounding errors. Nonetheless, rounding may produce some odd-seeming
* results: for example, if {@code a} is large, the result might contain runs of identical {@code double}s. If
* {@code a}{@literal >}{@code b}, the result is empty. If {@code a}={@code b}, an {@code Iterable} containing only
* {@code a} is returned. If {@code a} is {@code -Infinity} and {@code b} is not {@code -Infinity}, the result is
* {@code -Infinity} repeating forever. If {@code a} is negative zero and {@code b} is nonnegative, the first
* element of the result is also negative zero. Neither {@code a} nor {@code b} may be {@code NaN}. The
* {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code a} cannot be {@code NaN}.</li>
* <li>{@code b} cannot be {@code NaN}.</li>
* <li>The result is a possibly-empty non-descending {@code Iterable} of {@code double}s roughly differing by
* 1.</li>
* </ul>
*
* Length is 0 if {@code a}{@literal >}{@code b}, 1 if {@code a}={@code b}, infinite if {@code a} is
* {@code -Infinity} or {@code b} is {@code Infinity}, and ⌊{@code new BigDecimal(b)}–{@code new BigDecimal(a)}⌋+1
* otherwise
*
* @param a the starting value of this arithmetic progression
* @param b the ending value of this arithmetic progression
* @return an arithmetic progression with an increment of 1, starting at {@code a} (inclusive) and ending at the
* largest {@code double} an integer away from {@code a} and less than or equal to {@code b}.
*/
public static @NotNull Iterable<Double> range(double a, double b) {
if (Double.isNaN(a) || Double.isNaN(b))
throw new IllegalArgumentException("cannot begin or end a range with NaN");
if (a == b) return Arrays.asList(a);
if (a > b) return new ArrayList<>();
if (Double.isInfinite(a)) {
return a < 0 ? cycle(Arrays.asList(Double.NEGATIVE_INFINITY)) : Arrays.asList(Double.POSITIVE_INFINITY);
}
if (Double.isInfinite(b)) {
return range(a);
}
Iterable<Double> ds = map(
BigDecimal::doubleValue,
range(new BigDecimal(Double.toString(a)), new BigDecimal(Double.toString(b)))
);
return Double.valueOf(a).equals(-0.0) ? cons(-0.0, tail(ds)): ds;
}
    /**
     * Generates {@code Byte}s starting at {@code a} with increment {@code i}: {@code a}, {@code a}+{@code i},
     * {@code a}+2{@code i}, ... The byte addition is allowed to overflow; iteration ends once a wrapped sum falls
     * below {@code a} (for positive {@code i}) or above {@code a} (for negative {@code i}). Depending on {@code a}
     * this may never happen, so the sequence can be infinite; if {@code i} is 0, {@code a} repeats forever. The
     * {@code Iterable} produced does not support removing elements.
     *
     * @param a the starting value of this arithmetic progression
     * @param i the increment of this arithmetic progression
     * @return an arithmetic progression with an increment of {@code i}, starting at {@code a} (inclusive)
     */
    public static @NotNull Iterable<Byte> rangeBy(byte a, byte i) {
        return () -> new Iterator<Byte>() {
            private byte x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public Byte next() {
                byte oldX = x;
                x += i;
                // Wrap detection: a value on the wrong side of a for i's sign means the addition overflowed.
                reachedEnd = i > 0 ? x < a : x > a;
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code Short}s starting at {@code a} with increment {@code i}: {@code a}, {@code a}+{@code i},
     * {@code a}+2{@code i}, ... The short addition is allowed to overflow; iteration ends once a wrapped sum falls
     * below {@code a} (for positive {@code i}) or above {@code a} (for negative {@code i}). Depending on {@code a}
     * this may never happen, so the sequence can be infinite; if {@code i} is 0, {@code a} repeats forever. The
     * {@code Iterable} produced does not support removing elements.
     *
     * @param a the starting value of this arithmetic progression
     * @param i the increment of this arithmetic progression
     * @return an arithmetic progression with an increment of {@code i}, starting at {@code a} (inclusive)
     */
    public static @NotNull Iterable<Short> rangeBy(short a, short i) {
        return () -> new Iterator<Short>() {
            private short x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public Short next() {
                short oldX = x;
                x += i;
                // Wrap detection: a value on the wrong side of a for i's sign means the addition overflowed.
                reachedEnd = i > 0 ? x < a : x > a;
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code Integer}s starting at {@code a} with increment {@code i}: {@code a}, {@code a}+{@code i},
     * {@code a}+2{@code i}, ... The int addition is allowed to overflow; iteration ends once a wrapped sum falls
     * below {@code a} (for positive {@code i}) or above {@code a} (for negative {@code i}). Depending on {@code a}
     * this may never happen, so the sequence can be infinite; if {@code i} is 0, {@code a} repeats forever. The
     * {@code Iterable} produced does not support removing elements.
     *
     * @param a the starting value of this arithmetic progression
     * @param i the increment of this arithmetic progression
     * @return an arithmetic progression with an increment of {@code i}, starting at {@code a} (inclusive)
     */
    public static @NotNull Iterable<Integer> rangeBy(int a, int i) {
        return () -> new Iterator<Integer>() {
            private int x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public Integer next() {
                int oldX = x;
                x += i;
                // Wrap detection: a value on the wrong side of a for i's sign means the addition overflowed.
                reachedEnd = i > 0 ? x < a : x > a;
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code Long}s starting at {@code a} with increment {@code i}: {@code a}, {@code a}+{@code i},
     * {@code a}+2{@code i}, ... The long addition is allowed to overflow; iteration ends once a wrapped sum falls
     * below {@code a} (for positive {@code i}) or above {@code a} (for negative {@code i}). Depending on {@code a}
     * this may never happen, so the sequence can be infinite; if {@code i} is 0, {@code a} repeats forever. The
     * {@code Iterable} produced does not support removing elements.
     *
     * @param a the starting value of this arithmetic progression
     * @param i the increment of this arithmetic progression
     * @return an arithmetic progression with an increment of {@code i}, starting at {@code a} (inclusive)
     */
    public static @NotNull Iterable<Long> rangeBy(long a, long i) {
        return () -> new Iterator<Long>() {
            private long x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public Long next() {
                long oldX = x;
                x += i;
                // Wrap detection: a value on the wrong side of a for i's sign means the addition overflowed.
                reachedEnd = i > 0 ? x < a : x > a;
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code BigInteger}s starting at {@code a} with increment {@code i}: {@code a}, {@code a}+{@code i},
     * {@code a}+2{@code i}, ... Since {@code BigInteger} addition never overflows, the end condition below is never
     * satisfied and the result is always infinite; if {@code i} is 0, {@code a} repeats forever. The
     * {@code Iterable} produced does not support removing elements.
     *
     * @param a the starting value of this arithmetic progression; must be non-null
     * @param i the increment of this arithmetic progression; must be non-null
     * @return an arithmetic progression with an increment of {@code i}, starting at {@code a} (inclusive)
     */
    public static @NotNull Iterable<BigInteger> rangeBy(@NotNull BigInteger a, @NotNull BigInteger i) {
        return () -> new Iterator<BigInteger>() {
            private BigInteger x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public BigInteger next() {
                BigInteger oldX = x;
                x = x.add(i);
                // Mirrors the primitive overloads' wrap check; BigInteger cannot wrap, so this stays false.
                reachedEnd = i.signum() == 1 ? lt(x, a) : gt(x, a);
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code BigDecimal}s starting at {@code a} with increment {@code i}: {@code a}, {@code a}+{@code i},
     * {@code a}+2{@code i}, ... Since {@code BigDecimal} addition never overflows, the end condition below is never
     * satisfied and the result is always infinite; if {@code i} is 0, {@code a} repeats forever. The
     * {@code Iterable} produced does not support removing elements.
     *
     * @param a the starting value of this arithmetic progression; must be non-null
     * @param i the increment of this arithmetic progression; must be non-null
     * @return an arithmetic progression with an increment of {@code i}, starting at {@code a} (inclusive)
     */
    public static @NotNull Iterable<BigDecimal> rangeBy(@NotNull BigDecimal a, @NotNull BigDecimal i) {
        return () -> new Iterator<BigDecimal>() {
            private BigDecimal x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public BigDecimal next() {
                BigDecimal oldX = x;
                x = x.add(i);
                // Mirrors the primitive overloads' wrap check; BigDecimal cannot wrap, so this stays false.
                reachedEnd = i.signum() == 1 ? lt(x, a) : gt(x, a);
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code Character}s starting at {@code a} with increment {@code i}: {@code a}, {@code a}+{@code i},
     * {@code a}+2{@code i}, ... The char addition wraps modulo 2<sup>16</sup>; iteration ends once a wrapped sum
     * falls below {@code a} (for positive {@code i}) or above {@code a} (for negative {@code i}). Depending on
     * {@code a} this may never happen, so the sequence can be infinite; if {@code i} is 0, {@code a} repeats
     * forever. The {@code Iterable} produced does not support removing elements.
     *
     * @param a the starting value of this {@code Character} sequence
     * @param i the increment of this {@code Character} sequence
     * @return a sequence of {@code Character}s {@code i} apart, starting at {@code a} (inclusive)
     */
    public static @NotNull Iterable<Character> rangeBy(char a, int i) {
        return () -> new Iterator<Character>() {
            private char x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public Character next() {
                char oldX = x;
                x += i;
                // Wrap detection: a value on the wrong side of a for i's sign means the addition wrapped.
                reachedEnd = i > 0 ? x < a : x > a;
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code Byte}s starting at {@code a}, incrementing by {@code i}, and bounded by {@code b}. If
     * {@code i} is positive and {@code a}{@literal >}{@code b}, or {@code i} is non-positive and
     * {@code b}{@literal >}{@code a}, an empty {@code Iterable} is returned (an increment of 0 is treated like a
     * negative one). Iteration also ends if the byte addition wraps past {@code a}. The {@code Iterable} produced
     * does not support removing elements.
     *
     * @param a the starting value of this arithmetic progression
     * @param i the increment of this arithmetic progression
     * @param b the bounding value of this arithmetic progression
     * @return an arithmetic progression with an increment of {@code i}, starting at {@code a} (inclusive) and
     * bounded by {@code b} (inclusive)
     */
    public static Iterable<Byte> rangeBy(byte a, byte i, byte b) {
        if (i > 0 ? a > b : b > a) return new ArrayList<>();
        return () -> new Iterator<Byte>() {
            private byte x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public Byte next() {
                byte oldX = x;
                x += i;
                // Stop after passing b, or after an overflow wraps x to the wrong side of a.
                reachedEnd = i > 0 ? (x > b || x < a) : (x < b || x > a);
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code Short}s starting at {@code a}, incrementing by {@code i}, and bounded by {@code b}. If
     * {@code i} is positive and {@code a}{@literal >}{@code b}, or {@code i} is non-positive and
     * {@code b}{@literal >}{@code a}, an empty {@code Iterable} is returned (an increment of 0 is treated like a
     * negative one). Iteration also ends if the short addition wraps past {@code a}. The {@code Iterable} produced
     * does not support removing elements.
     *
     * @param a the starting value of this arithmetic progression
     * @param i the increment of this arithmetic progression
     * @param b the bounding value of this arithmetic progression
     * @return an arithmetic progression with an increment of {@code i}, starting at {@code a} (inclusive) and
     * bounded by {@code b} (inclusive)
     */
    public static Iterable<Short> rangeBy(short a, short i, short b) {
        if (i > 0 ? a > b : b > a) return new ArrayList<>();
        return () -> new Iterator<Short>() {
            private short x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public Short next() {
                short oldX = x;
                x += i;
                // Stop after passing b, or after an overflow wraps x to the wrong side of a.
                reachedEnd = i > 0 ? (x > b || x < a) : (x < b || x > a);
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code Integer}s starting at {@code a}, incrementing by {@code i}, and bounded by {@code b}. If
     * {@code i} is positive and {@code a}{@literal >}{@code b}, or {@code i} is non-positive and
     * {@code b}{@literal >}{@code a}, an empty {@code Iterable} is returned (an increment of 0 is treated like a
     * negative one). Iteration also ends if the int addition wraps past {@code a}. The {@code Iterable} produced
     * does not support removing elements.
     *
     * @param a the starting value of this arithmetic progression
     * @param i the increment of this arithmetic progression
     * @param b the bounding value of this arithmetic progression
     * @return an arithmetic progression with an increment of {@code i}, starting at {@code a} (inclusive) and
     * bounded by {@code b} (inclusive)
     */
    public static Iterable<Integer> rangeBy(int a, int i, int b) {
        if (i > 0 ? a > b : b > a) return new ArrayList<>();
        return () -> new Iterator<Integer>() {
            private int x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public Integer next() {
                int oldX = x;
                x += i;
                // Stop after passing b, or after an overflow wraps x to the wrong side of a.
                reachedEnd = i > 0 ? (x > b || x < a) : (x < b || x > a);
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code Long}s starting at {@code a}, incrementing by {@code i}, and bounded by {@code b}. If
     * {@code i} is positive and {@code a}{@literal >}{@code b}, or {@code i} is non-positive and
     * {@code b}{@literal >}{@code a}, an empty {@code Iterable} is returned (an increment of 0 is treated like a
     * negative one). Iteration also ends if the long addition wraps past {@code a}. The {@code Iterable} produced
     * does not support removing elements.
     *
     * @param a the starting value of this arithmetic progression
     * @param i the increment of this arithmetic progression
     * @param b the bounding value of this arithmetic progression
     * @return an arithmetic progression with an increment of {@code i}, starting at {@code a} (inclusive) and
     * bounded by {@code b} (inclusive)
     */
    public static Iterable<Long> rangeBy(long a, long i, long b) {
        if (i > 0 ? a > b : b > a) return new ArrayList<>();
        return () -> new Iterator<Long>() {
            private long x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public Long next() {
                long oldX = x;
                x += i;
                // Stop after passing b, or after an overflow wraps x to the wrong side of a.
                reachedEnd = i > 0 ? (x > b || x < a) : (x < b || x > a);
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code BigInteger}s starting at {@code a}, incrementing by {@code i}, and bounded by {@code b}. If
     * {@code i} is positive and {@code a}{@literal >}{@code b}, or {@code i} is non-positive and
     * {@code b}{@literal >}{@code a}, an empty {@code Iterable} is returned (an increment of 0 is treated like a
     * negative one). Iteration ends after passing {@code b}; with an increment of 0 and {@code b}≤{@code a},
     * {@code a} repeats forever. The {@code Iterable} produced does not support removing elements.
     *
     * @param a the starting value of this arithmetic progression; must be non-null
     * @param i the increment of this arithmetic progression; must be non-null
     * @param b the bounding value of this arithmetic progression; must be non-null
     * @return an arithmetic progression with an increment of {@code i}, starting at {@code a} (inclusive) and
     * bounded by {@code b} (inclusive)
     */
    public static Iterable<BigInteger> rangeBy(BigInteger a, BigInteger i, BigInteger b) {
        if (i.signum() == 1 ? gt(a, b) : gt(b, a)) return new ArrayList<>();
        return () -> new Iterator<BigInteger>() {
            private BigInteger x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public BigInteger next() {
                BigInteger oldX = x;
                x = x.add(i);
                // Stop once the next value lies beyond b in the direction of travel.
                reachedEnd = i.signum() == 1 ? gt(x, b) : lt(x, b);
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code BigDecimal}s starting at {@code a}, incrementing by {@code i}, and bounded by {@code b}. If
     * {@code i} is positive and {@code a}{@literal >}{@code b}, or {@code i} is non-positive and
     * {@code b}{@literal >}{@code a}, an empty {@code Iterable} is returned (an increment of 0 is treated like a
     * negative one). Iteration ends after passing {@code b}; with an increment of 0 and {@code b}≤{@code a},
     * {@code a} repeats forever. The {@code Iterable} produced does not support removing elements.
     *
     * @param a the starting value of this arithmetic progression; must be non-null
     * @param i the increment of this arithmetic progression; must be non-null
     * @param b the bounding value of this arithmetic progression; must be non-null
     * @return an arithmetic progression with an increment of {@code i}, starting at {@code a} (inclusive) and
     * bounded by {@code b} (inclusive)
     */
    public static Iterable<BigDecimal> rangeBy(BigDecimal a, BigDecimal i, BigDecimal b) {
        if (i.signum() == 1 ? gt(a, b) : gt(b, a)) return new ArrayList<>();
        return () -> new Iterator<BigDecimal>() {
            private BigDecimal x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public BigDecimal next() {
                BigDecimal oldX = x;
                x = x.add(i);
                // Stop once the next value lies beyond b in the direction of travel.
                reachedEnd = i.signum() == 1 ? gt(x, b) : lt(x, b);
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Generates {@code Character}s starting at {@code a}, incrementing by {@code i}, and bounded by {@code b}. If
     * {@code i} is positive and {@code a}{@literal >}{@code b}, or {@code i} is non-positive and
     * {@code b}{@literal >}{@code a}, an empty {@code Iterable} is returned (an increment of 0 is treated like a
     * negative one). Iteration also ends if the char addition wraps past {@code a}. The {@code Iterable} produced
     * does not support removing elements.
     *
     * @param a the starting value of this {@code Character} sequence
     * @param i the increment of this {@code Character} sequence
     * @param b the bounding value of this {@code Character} sequence
     * @return a sequence of {@code Character}s {@code i} apart, starting at {@code a} (inclusive) and bounded by
     * {@code b} (inclusive)
     */
    public static Iterable<Character> rangeBy(char a, int i, char b) {
        if (i > 0 ? a > b : b > a) return new ArrayList<>();
        return () -> new Iterator<Character>() {
            private char x = a;
            private boolean reachedEnd;
            @Override
            public boolean hasNext() {
                return !reachedEnd;
            }
            @Override
            public Character next() {
                char oldX = x;
                x += i;
                // Stop after passing b, or after the modular char addition wraps x to the wrong side of a.
                reachedEnd = i > 0 ? (x > b || x < a) : (x < b || x > a);
                return oldX;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
/**
* Equivalent of Haskell's {@code (:)} list constructor. Creates an {@code Iterable} whose first element is
* {@code x} and whose remaining elements are given by {@code xs}. {@code xs} may be infinite, in which case the
* result is also infinite. Uses O(1) additional memory. The {@code Iterable} produced does not support removing
* elements.
*
* <ul>
* <li>{@code x} can be anything.</li>
* <li>{@code xs} must be non-null.</li>
* <li>The result is a non-empty {@code Iterable}.</li>
* </ul>
*
* Result length is |{@code xs}|+1
*
* @param x the first element of the {@code Iterable} to be created
* @param xs the second-through-last elements of the {@code Iterable} to be created
* @param <T> the element type of the {@code Iterable} to be created
* @return the {@code Iterable} to be created
*/
public static @NotNull <T> Iterable<T> cons(@Nullable T x, @NotNull Iterable<T> xs) {
return () -> new Iterator<T>() {
private boolean readHead = false;
private final Iterator<T> xsi = xs.iterator();
@Override
public boolean hasNext() {
return !readHead || xsi.hasNext();
}
@Override
public T next() {
if (readHead) {
return xsi.next();
} else {
readHead = true;
return x;
}
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Equivalent of Haskell's {@code (:)} list constructor. Creates a {@code String} whose first character is
* {@code c} and whose remaining characters are given by {@code cs}. Uses O(n) additional memory, where n is the
* length of cs.
*
* <ul>
* <li>{@code c} can be anything.</li>
* <li>{@code cs} must be non-null.</li>
* <li>The result is a non-empty {@code String}.</li>
* </ul>
*
* Result length is |{@code cs}|+1
*
* @param c the first character of the {@code String} to be created
* @param cs the second-through-last characters of the {@code String} to be created
* @return the {@code String} to be created
*/
public static @NotNull String cons(char c, @NotNull String cs) {
return Character.toString(c) + cs;
}
/**
* Equivalent of Haskell's {@code (++)} operator. Creates an {@code Iterable} consisting of {@code xs}'s
* elements followed by {@code ys}'s elements. {@code xs} may be infinite, in which case the result will be equal
* to {@code xs}. {@code ys} may be infinite, in which case the result will also be infinite. Uses O(1)
* additional memory. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code xs} must be non-null.</li>
* <li>{@code ys} must be non-null.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code xs}|+|{@code ys}|
*
* @param xs an {@code Iterable}
* @param ys another {@code Iterable}
* @param <T> the element type of the {@code Iterable} to be created
* @return {@code xs} concatenated with {@code ys}
*/
public static @NotNull <T> Iterable<T> concat(@NotNull Iterable<T> xs, @NotNull Iterable<T> ys) {
return () -> new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
private final Iterator<T> ysi = ys.iterator();
@Override
public boolean hasNext() {
return xsi.hasNext() || ysi.hasNext();
}
@Override
public T next() {
return (xsi.hasNext() ? xsi : ysi).next();
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
    /**
     * Equivalent of Haskell's {@code (++)} operator. Creates a {@code String} consisting of {@code s}'s characters
     * followed by {@code t}'s characters. Uses O(n+m) additional memory, where n is the length of {@code s} and m is
     * the length of {@code t}.
     *
     * <ul>
     *  <li>{@code s} must be non-null.</li>
     *  <li>{@code t} must be non-null.</li>
     *  <li>The result is non-null.</li>
     * </ul>
     *
     * Result length is |{@code s}|+|{@code t}|
     *
     * @param s a {@code String}
     * @param t a {@code String}
     * @return {@code s} concatenated with {@code t}
     */
    public static @NotNull String concat(@NotNull String s, @NotNull String t) {
        // Plain string concatenation; both operands are required to be non-null by the contract above.
        return s + t;
    }
    /**
     * Equivalent of Haskell's {@code head} function. Returns the first element of an {@code Iterable}. Works on
     * infinite {@code Iterable}s. Uses O(1) additional memory.
     *
     * <ul>
     *  <li>{@code xs} must be non-empty.</li>
     *  <li>The result may be anything.</li>
     * </ul>
     *
     * @param xs an {@code Iterable}
     * @param <T> the {@code Iterable}'s element type
     * @return the {@code Iterable}'s first element
     * @throws java.util.NoSuchElementException if {@code xs} is empty (per {@link java.util.Iterator#next})
     */
    public static @Nullable <T> T head(@NotNull Iterable<T> xs) {
        return xs.iterator().next();
    }
    /**
     * Equivalent of Haskell's {@code head} function. Returns the first element of a {@code List}. Uses O(1)
     * additional memory.
     *
     * <ul>
     *  <li>{@code xs} must be non-empty.</li>
     *  <li>The result may be anything.</li>
     * </ul>
     *
     * @param xs a {@code List}
     * @param <T> the {@code List}'s element type
     * @return the {@code List}'s first element
     * @throws IndexOutOfBoundsException if {@code xs} is empty (per {@link List#get})
     */
    public static @Nullable <T> T head(@NotNull List<T> xs) {
        return xs.get(0);
    }
    /**
     * Equivalent of Haskell's {@code head} function. Returns the first element of a {@code SortedSet}. Uses O(1)
     * additional memory.
     *
     * <ul>
     *  <li>{@code xs} must be non-empty.</li>
     *  <li>The result may be anything.</li>
     * </ul>
     *
     * @param xs a {@code SortedSet}
     * @param <T> the {@code SortedSet}'s element type
     * @return the {@code SortedSet}'s first element
     * @throws java.util.NoSuchElementException if {@code xs} is empty (per {@link SortedSet#first})
     */
    public static @Nullable <T> T head(@NotNull SortedSet<T> xs) {
        return xs.first();
    }
    /**
     * Equivalent of Haskell's {@code head} function. Returns the first character of a {@code String}. Uses O(1)
     * additional memory.
     *
     * <ul>
     *  <li>{@code s} must be non-empty.</li>
     *  <li>The result may be any {@code char}.</li>
     * </ul>
     *
     * @param s a {@code String}
     * @return the {@code String}'s first character
     * @throws IndexOutOfBoundsException if {@code s} is empty (per {@link String#charAt})
     */
    public static char head(@NotNull String s) {
        return s.charAt(0);
    }
/**
* Equivalent of Haskell's {@code last} function. Returns the last element of an {@code Iterable}. Only works on
* finite {@code Iterable}s. Uses O(1) additional memory.
*
* <ul>
* <li>{@code xs} must be non-empty and finite.</li>
* <li>The result may be anything.</li>
* </ul>
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return the {@code Iterable}'s last element
*/
public static @Nullable <T> T last(@NotNull Iterable<T> xs) {
T previous = null;
boolean empty = true;
for (T x : xs) {
empty = false;
previous = x;
}
if (empty)
throw new NoSuchElementException();
return previous;
}
    /**
     * Equivalent of Haskell's {@code last} function. Returns the last element of a {@code List}. Uses O(1)
     * additional memory.
     *
     * <ul>
     *  <li>{@code xs} must be non-empty.</li>
     *  <li>The result may be anything.</li>
     * </ul>
     *
     * @param xs a {@code List}
     * @param <T> the {@code List}'s element type
     * @return the {@code List}'s last element
     * @throws IndexOutOfBoundsException if {@code xs} is empty (per {@link List#get})
     */
    public static @Nullable <T> T last(@NotNull List<T> xs) {
        return xs.get(xs.size() - 1);
    }
    /**
     * Equivalent of Haskell's {@code last} function. Returns the last element of a {@code SortedSet}. Uses O(1)
     * additional memory.
     *
     * <ul>
     *  <li>{@code xs} must be non-empty.</li>
     *  <li>The result may be anything.</li>
     * </ul>
     *
     * @param xs a {@code SortedSet}
     * @param <T> the {@code SortedSet}'s element type
     * @return the {@code SortedSet}'s last element
     * @throws java.util.NoSuchElementException if {@code xs} is empty (per {@link SortedSet#last})
     */
    public static @Nullable <T> T last(@NotNull SortedSet<T> xs) {
        return xs.last();
    }
    /**
     * Equivalent of Haskell's {@code last} function. Returns the last character of a {@code String}. Uses O(1)
     * additional memory.
     *
     * <ul>
     *  <li>{@code s} must be non-empty.</li>
     *  <li>The result may be any {@code char}.</li>
     * </ul>
     *
     * @param s a {@code String}
     * @return the {@code String}'s last character
     * @throws IndexOutOfBoundsException if {@code s} is empty (per {@link String#charAt})
     */
    public static char last(@NotNull String s) {
        return s.charAt(s.length() - 1);
    }
/**
* Equivalent of Haskell's {@code tail} function. Returns all elements of an {@code Iterable} but the first.
* {@code xs} may be infinite, in which the result will also be infinite. Uses O(1) additional memory. The
* {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code xs} must be non-empty.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code xs}|–1
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return an {@code Iterable} containing all elements of {@code xs} but the first
*/
public static @NotNull <T> Iterable<T> tail(@NotNull Iterable<T> xs) {
if (isEmpty(xs))
throw new NoSuchElementException();
return () -> new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
{
xsi.next();
}
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public T next() {
return xsi.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
    /**
     * Equivalent of Haskell's {@code tail} function. Given a {@code String}, returns a {@code String} containing
     * all of its characters but the first. Uses O(n) additional memory, where n is the length of {@code s}.
     *
     * <ul>
     *  <li>{@code s} must be non-empty.</li>
     *  <li>The result is non-null.</li>
     * </ul>
     *
     * Result length is |{@code s}|–1
     *
     * @param s a {@code String}
     * @return a {@code String} containing all characters of {@code s} but the first
     */
    public static @NotNull String tail(@NotNull String s) {
        return s.substring(1);
    }
/**
* Equivalent of Haskell's {@code init} function. Returns all elements of an {@code Iterable} but the last.
* {@code xs} may be infinite, in which the result will be {@code xs}. Uses O(1) additional memory. The
* {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code xs} must be non-empty.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code xs}|–1
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return an {@code Iterable} containing all elements of {@code xs} but the last
*/
public static @NotNull <T> Iterable<T> init(@NotNull Iterable<T> xs) {
if (isEmpty(xs))
throw new NoSuchElementException();
return new Iterable<T>() {
@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
private T next = xsi.next();
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public T next() {
T oldNext = next;
next = xsi.next();
return oldNext;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
};
}
    /**
     * Equivalent of Haskell's {@code init} function. Given a {@code String}, returns a {@code String} containing
     * all of its characters but the last. Uses O(n) additional memory, where n is the length of {@code s}.
     *
     * <ul>
     *  <li>{@code s} must be non-empty.</li>
     *  <li>The result is non-null.</li>
     * </ul>
     *
     * Result length is |{@code s}|–1
     *
     * @param s a {@code String}
     * @return a {@code String} containing all characters of {@code s} but the last
     */
    public static @NotNull String init(@NotNull String s) {
        return s.substring(0, s.length() - 1);
    }
/**
* Equivalent of Haskell's {@code null} function. Tests whether an {@code Iterable} contains no elements.
* {@code xs} may be infinite. Uses O(1) additional space.
*
* <ul>
* <li>{@code xs} must be non-null.</li>
* <li>The result may be either {@code boolean}.</li>
* </ul>
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return whether {@code xs} is empty
*/
public static <T> boolean isEmpty(@NotNull Iterable<T> xs) {
return !xs.iterator().hasNext();
}
    /**
     * Equivalent of Haskell's {@code null} function. Tests whether a {@code Collection} contains no elements. Uses
     * O(1) additional space. Delegates to {@link Collection#isEmpty()}.
     *
     * <ul>
     *  <li>{@code xs} must be non-null.</li>
     *  <li>The result may be either {@code boolean}.</li>
     * </ul>
     *
     * @param xs a {@code Collection}
     * @param <T> the {@code Collection}'s element type
     * @return whether {@code xs} is empty
     */
    public static <T> boolean isEmpty(@NotNull Collection<T> xs) {
        return xs.isEmpty();
    }
    /**
     * Equivalent of Haskell's {@code null} function. Tests whether a {@code String} contains no characters. Uses
     * O(1) additional space.
     *
     * <ul>
     *  <li>{@code s} must be non-null.</li>
     *  <li>The result may be either {@code boolean}.</li>
     * </ul>
     *
     * @param s a {@code String}
     * @return whether {@code s} is empty
     */
    public static boolean isEmpty(@NotNull String s) {
        return s.isEmpty();
    }
/**
* Equivalent of Haskell's {@code length} function. Returns the number of elements in an {@code Iterable}. Only
* works on finite {@code Iterable}s. Uses O(1) additional space.
*
* <ul>
* <li>{@code xs} must be finite.</li>
* <li>The result is non-negative.</li>
* </ul>
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return the {@code Iterable}'s length
*/
public static <T> int length(@NotNull Iterable<T> xs) {
int i = 0;
for (T x : xs) {
i++;
}
return i;
}
/**
* Equivalent of Haskell's {@code length} function. Returns the number of elements in an {@code Iterable}. Only
* works on finite {@code Iterable}s. Uses O(log(n)) additional space, where n is {@code xs}'s length; but it's
* effectively constant space.
*
* <ul>
* <li>{@code xs} must be finite.</li>
* <li>The result is non-negative.</li>
* </ul>
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return the {@code Iterable}'s length
*/
public static @NotNull <T> BigInteger bigIntegerLength(@NotNull Iterable<T> xs) {
BigInteger i = BigInteger.ZERO;
for (T x : xs) {
i = i.add(BigInteger.ONE);
}
return i;
}
    /**
     * Equivalent of Haskell's {@code length} function. Returns the number of elements in a {@code Collection}. Uses
     * O(1) additional space. Delegates to {@link Collection#size()}.
     *
     * <ul>
     *  <li>{@code xs} must be non-null.</li>
     *  <li>The result is non-negative.</li>
     * </ul>
     *
     * @param xs a {@code Collection}
     * @param <T> the {@code Collection}'s element type
     * @return the {@code Collection}'s length
     */
    public static <T> int length(@NotNull Collection<T> xs) {
        return xs.size();
    }
    /**
     * Equivalent of Haskell's {@code length} function. Returns the number of characters in a {@code String}. Uses
     * O(1) additional space.
     *
     * <ul>
     *  <li>{@code s} must be non-null.</li>
     *  <li>The result is non-negative.</li>
     * </ul>
     *
     * @param s a {@code String}
     * @return the {@code String}'s length
     */
    public static int length(@NotNull String s) {
        return s.length();
    }
//todo docs
public static <T> boolean lengthAtLeast(int length, @NotNull Iterable<T> xs) {
int i = 0;
for (T x : xs) {
i++;
if (i >= length) return true;
}
return false;
}
    /**
     * Determines whether a {@code Collection} contains at least {@code length} elements. Uses O(1) additional
     * space.
     *
     * @param length the minimum number of elements to test for
     * @param xs a {@code Collection}
     * @param <T> the {@code Collection}'s element type
     * @return whether |{@code xs}| ≥ {@code length}
     */
    public static <T> boolean lengthAtLeast(int length, @NotNull Collection<T> xs) {
        return xs.size() >= length;
    }
    /**
     * Determines whether a {@code String} contains at least {@code length} characters. Uses O(1) additional space.
     *
     * @param length the minimum number of characters to test for
     * @param s a {@code String}
     * @param <T> unused type parameter
     * @return whether |{@code s}| ≥ {@code length}
     */
    public static <T> boolean lengthAtLeast(int length, @NotNull String s) {
        return s.length() >= length;
    }
/**
* Equivalent of Haskell's {@code map} function. Transforms one {@code Iterable} into another by applying a
* function to each element. {@code xs} may be infinite, in which case the result is also infinite. Uses O(1)
* additional memory. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code f} must be non-null.</li>
* <li>{@code xs} must be non-null.</li>
* <li>{@code xs} must only contain elements that are valid inputs for {@code f}.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code xs}|
*
* @param f the function that transforms each element in the {@code Iterable}
* @param xs the {@code Iterable}
* @param <A> the type of the original {@code Iterable}'s elements
* @param <B> the type of the output {@code Iterable}'s elements
* @return an {@code Iterable} containing the elements of {@code xs} transformed by {@code f}
*/
public static @NotNull <A, B> Iterable<B> map(@NotNull Function<A, B> f, @NotNull Iterable<A> xs) {
return () -> new Iterator<B>() {
private final Iterator<A> xsi = xs.iterator();
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public B next() {
return f.apply(xsi.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Equivalent of Haskell's {@code map} function. Transforms one {@code String} into another by applying a
* function to each character. Uses O(n) additional memory, where n is the length of the input string.
*
* <ul>
* <li>{@code f} must be non-null.</li>
* <li>{@code xs} must be non-null.</li>
* <li>{@code xs} must only contain characters that are valid inputs for {@code f}.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code s}|
*
* @param f the function that transforms each character in the {@code String}
* @param s the {@code String}
* @return a {@code String} containing the characters of {@code s} transformed by {@code f}
*/
public static @NotNull String map(@NotNull Function<Character, Character> f, @NotNull String s) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
sb.append(f.apply(s.charAt(i)));
}
return sb.toString();
}
/**
* Equivalent of Haskell's {@code reverse} function. Reverses an {@code Iterable}. {@code xs} must be finite.
* Uses O(n) additional memory, where n is the length of {@code xs}. The resulting list may be modified, but the
* modifications will not affect the original {@code Iterable}.
*
* <ul>
* <li>{@code xs} must be finite.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code xs}|
*
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return a {@code List} containing {@code xs}'s elements in reverse order
*/
public static @NotNull <T> List<T> reverse(@NotNull Iterable<T> xs) {
List<T> list = toList(xs);
Collections.reverse(list);
return list;
}
/**
* Equivalent of Haskell's {@code reverse} function. Reverses a {@code String}. Uses O(n) additional memory,
* where n is the length of {@code s}.
*
* <ul>
* <li>{@code s} must be non-null.</li>
* <li>The result is non-null.</li>
* </ul>
*
* Result length is |{@code s}|
*
* @param s a {@code String}
* @return a {@code String} containing {@code s}'s characters in reverse order
*/
public static @NotNull String reverse(@NotNull String s) {
char[] reversed = new char[s.length()];
for (int i = 0; i < s.length() / 2; i++) {
int j = s.length() - i - 1;
reversed[i] = s.charAt(j);
reversed[j] = s.charAt(i);
}
if ((s.length() & 1) == 1) {
int i = s.length() / 2;
reversed[i] = s.charAt(i);
}
return new String(reversed);
}
/**
* Equivalent of Haskell's {@code intersperse} function. Given an {@code Iterable} {@code xs} and a seperator
* {@code sep}, returns an {@code Iterable} consisting of the elements of {@code xs} with {@code sep} between
* every adjacent pair. {@code xs} may be infinite, in which case the result is also infinite. Uses O(1)
* additional memory. The {@code Iterable} produced does not support removing elements.
*
* <ul>
* <li>{@code sep} may be anything.</li>
* <li>{@code xs} must be non-null.</li>
* <li>The result is an {@code Iterable} whose odd-indexed (using 0-based indexing) elements are identical.</li>
* </ul>
*
* Result length is 0 when |{@code xs}|=0, 2|{@code xs}|–1 otherwise
*
* @param sep a separator
* @param xs an {@code Iterable}
* @param <T> the {@code Iterable}'s element type
* @return an {@code Iterable} consisting of the elements of {@code xs} interspersed with {@code sep}
*/
public static @NotNull <T> Iterable<T> intersperse(@Nullable T sep, @NotNull Iterable<T> xs) {
return () -> new Iterator<T>() {
private final Iterator<T> xsi = xs.iterator();
private boolean separating = false;
@Override
public boolean hasNext() {
return xsi.hasNext();
}
@Override
public T next() {
if (separating) {
separating = false;
return sep;
} else {
separating = true;
return xsi.next();
}
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
/**
* Equivalent of Haskell's {@code intersperse} function. Given a {@code String} {@code s} and a seperator
* {@code sep}, returns a {@code String} consisting of the characters of {@code s} with {@code sep} between
* every adjacent pair. Uses O(n) additional memory, where n is the length of {@code s}.
*
* <ul>
* <li>{@code sep} may be any {@code char}.</li>
* <li>{@code s} must be non-null.</li>
* <li>The result is a {@code String} whose odd-indexed (using 0-based indexing) characters are identical.</li>
* </ul>
*
* Result length is 0 when |{@code s}|=0, 2|{@code s}|–1 otherwise
*
* @param sep a separator
* @param s a {@code String}
* @return a {@code String} consisting of the characters of {@code s} interspersed with {@code sep}
*/
public static @NotNull String intersperse(char sep, @NotNull String s) {
if (s.isEmpty()) return "";
StringBuilder sb = new StringBuilder();
sb.append(s.charAt(0));
for (int i = 1; i < s.length(); i++) {
sb.append(sep);
sb.append(s.charAt(i));
}
return sb.toString();
}
    /**
     * Equivalent of Haskell's {@code intercalate} function. Inserts an {@code Iterable} between every two adjacent
     * {@code Iterable}s in an {@code Iterable} of {@code Iterable}s, flattening the result. {@code xss}, any
     * element of {@code xss}, or {@code xs} may be infinite, in which case the result is also infinite. Uses O(1)
     * additional memory. The {@code Iterable} produced does not support removing elements.
     *
     * Result length is the sum of the lengths of {@code xss}'s elements plus (0 if |{@code xss}|=0,
     * |{@code xs}|(|{@code xss}|–1) otherwise)
     *
     * <ul>
     *  <li>{@code xs} must be non-null.</li>
     *  <li>{@code xss} must be non-null.</li>
     *  <li>The result is non-null.</li>
     * </ul>
     *
     * @param xs the separating {@code Iterable}
     * @param xss the separated {@code Iterable}
     * @param <T> the resulting {@code Iterable}'s element type
     * @return {@code xss} intercalated by {@code xs}
     */
    public static @NotNull <T> Iterable<T> intercalate(@NotNull Iterable<T> xs, @NotNull Iterable<Iterable<T>> xss) {
        // intersperse treats each inner Iterable as a single element; concat then flattens.
        return concat(intersperse(xs, xss));
    }
    /**
     * Equivalent of Haskell's {@code intercalate} function. Inserts a {@code String} between every two adjacent
     * {@code String}s in an {@code Iterable} of {@code String}s, flattening the result. Uses O(abc) additional
     * memory, where a is the length of {@code strings}, b is the maximum length of any string in {@code strings},
     * and c is the length of {@code sep}.
     *
     * <ul>
     *  <li>{@code sep} must be non-null.</li>
     *  <li>{@code strings} must be finite.</li>
     *  <li>The result is non-null.</li>
     * </ul>
     *
     * Result length is the sum of the lengths of {@code strings}'s elements plus (0 if |{@code strings}|=0,
     * |{@code sep}|(|{@code strings}|–1) otherwise)
     *
     * @param sep the separating {@code String}
     * @param strings the separated {@code String}s
     * @return {@code strings} intercalated by {@code sep}
     */
    public static @NotNull String intercalate(@NotNull String sep, @NotNull Iterable<String> strings) {
        return concatStrings(intersperse(sep, strings));
    }
    /**
     * Equivalent of Haskell's {@code transpose} function. Swaps rows and columns of an {@code Iterable} of
     * {@code Iterables}. If the rows have different lengths, then the "overhanging" elements still end up in the
     * result. See test cases for examples. Any element of {@code xss} may be infinite, in which case the result will
     * be infinite. Uses O(nm) additional memory, where n is the length of {@code xss} and m is the largest amount of
     * memory used by any {@code Iterable} in {@code xss}. The {@code Iterable} produced does not support removing
     * elements.
     *
     * <ul>
     *  <li>{@code xss} must be finite.</li>
     *  <li>The lengths of the result's elements are finite, non-increasing, and never 0.</li>
     * </ul>
     *
     * Result length is the maximum length of {@code xss}'s elements
     *
     * @param xss an {@code Iterable} of {@code Iterable}s
     * @param <T> the {@code Iterable}'s elements' element type
     * @return {@code xss}, transposed
     */
    public static @NotNull <T> Iterable<List<T>> transpose(@NotNull Iterable<Iterable<T>> xss) {
        return () -> new Iterator<List<T>>() {
            // One iterator per row, created eagerly when this column iterator is built.
            private final List<Iterator<T>> iterators = toList(map(Iterable::iterator, xss));
            @Override
            public boolean hasNext() {
                // Another column exists as long as any row still has elements.
                return any(Iterator::hasNext, iterators);
            }
            @Override
            public List<T> next() {
                List<T> nextList = new ArrayList<>();
                for (Iterator<T> iterator : iterators) {
                    // Exhausted rows are simply skipped, so "overhanging" elements survive.
                    if (iterator.hasNext()) {
                        nextList.add(iterator.next());
                    }
                }
                return nextList;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Equivalent of Haskell's {@code transpose} function. Swaps rows and columns of an {@code Iterable} of
     * {@code String}s. If the rows have different lengths, then the "overhanging" characters still end up in the
     * result. See test cases for examples. Uses O(nm) additional memory, where n is the length of {@code xss} and m
     * is the length of the longest {@code String} in {@code xss}. The {@code Iterable} produced does not support
     * removing elements.
     *
     * <ul>
     *  <li>{@code strings} must be non-null.</li>
     *  <li>The lengths of the result's elements are non-increasing and never 0.</li>
     * </ul>
     *
     * Result length is the maximum length of {@code strings}'s elements
     *
     * @param strings an {@code Iterable} of {@code String}s
     * @return {@code strings}, transposed
     */
    public static @NotNull Iterable<String> transposeStrings(@NotNull Iterable<String> strings) {
        // Round-trip each String through an Iterable<Character> so transpose can be reused.
        return map(
                IterableUtils::charsToString,
                transpose(map(s -> fromString(s), strings))
        );
    }
    /**
     * Equivalent of Haskell's {@code transpose} function. Swaps rows and columns of an {@code Iterable} of
     * {@code Iterables}. If the rows have different lengths, then the "overhanging" elements will be truncated; the
     * result's rows will all have equal lengths. See test cases for examples. Any element of {@code xss} may be
     * infinite, in which case the result will be infinite. Uses O(nm) additional memory, where n is the length of
     * {@code xss} and m is the largest amount of memory used by any {@code Iterable} in {@code xss}. The
     * {@code Iterable} produced does not support removing elements.
     *
     * <ul>
     *  <li>{@code xss} must be finite.</li>
     *  <li>The lengths of the result's elements are finite and equal.</li>
     * </ul>
     *
     * Result length is the minimum length of {@code xss}'s elements
     *
     * @param xss an {@code Iterable} of {@code Iterable}s
     * @param <T> the {@code Iterable}'s elements' element type
     * @return {@code xss}, transposed
     */
    public static @NotNull <T> Iterable<List<T>> transposeTruncating(@NotNull Iterable<Iterable<T>> xss) {
        return () -> new Iterator<List<T>>() {
            private final List<Iterator<T>> iterators = toList(map(Iterable::iterator, xss));
            @Override
            public boolean hasNext() {
                // Stop as soon as any row runs out (and treat an empty xss as exhausted).
                return !iterators.isEmpty() && all(Iterator::hasNext, iterators);
            }
            @Override
            public List<T> next() {
                List<T> nextList = new ArrayList<>();
                for (Iterator<T> iterator : iterators) {
                    nextList.add(iterator.next());
                }
                return nextList;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Equivalent of Haskell's {@code transpose} function. Swaps rows and columns of an {@code Iterable} of
     * {@code String}s. If the rows have different lengths, then the "overhanging" characters will be truncated. See
     * test cases for examples. Uses O(nm) additional memory, where n is the length of {@code xss} and m is the
     * length of the longest {@code String} in {@code xss}. The {@code Iterable} produced does not support removing
     * elements.
     *
     * <ul>
     *  <li>{@code strings} must be non-null.</li>
     *  <li>The lengths of the result's elements are equal.</li>
     * </ul>
     *
     * Result length is the minimum length of {@code strings}'s elements
     *
     * @param strings an {@code Iterable} of {@code String}s
     * @return {@code strings}, transposed
     */
    public static @NotNull Iterable<String> transposeStringsTruncating(@NotNull Iterable<String> strings) {
        // Same adapter pattern as transposeStrings, but with the truncating transpose.
        return map(
                IterableUtils::charsToString,
                transposeTruncating(map(s -> fromString(s), strings))
        );
    }
    /**
     * Equivalent of Haskell's {@code transpose} function. Swaps rows and columns of an {@code Iterable} of
     * {@code Iterables}. If the rows have different lengths, then the gaps will be padded; the result's rows will all
     * have equal lengths. See test cases for examples. Any element of {@code xss} may be infinite, in which case the
     * result will be infinite. Uses O(nm) additional memory, where n is the length of {@code xss} and m is the
     * largest amount of memory used by any {@code Iterable} in {@code xss}. The {@code Iterable} produced does not
     * support removing elements.
     *
     * <ul>
     *  <li>{@code xss} must be finite.</li>
     *  <li>The lengths of the result's elements are equal.</li>
     * </ul>
     *
     * Result length is the maximum length of {@code xss}'s elements
     *
     * @param xss an {@code Iterable} of {@code Iterable}s
     * @param pad the padding
     * @param <T> the {@code Iterable}'s elements' element type
     * @return {@code xss}, transposed
     */
    // NOTE(review): this variant returns Iterable<Iterable<T>> while transpose and
    // transposeTruncating return Iterable<List<T>> — possibly intentional; confirm.
    public static @NotNull <T> Iterable<Iterable<T>> transposePadded(
            @Nullable T pad,
            @NotNull Iterable<Iterable<T>> xss
    ) {
        return () -> new Iterator<Iterable<T>>() {
            private final List<Iterator<T>> iterators = toList(map(Iterable::iterator, xss));
            @Override
            public boolean hasNext() {
                return any(Iterator::hasNext, iterators);
            }
            @Override
            public Iterable<T> next() {
                List<T> nextList = new ArrayList<>();
                for (Iterator<T> iterator : iterators) {
                    // Exhausted rows contribute the pad value instead of being skipped.
                    nextList.add(iterator.hasNext() ? iterator.next() : pad);
                }
                return nextList;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Equivalent of Haskell's {@code transpose} function. Swaps rows and columns of an {@code Iterable} of
     * {@code String}s. If the rows have different lengths, then the gaps will be padded; the result's rows will all
     * have equal lengths. Uses O(nm) additional memory, where n is the length of {@code xss} and m is the length of
     * the longest {@code String} in {@code xss}. The {@code Iterable} produced does not support removing elements.
     *
     * <ul>
     *  <li>{@code strings} must be non-null.</li>
     *  <li>The lengths of the result's elements are equal.</li>
     * </ul>
     *
     * Result length is the maximum length of {@code strings}'s elements
     *
     * @param strings an {@code Iterable} of {@code String}s
     * @param pad the padding
     * @return {@code strings}, transposed
     */
    public static @NotNull Iterable<String> transposeStringsPadded(char pad, @NotNull Iterable<String> strings) {
        // Same adapter pattern as transposeStrings, but with the padding transpose.
        return map(
                IterableUtils::charsToString,
                transposePadded(pad, map(s -> fromString(s), strings))
        );
    }
    /**
     * Equivalent of Haskell's {@code foldl} function. Combines the elements of {@code xs} from left to right,
     * starting from {@code z}: {@code f} is applied to an (accumulator, element) pair for each element in turn.
     * {@code xs} must be finite. Uses O(1) additional memory (beyond whatever {@code f} allocates).
     *
     * @param f the combining function, applied to (accumulator, element) pairs
     * @param z the initial accumulator value
     * @param xs an {@code Iterable}
     * @param <A> the element type
     * @param <B> the accumulator type
     * @return the final accumulator value; {@code z} itself if {@code xs} is empty
     */
    public static @Nullable <A, B> B foldl(
            @NotNull Function<Pair<B, A>, B> f,
            @Nullable B z,
            @NotNull Iterable<A> xs
    ) {
        B result = z;
        for (A x : xs) {
            result = f.apply(new Pair<B, A>(result, x));
        }
        return result;
    }
    /**
     * Equivalent of Haskell's {@code foldl1} function. Combines the elements of {@code xs} from left to right,
     * using the first element as the initial accumulator. {@code xs} must be finite. Unlike Haskell's
     * {@code foldl1}, which errors on an empty list, this returns {@code null} when {@code xs} is empty.
     *
     * @param f the combining function, applied to (accumulator, element) pairs
     * @param xs an {@code Iterable}
     * @param <A> the element and accumulator type
     * @return the final accumulator value, or {@code null} if {@code xs} is empty
     */
    public static @Nullable <A> A foldl1(@NotNull Function<Pair<A, A>, A> f, @NotNull Iterable<A> xs) {
        A result = null;
        boolean started = false;  // distinguishes "no elements yet" from a null accumulator
        for (A x : xs) {
            if (started) {
                result = f.apply(new Pair<A, A>(result, x));
            } else {
                result = x;
                started = true;
            }
        }
        return result;
    }
    /**
     * Equivalent of Haskell's {@code foldr} function. Combines the elements of {@code xs} from right to left,
     * starting from {@code z}. Implemented by reversing {@code xs}, so {@code xs} must be finite and O(n)
     * additional memory is used.
     *
     * @param f the combining function, applied to (element, accumulator) pairs
     * @param z the initial accumulator value
     * @param xs an {@code Iterable}
     * @param <A> the element type
     * @param <B> the accumulator type
     * @return the final accumulator value; {@code z} itself if {@code xs} is empty
     */
    public static @Nullable <A, B> B foldr(
            @NotNull Function<Pair<A, B>, B> f,
            @Nullable B z,
            @NotNull Iterable<A> xs
    ) {
        return foldl(p -> f.apply(new Pair<>(p.b, p.a)), z, reverse(xs));
    }
    /**
     * Equivalent of Haskell's {@code foldr1} function. Combines the elements of {@code xs} from right to left,
     * using the last element as the initial accumulator. Implemented by reversing {@code xs}, so {@code xs} must be
     * finite and O(n) additional memory is used. Returns {@code null} when {@code xs} is empty.
     *
     * @param f the combining function, applied to (element, accumulator) pairs
     * @param xs an {@code Iterable}
     * @param <A> the element and accumulator type
     * @return the final accumulator value, or {@code null} if {@code xs} is empty
     */
    public static @Nullable <A> A foldr1(@NotNull Function<Pair<A, A>, A> f, @NotNull Iterable<A> xs) {
        return foldl1(p -> f.apply(new Pair<>(p.b, p.a)), reverse(xs));
    }
    /**
     * Equivalent of Haskell's {@code concat} function. Flattens an {@code Iterable} of {@code Iterable}s into a
     * single {@code Iterable}. {@code xss} or any of its elements may be infinite, in which case the result is also
     * infinite. Uses O(1) additional memory. The {@code Iterable} produced does not support removing elements.
     *
     * @param xss an {@code Iterable} of {@code Iterable}s
     * @param <T> the element type
     * @return the elements of {@code xss}'s elements, concatenated
     */
    public static @NotNull <T> Iterable<T> concat(@NotNull Iterable<Iterable<T>> xss) {
        return () -> new Iterator<T>() {
            final Iterator<Iterable<T>> xssi = xss.iterator();
            // null only when xss itself is empty
            Iterator<T> xsi = xssi.hasNext() ? xssi.next().iterator() : null;
            @Override
            public boolean hasNext() {
                if (xsi == null) return false;
                // Advance past any number of empty inner Iterables.
                while (!xsi.hasNext()) {
                    if (!xssi.hasNext()) return false;
                    xsi = xssi.next().iterator();
                }
                return true;
            }
            @Override
            public T next() {
                hasNext();  // side effect: positions xsi on a non-empty inner Iterable
                return xsi.next();
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
public static @NotNull String concatStrings(@NotNull Iterable<String> strings) {
StringBuilder sb = new StringBuilder();
for (String s : strings) {
sb.append(s);
}
return sb.toString();
}
    /**
     * Equivalent of Haskell's {@code concatMap} function. Maps {@code f} over {@code xs} and flattens the
     * resulting {@code Iterable}s. Lazy; uses O(1) additional memory.
     *
     * @param f a function from an element to an {@code Iterable}
     * @param xs an {@code Iterable}
     * @param <A> the input element type
     * @param <B> the output element type
     * @return the concatenation of {@code f} applied to each element of {@code xs}
     */
    public static @NotNull <A, B> Iterable<B> concatMap(@NotNull Function<A, Iterable<B>> f, @NotNull Iterable<A> xs) {
        return concat(map(f, xs));
    }
public static boolean and(@NotNull Iterable<Boolean> xs) {
for (boolean x : xs) {
if (!x) return false;
}
return true;
}
public static boolean or(@NotNull Iterable<Boolean> xs) {
for (boolean x : xs) {
if (x) return true;
}
return false;
}
public static <T> boolean any(@NotNull Predicate<T> predicate, @NotNull Iterable<T> xs) {
for (T x : xs) {
if (predicate.test(x)) return true;
}
return false;
}
public static <T> boolean all(@NotNull Predicate<T> predicate, @NotNull Iterable<T> xs) {
for (T x : xs) {
if (!predicate.test(x)) return false;
}
return true;
}
public static byte sumByte(@NotNull Iterable<Byte> xs) {
return foldl(p -> (byte) (p.a + p.b), (byte) 0, xs);
}
public static short sumShort(@NotNull Iterable<Short> xs) {
return foldl(p -> (short) (p.a + p.b), (short) 0, xs);
}
public static int sumInteger(@NotNull Iterable<Integer> xs) {
return foldl(p -> p.a + p.b, 0, xs);
}
public static long sumLong(@NotNull Iterable<Long> xs) {
return foldl(p -> p.a + p.b, 0L, xs);
}
public static float sumFloat(@NotNull Iterable<Float> xs) {
return foldl(p -> p.a + p.b, 0.0f, xs);
}
public static double sumDouble(Iterable<Double> xs) {
return foldl(p -> p.a + p.b, 0.0, xs);
}
public static @NotNull BigInteger sumBigInteger(@NotNull Iterable<BigInteger> xs) {
return foldl(p -> p.a.add(p.b), BigInteger.ZERO, xs);
}
public static @NotNull BigDecimal sumBigDecimal(@NotNull Iterable<BigDecimal> xs) {
return foldl(p -> p.a.add(p.b), BigDecimal.ZERO, xs);
}
public static byte productByte(@NotNull Iterable<Byte> xs) {
return foldl(p -> (byte) (p.a * p.b), (byte) 1, xs);
}
public static short productShort(@NotNull Iterable<Short> xs) {
return foldl(p -> (short) (p.a * p.b), (short) 1, xs);
}
public static int productInteger(@NotNull Iterable<Integer> xs) {
return foldl(p -> p.a * p.b, 1, xs);
}
public static long productLong(@NotNull Iterable<Long> xs) {
return foldl(p -> p.a * p.b, 1L, xs);
}
public static float productFloat(@NotNull Iterable<Float> xs) {
return foldl(p -> p.a * p.b, 1.0f, xs);
}
public static double productDouble(@NotNull Iterable<Double> xs) {
return foldl(p -> p.a * p.b, 1.0, xs);
}
public static @NotNull BigInteger productBigInteger(Iterable<BigInteger> xs) {
return foldl(p -> p.a.multiply(p.b), BigInteger.ONE, xs);
}
public static @NotNull BigDecimal productBigDecimal(@NotNull Iterable<BigDecimal> xs) {
return foldl(p -> p.a.multiply(p.b), BigDecimal.ONE, xs);
}
    /**
     * Equivalent of Haskell's {@code maximum} function. Returns the largest element of {@code xs} by natural
     * ordering. {@code xs} must be finite. NOTE(review): despite the {@code @NotNull} return, an empty {@code xs}
     * yields {@code null} (via {@code foldl1}) — confirm intended behavior.
     */
    public static @NotNull <T extends Comparable<T>> T maximum(@NotNull Iterable<T> xs) {
        return foldl1(p -> max(p.a, p.b), xs);
    }
    /**
     * Returns the largest {@code char} in {@code s}. An empty {@code s} causes a {@code NullPointerException} when
     * the null result of {@code foldl1} is unboxed.
     */
    public static char maximum(@NotNull String s) {
        return foldl1(p -> max(p.a, p.b), fromString(s));
    }
    /**
     * Equivalent of Haskell's {@code minimum} function. Returns the smallest element of {@code xs} by natural
     * ordering. {@code xs} must be finite. NOTE(review): an empty {@code xs} yields {@code null} — see
     * {@code maximum}.
     */
    public static @NotNull <T extends Comparable<T>> T minimum(@NotNull Iterable<T> xs) {
        return foldl1(p -> min(p.a, p.b), xs);
    }
    /**
     * Returns the smallest {@code char} in {@code s}. An empty {@code s} causes a {@code NullPointerException}
     * when the null result of {@code foldl1} is unboxed.
     */
    public static char minimum(@NotNull String s) {
        return foldl1(p -> min(p.a, p.b), fromString(s));
    }
    /**
     * Equivalent of Haskell's {@code scanl} function. Like {@code foldl}, but lazily yields every intermediate
     * accumulator, starting with {@code z}. {@code xs} may be infinite. Result length is |{@code xs}|+1. The
     * {@code Iterable} produced does not support removing elements.
     *
     * @param f the combining function, applied to (accumulator, element) pairs
     * @param z the initial accumulator value
     * @param xs an {@code Iterable}
     * @param <A> the element type
     * @param <B> the accumulator type
     * @return the successive accumulator values
     */
    public static @NotNull <A, B> Iterable<B> scanl(
            @NotNull Function<Pair<B, A>, B> f,
            @Nullable B z,
            @NotNull Iterable<A> xs
    ) {
        return () -> new Iterator<B>() {
            private final Iterator<A> xsi = xs.iterator();
            private B result = z;
            private boolean firstTime = true;  // the initial z is emitted before any element is consumed
            @Override
            public boolean hasNext() {
                return firstTime || xsi.hasNext();
            }
            @Override
            public B next() {
                if (firstTime) {
                    firstTime = false;
                    return result;
                } else {
                    result = f.apply(new Pair<B, A>(result, xsi.next()));
                    return result;
                }
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Equivalent of Haskell's {@code scanl1} function. Like {@code scanl}, but uses the first element of
     * {@code xs} as the initial accumulator. {@code xs} must be non-empty. Result length is |{@code xs}|.
     */
    public static @NotNull <A> Iterable<A> scanl1(@NotNull Function<Pair<A, A>, A> f, @NotNull Iterable<A> xs) {
        return scanl(f, head(xs), tail(xs));
    }
    /**
     * Equivalent of Haskell's {@code scanr} function. Like {@code foldr}, but yields every intermediate
     * accumulator. Implemented by reversing {@code xs}, so {@code xs} must be finite and O(n) additional memory is
     * used. NOTE(review): unlike Haskell's {@code scanr}, the accumulators are produced in left-to-right
     * (reversed-input) order — confirm against callers.
     */
    public static @NotNull <A, B> Iterable<B> scanr(
            @NotNull Function<Pair<A, B>, B> f,
            @NotNull B z,
            @NotNull Iterable<A> xs
    ) {
        return scanl(p -> f.apply(new Pair<A, B>(p.b, p.a)), z, reverse(xs));
    }
    /**
     * Equivalent of Haskell's {@code scanr1} function. Like {@code scanr}, but uses the last element of {@code xs}
     * as the initial accumulator. {@code xs} must be finite and non-empty; O(n) additional memory is used.
     */
    public static @NotNull <A> Iterable<A> scanr1(@NotNull Function<Pair<A, A>, A> f, @NotNull Iterable<A> xs) {
        return scanl1(p -> f.apply(new Pair<A, A>(p.b, p.a)), reverse(xs));
    }
    /**
     * Equivalent of Haskell's {@code mapAccumL} function. Combines a map and a left fold: threads an accumulator
     * through {@code xs} from left to right while collecting the mapped outputs. {@code xs} must be finite.
     *
     * @param f maps an (accumulator, element) pair to a (new accumulator, output) pair
     * @param s the initial accumulator value
     * @param xs an {@code Iterable}
     * @param <X> the input element type
     * @param <Y> the output element type
     * @param <ACC> the accumulator type
     * @return a pair of the final accumulator and the list of outputs
     */
    public static @NotNull <X, Y, ACC> Pair<ACC, List<Y>> mapAccumL(
            @NotNull Function<Pair<ACC, X>, Pair<ACC, Y>> f,
            @Nullable ACC s,
            @NotNull Iterable<X> xs
    ) {
        List<Y> ys = new ArrayList<Y>();
        for (X x : xs) {
            Pair<ACC, Y> p = f.apply(new Pair<ACC, X>(s, x));
            s = p.a;
            ys.add(p.b);
        }
        return new Pair<>(s, ys);
    }
    /**
     * Equivalent of Haskell's {@code mapAccumR} function. Like {@code mapAccumL}, but threads the accumulator from
     * right to left (implemented by reversing {@code xs}). {@code xs} must be finite. NOTE(review): the output list
     * is in reversed-input order, whereas Haskell's {@code mapAccumR} keeps input order — confirm against callers.
     */
    public static @NotNull <X, Y, ACC> Pair<ACC, List<Y>> mapAccumR(
            @NotNull Function<Pair<ACC, X>, Pair<ACC, Y>> f,
            @Nullable ACC s,
            @NotNull Iterable<X> xs) {
        return mapAccumL(f, s, reverse(xs));
    }
public static @NotNull <T> Iterable<T> iterate(@NotNull Function<T, T> f, @Nullable T x) {
return () -> new Iterator<T>() {
private T current = x;
private boolean firstTime = true;
@Override
public boolean hasNext() {
return true;
}
@Override
public T next() {
if (firstTime) {
firstTime = false;
} else {
current = f.apply(current);
}
return current;
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
    /**
     * Equivalent of Haskell's {@code repeat} function. Returns an infinite {@code Iterable} every element of which
     * is {@code x}. Uses O(1) additional memory. The {@code Iterable} produced does not support removing elements.
     *
     * @param x the repeated element
     * @param <T> the element type
     * @return an infinite {@code Iterable} of {@code x}
     */
    public static @NotNull <T> Iterable<T> repeat(@Nullable T x) {
        return () -> new Iterator<T>() {
            @Override
            public boolean hasNext() {
                return true;
            }
            @Override
            public T next() {
                return x;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Equivalent of Haskell's {@code replicate} function. Returns an {@code Iterable} containing {@code x} exactly
     * {@code n} times (empty if {@code n} ≤ 0). Uses O(1) additional memory. The {@code Iterable} produced does not
     * support removing elements.
     *
     * @param n the number of repetitions
     * @param x the repeated element
     * @param <T> the element type
     * @return {@code n} copies of {@code x}
     */
    public static @NotNull <T> Iterable<T> replicate(int n, @Nullable T x) {
        return () -> new Iterator<T>() {
            private int i = 0;  // number of elements emitted so far
            @Override
            public boolean hasNext() {
                return i < n;
            }
            @Override
            public T next() {
                i++;
                return x;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Equivalent of Haskell's {@code replicate} function. Returns an {@code Iterable} containing {@code x} exactly
     * {@code n} times (empty if {@code n} ≤ 0). Uses O(log(n)) additional memory. The {@code Iterable} produced
     * does not support removing elements.
     *
     * @param n the number of repetitions
     * @param x the repeated element
     * @param <T> the element type
     * @return {@code n} copies of {@code x}
     */
    public static @NotNull <T> Iterable<T> replicate(@NotNull BigInteger n, @Nullable T x) {
        return () -> new Iterator<T>() {
            private BigInteger i = BigInteger.ZERO;  // number of elements emitted so far
            @Override
            public boolean hasNext() {
                return lt(i, n);
            }
            @Override
            public T next() {
                i = i.add(BigInteger.ONE);
                return x;
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
public static @NotNull String replicate(int n, char c) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < n; i++) {
sb.append(c);
}
return sb.toString();
}
    /**
     * Equivalent of Haskell's {@code replicate} function. Returns a {@code String} containing {@code c} exactly
     * {@code n} times (empty if {@code n} ≤ 0). Uses O(n) additional memory.
     *
     * @param n the number of repetitions
     * @param c the repeated character
     * @return {@code n} copies of {@code c}
     */
    public static @NotNull String replicate(@NotNull BigInteger n, char c) {
        StringBuilder sb = new StringBuilder();
        for (BigInteger i : range(BigInteger.ONE, n)) {  // i is unused; range only supplies the count
            sb.append(c);
        }
        return sb.toString();
    }
    /**
     * Equivalent of Haskell's {@code cycle} function. Repeats the elements of {@code xs} endlessly. If {@code xs}
     * is empty, {@code xs} itself is returned (Haskell's {@code cycle} errors instead). Uses O(1) additional
     * memory; {@code xs} must yield the same elements on every call to {@code iterator()}. The {@code Iterable}
     * produced does not support removing elements.
     *
     * @param xs an {@code Iterable}
     * @param <T> the {@code Iterable}'s element type
     * @return {@code xs} repeated forever
     */
    public static @NotNull <T> Iterable<T> cycle(@NotNull Iterable<T> xs) {
        if (isEmpty(xs)) return xs;
        return () -> new Iterator<T>() {
            private Iterator<T> xsi = xs.iterator();
            @Override
            public boolean hasNext() {
                return true;
            }
            @Override
            public T next() {
                // Restart from the beginning whenever the underlying iterator is exhausted.
                if (!xsi.hasNext()) xsi = xs.iterator();
                return xsi.next();
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
public static @NotNull <A, B> Iterable<A> unfoldr(@NotNull Function<B, Optional<Pair<A, B>>> f, @NotNull B x) {
return new Iterable<A>() {
@Override
public Iterator<A> iterator() {
return new Iterator<A>() {
private boolean hasNext = true;
private A next;
private B seed = x;
{
advance();
}
@Override
public boolean hasNext() {
return hasNext;
}
@Override
public A next() {
A oldNext = next;
advance();
return oldNext;
}
private void advance() {
Optional<Pair<A, B>> p = f.apply(seed);
if (p.isPresent()) {
next = p.get().a;
seed = p.get().b;
} else {
hasNext = false;
}
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove from this iterator");
}
};
}
};
}
    /**
     * Equivalent of Haskell's {@code take} function. Lazily yields the first {@code n} elements of {@code xs}, or
     * all of them if |{@code xs}| < {@code n}; empty if {@code n} ≤ 0. {@code xs} may be infinite. Uses O(1)
     * additional memory. The {@code Iterable} produced does not support removing elements.
     *
     * @param n the maximum number of elements to take
     * @param xs an {@code Iterable}
     * @param <T> the {@code Iterable}'s element type
     * @return the first {@code n} elements of {@code xs}
     */
    public static @NotNull <T> Iterable<T> take(int n, @NotNull Iterable<T> xs) {
        return () -> new Iterator<T>() {
            private int i = 0;  // number of elements emitted so far
            private final Iterator<T> xsi = xs.iterator();
            @Override
            public boolean hasNext() {
                return i < n && xsi.hasNext();
            }
            @Override
            public T next() {
                i++;
                return xsi.next();
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
    /**
     * Equivalent of Haskell's {@code take} function. Lazily yields the first {@code n} elements of {@code xs}, or
     * all of them if |{@code xs}| < {@code n}; empty if {@code n} ≤ 0. {@code xs} may be infinite. Uses O(log(n))
     * additional memory. The {@code Iterable} produced does not support removing elements.
     *
     * @param n the maximum number of elements to take
     * @param xs an {@code Iterable}
     * @param <T> the {@code Iterable}'s element type
     * @return the first {@code n} elements of {@code xs}
     */
    public static @NotNull <T> Iterable<T> take(@NotNull BigInteger n, @NotNull Iterable<T> xs) {
        return () -> new Iterator<T>() {
            private BigInteger i = BigInteger.ZERO;  // number of elements emitted so far
            private final Iterator<T> xsi = xs.iterator();
            @Override
            public boolean hasNext() {
                return lt(i, n) && xsi.hasNext();
            }
            @Override
            public T next() {
                i = i.add(BigInteger.ONE);
                return xsi.next();
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove from this iterator");
            }
        };
    }
/**
 * Returns the first {@code n} characters of {@code s}.
 *
 * <p>Fix: clamp {@code n} into {@code [0, s.length()]} so the method is total,
 * matching {@code take(int, Iterable)} which simply stops at the end. The old
 * bare {@code substring(0, n)} threw {@code StringIndexOutOfBoundsException}
 * for {@code n > s.length()}, which also made the short-input length check in
 * {@code windows(int, String)} unreachable.
 */
public static @NotNull String take(int n, @NotNull String s) {
    return s.substring(0, Math.max(0, Math.min(n, s.length())));
}
/**
 * BigInteger variant of {@code take} for Strings; the count must fit in an int
 * ({@code intValueExact} throws {@code ArithmeticException} otherwise).
 * Delegates to the int overload so both overloads share one behavior.
 */
public static @NotNull String take(@NotNull BigInteger n, @NotNull String s) {
    return take(n.intValueExact(), s);
}
// Lazily skips the first n elements of xs; total even if n exceeds the length.
// The skipping happens eagerly in the instance initializer when an iterator is created.
public static @NotNull <T> Iterable<T> drop(int n, @NotNull Iterable<T> xs) {
    return () -> new Iterator<T>() {
        private final Iterator<T> xsi = xs.iterator();
        {
            int i = n;
            while (xsi.hasNext()) {
                if (i <= 0) break;
                xsi.next();
                i--;
            }
        }
        @Override
        public boolean hasNext() {
            return xsi.hasNext();
        }
        @Override
        public T next() {
            return xsi.next();
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// BigInteger-counted variant of drop, for skip counts beyond int range.
public static @NotNull <T> Iterable<T> drop(@NotNull BigInteger n, @NotNull Iterable<T> xs) {
    return () -> new Iterator<T>() {
        private final Iterator<T> xsi = xs.iterator();
        {
            BigInteger i = n;
            while (xsi.hasNext()) {
                if (le(i, BigInteger.ZERO)) break;
                xsi.next();
                i = i.subtract(BigInteger.ONE);
            }
        }
        @Override
        public boolean hasNext() {
            return xsi.hasNext();
        }
        @Override
        public T next() {
            return xsi.next();
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
/**
 * Returns {@code s} without its first {@code n} characters.
 *
 * <p>Fix: clamp {@code n} into {@code [0, s.length()]} so the method is total,
 * matching {@code drop(int, Iterable)} which simply stops at the end. The old
 * bare {@code substring(n)} threw {@code StringIndexOutOfBoundsException} for
 * {@code n > s.length()} or negative {@code n}.
 */
public static @NotNull String drop(int n, @NotNull String s) {
    return s.substring(Math.max(0, Math.min(n, s.length())));
}
/**
 * BigInteger variant of {@code drop} for Strings; the count must fit in an int
 * ({@code intValueExact} throws {@code ArithmeticException} otherwise).
 * Delegates to the int overload so both overloads share one behavior.
 */
public static @NotNull String drop(@NotNull BigInteger n, @NotNull String s) {
    return drop(n.intValueExact(), s);
}
// Pads xs on the right with 'pad' (or truncates it) to exactly 'length' elements.
public static @NotNull <T> Iterable<T> pad(@NotNull T pad, int length, @NotNull Iterable<T> xs) {
    if (length < 0)
        throw new IllegalArgumentException("cannot pad with a negative length");
    return take(length, concat(xs, repeat(pad)));
}
// BigInteger variant of pad for Iterables.
public static @NotNull <T> Iterable<T> pad(@NotNull T pad, @NotNull BigInteger length, @NotNull Iterable<T> xs) {
    if (length.signum() == -1)
        throw new IllegalArgumentException("cannot pad with a negative length");
    return take(length, (Iterable<T>) concat(xs, repeat(pad)));
}
// Pads s on the right with the character 'pad' (or truncates) to exactly 'length' chars.
public static @NotNull String pad(char pad, int length, @NotNull String s) {
    if (s.length() == length) return s;
    if (s.length() > length) return take(length, s);
    return s + replicate(length - s.length(), pad);
}
// BigInteger variant of pad for Strings; the length must fit in an int.
public static @NotNull String pad(char pad, @NotNull BigInteger length, @NotNull String s) {
    if (s.length() == length.intValueExact()) return s;
    if (s.length() > length.intValueExact()) return take(length, s);
    return s + replicate(length.intValueExact() - s.length(), pad);
}
// Splits xs into (first n elements, remainder). Note xs is traversed twice,
// once by each half, so xs should be safely re-iterable.
public static @NotNull <T> Pair<Iterable<T>, Iterable<T>> splitAt(int n, @NotNull Iterable<T> xs) {
    return new Pair<>(take(n, xs), drop(n, xs));
}
// BigInteger variant of splitAt for Iterables.
public static @NotNull <T> Pair<Iterable<T>, Iterable<T>> splitAt(@NotNull BigInteger n, @NotNull Iterable<T> xs) {
    return new Pair<>(take(n, xs), drop(n, xs));
}
/**
 * Splits {@code s} into (first {@code n} characters, remainder).
 *
 * <p>Fix: the split index is clamped into {@code [0, s.length()]} so the method
 * is total, consistent with {@code splitAt(int, Iterable)} (built on total
 * take/drop). The old bare {@code substring} calls threw for out-of-range n.
 */
public static @NotNull Pair<String, String> splitAt(int n, @NotNull String s) {
    int i = Math.max(0, Math.min(n, s.length()));
    return new Pair<>(s.substring(0, i), s.substring(i));
}
// BigInteger variant of splitAt for Strings; the index must fit in an int.
public static @NotNull Pair<String, String> splitAt(@NotNull BigInteger i, @NotNull String s) {
    return splitAt(i.intValueExact(), s);
}
// Lazily yields the longest prefix of xs whose elements all satisfy p.
// Stops at (and does not yield) the first failing element.
public static @NotNull <T> Iterable<T> takeWhile(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
    return new Iterable<T>() {
        @Override
        public Iterator<T> iterator() {
            return new Iterator<T>() {
                private final Iterator<T> xsi = xs.iterator();
                private T next;
                private boolean hasNext;
                {
                    advance(); // prime the first element
                }
                @Override
                public boolean hasNext() {
                    return hasNext;
                }
                @Override
                public T next() {
                    T current = next;
                    advance();
                    return current;
                }
                // Reads one element ahead; hasNext is false once p fails or xs ends.
                private void advance() {
                    if (xsi.hasNext()) {
                        next = xsi.next();
                        hasNext = p.test(next);
                    } else {
                        hasNext = false;
                    }
                }
                @Override
                public void remove() {
                    throw new UnsupportedOperationException("cannot remove from this iterator");
                }
            };
        }
    };
}
/** Returns the longest prefix of {@code s} whose characters all satisfy {@code p}. */
public static @NotNull String takeWhile(@NotNull Predicate<Character> p, @NotNull String s) {
    // Find where the satisfying prefix ends, then slice once.
    int end = 0;
    while (end < s.length() && p.test(s.charAt(end))) {
        end++;
    }
    return s.substring(0, end);
}
// Lazily yields elements of xs up to AND INCLUDING the first one satisfying p
// (contrast takeWhile, which excludes the first failing element).
// NOTE(review): 'next != null' is used as a "no element yet" sentinel, so a null
// element in xs that satisfies p would not stop iteration — confirm nulls never occur.
public static @NotNull <T> Iterable<T> stopAt(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
    return new Iterable<T>() {
        @Override
        public Iterator<T> iterator() {
            return new Iterator<T>() {
                private final Iterator<T> xsi = xs.iterator();
                private T next;
                private boolean hasNext;
                {
                    advance(); // prime the first element
                }
                @Override
                public boolean hasNext() {
                    return hasNext;
                }
                @Override
                public T next() {
                    T current = next;
                    advance();
                    return current;
                }
                // Stops once the previously yielded element satisfied p; otherwise reads ahead.
                private void advance() {
                    if (next != null && p.test(next)) {
                        hasNext = false;
                    } else {
                        hasNext = xsi.hasNext();
                        if (xsi.hasNext()) {
                            next = xsi.next();
                        }
                    }
                }
                @Override
                public void remove() {
                    throw new UnsupportedOperationException("cannot remove from this iterator");
                }
            };
        }
    };
}
/** Returns the prefix of {@code s} up to and including the first character satisfying {@code p}. */
public static @NotNull String stopAt(@NotNull Predicate<Character> p, @NotNull String s) {
    // Locate the cut point, then slice once; no match keeps the whole string.
    int cut = s.length();
    for (int i = 0; i < s.length(); i++) {
        if (p.test(s.charAt(i))) {
            cut = i + 1;
            break;
        }
    }
    return s.substring(0, cut);
}
// Lazily skips the longest prefix of xs whose elements satisfy p, then yields the rest.
// The skipping happens eagerly when an iterator is created; 'first' buffers the
// first non-satisfying element (already consumed during the scan).
public static @NotNull <T> Iterable<T> dropWhile(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
    return () -> new Iterator<T>() {
        private Iterator<T> xsi = xs.iterator();
        private T x;
        private boolean first = false;
        {
            while (xsi.hasNext()) {
                x = xsi.next();
                if (!p.test(x)) {
                    first = true; // x is the first kept element, must be re-emitted
                    break;
                }
            }
        }
        @Override
        public boolean hasNext() {
            return first || xsi.hasNext();
        }
        @Override
        public T next() {
            if (first) {
                first = false;
                return x; // emit the buffered boundary element
            } else {
                return xsi.next();
            }
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
/**
 * Drops the longest prefix of {@code s} whose characters satisfy {@code p} and
 * returns the remainder (empty when every character satisfies {@code p}).
 *
 * <p>Fix: the scan previously looked for the first character SATISFYING p and
 * kept from there, which is the opposite of dropWhile — e.g. it returned
 * "123abc" unchanged for an isDigit predicate. The condition is now negated,
 * matching {@code dropWhile(Predicate, Iterable)} and making
 * {@code span(Predicate, String)} correct.
 */
public static @NotNull String dropWhile(@NotNull Predicate<Character> p, @NotNull String s) {
    int startIndex = -1;
    for (int i = 0; i < s.length(); i++) {
        if (!p.test(s.charAt(i))) { // first character that FAILS p begins the result
            startIndex = i;
            break;
        }
    }
    return startIndex == -1 ? "" : s.substring(startIndex);
}
// Removes the longest suffix of xs whose elements satisfy p.
// Strict: materializes xs into a list to scan from the back.
public static @NotNull <T> Iterable<T> dropWhileEnd(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
    List<T> list = toList(xs);
    int index = -1;
    for (int i = list.size() - 1; i >= 0; i--) {
        if (!p.test(list.get(i))) {
            index = i; // last element to keep
            break;
        }
    }
    return take(index + 1, list);
}
/** Removes the longest suffix of {@code s} whose characters satisfy {@code p}. */
public static @NotNull String dropWhileEnd(@NotNull Predicate<Character> p, @NotNull String s) {
    // Walk backwards past the trailing run of satisfying characters, then slice once.
    int end = s.length();
    while (end > 0 && p.test(s.charAt(end - 1))) {
        end--;
    }
    return s.substring(0, end);
}
// Lazily splits xs into consecutive chunks of at most 'size' elements;
// the final chunk may be shorter.
// NOTE(review): size <= 0 yields an infinite stream of empty chunks — confirm
// callers never pass a non-positive size.
public static @NotNull <T> Iterable<List<T>> chunk(int size, @NotNull Iterable<T> xs) {
    return () -> new Iterator<List<T>>() {
        private final Iterator<T> xsi = xs.iterator();
        @Override
        public boolean hasNext() {
            return xsi.hasNext();
        }
        @Override
        public List<T> next() {
            List<T> chunk = new ArrayList<>();
            for (int i = 0; i < size; i++) {
                if (!xsi.hasNext()) break;
                chunk.add(xsi.next());
            }
            return chunk;
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Lazily splits s into consecutive substrings of at most 'size' characters.
// NOTE(review): same non-positive-size caveat as the Iterable overload.
public static @NotNull Iterable<String> chunk(int size, @NotNull String s) {
    return () -> new Iterator<String>() {
        private int i = 0;
        @Override
        public boolean hasNext() {
            return i != s.length();
        }
        @Override
        public String next() {
            StringBuilder sb = new StringBuilder();
            for (int j = 0; j < size; j++) {
                if (i == s.length()) break;
                sb.append(s.charAt(i++));
            }
            return sb.toString();
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Like chunk, but the final chunk is right-padded with 'pad' to exactly 'size' elements.
public static @NotNull <T> Iterable<List<T>> chunkPadded(@Nullable T pad, int size, @NotNull Iterable<T> xs) {
    return () -> new Iterator<List<T>>() {
        private final Iterator<T> xsi = xs.iterator();
        @Override
        public boolean hasNext() {
            return xsi.hasNext();
        }
        @Override
        public List<T> next() {
            List<T> chunk = new ArrayList<>();
            for (int i = 0; i < size; i++) {
                chunk.add(xsi.hasNext() ? xsi.next() : pad);
            }
            return chunk;
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Splits xs into (longest satisfying prefix, remainder); xs is traversed twice.
public static @NotNull <T> Pair<Iterable<T>, Iterable<T>> span(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
    return new Pair<>(takeWhile(p, xs), dropWhile(p, xs));
}
// String variant of span.
public static @NotNull Pair<String, String> span(@NotNull Predicate<Character> p, @NotNull String s) {
    return new Pair<>(takeWhile(p, s), dropWhile(p, s));
}
// Haskell's 'break': span with the predicate negated.
public static @NotNull <T> Pair<Iterable<T>, Iterable<T>> breakIterable(Predicate<T> p, Iterable<T> xs) {
    return span(p.negate(), xs);
}
// String variant of breakIterable.
public static @NotNull Pair<String, String> breakString(@NotNull Predicate<Character> p, @NotNull String s) {
    return span(p.negate(), s);
}
// Returns xs without the given prefix, or empty if prefix does not match.
public static @NotNull <T> Optional<Iterable<T>> stripPrefix(Iterable<T> prefix, Iterable<T> xs) {
    return isPrefixOf(prefix, xs) ? Optional.of(take(length(prefix), xs)) : Optional.<Iterable<T>>empty();
}
// Run-length encodes xs: yields (element, count) pairs for each maximal run of
// equal adjacent elements (by Objects.equals), lazily.
public static @NotNull <T> Iterable<Pair<T, Integer>> countAdjacent(@NotNull Iterable<T> xs) {
    return new Iterable<Pair<T, Integer>>() {
        @Override
        public Iterator<Pair<T, Integer>> iterator() {
            return new Iterator<Pair<T, Integer>>() {
                private Iterator<T> xsi = xs.iterator();
                private boolean hasNext = xsi.hasNext();
                private boolean isLast = false;   // true once the source is exhausted
                private T nextX = null;           // first element of the run being counted
                private Pair<T, Integer> next = null;
                {
                    if (hasNext) {
                        nextX = xsi.next();
                    }
                    advance();
                }
                @Override
                public boolean hasNext() {
                    return hasNext;
                }
                @Override
                public Pair<T, Integer> next() {
                    if (isLast) {
                        hasNext = false; // emit the final run exactly once
                        return next;
                    } else {
                        Pair<T, Integer> oldNext = next;
                        advance();
                        return oldNext;
                    }
                }
                // Counts the current run; leaves nextX holding the first element of the next run.
                private void advance() {
                    T original = nextX;
                    int count = 0;
                    do {
                        count++;
                        if (!xsi.hasNext()) {
                            isLast = true;
                            break;
                        }
                        nextX = xsi.next();
                    } while (Objects.equals(original, nextX));
                    next = new Pair<>(original, count);
                }
                @Override
                public void remove() {
                    throw new UnsupportedOperationException("cannot remove from this iterator");
                }
            };
        }
    };
}
// String variant of countAdjacent, via a character view of s.
public static @NotNull Iterable<Pair<Character, Integer>> countAdjacent(@NotNull String s) {
    return countAdjacent(fromString(s));
}
// Groups adjacent equal elements (by Objects.equals) into lists.
public static @NotNull <T> Iterable<List<T>> group(@NotNull Iterable<T> xs) {
    return group(p -> Objects.equals(p.a, p.b), xs);
}
// Groups adjacent equal characters (primitive == comparison) into substrings.
public static @NotNull <T> Iterable<String> group(@NotNull String s) {
    return group(p -> p.a == p.b, s);
}
/** Returns whether {@code ys} begins with the elements of {@code xs}, compared via {@code Objects.equals}. */
public static <T> boolean isPrefixOf(@NotNull Iterable<T> xs, @NotNull Iterable<T> ys) {
    Iterator<T> prefix = xs.iterator();
    Iterator<T> full = ys.iterator();
    // ys must supply a matching element for every element of xs.
    while (prefix.hasNext()) {
        if (!full.hasNext() || !Objects.equals(prefix.next(), full.next())) {
            return false;
        }
    }
    return true;
}
/**
 * Returns whether {@code s} is a prefix of {@code t}, mirroring
 * {@code isPrefixOf(Iterable, Iterable)}.
 *
 * <p>Fix: the old body took {@code s.substring(0, t.length())} — substring
 * bounds from the wrong string — which threw
 * {@code StringIndexOutOfBoundsException} whenever {@code t} was longer than
 * {@code s}, and compared in the wrong direction otherwise.
 */
public static boolean isPrefixOf(@NotNull String s, @NotNull String t) {
    return t.startsWith(s);
}
// Returns whether xs is a suffix of ys; implemented by prefix-testing the reversals.
public static <T> boolean isSuffixOf(@NotNull Iterable<T> xs, @NotNull Iterable<T> ys) {
    return isPrefixOf(reverse(xs), reverse(ys));
}
/**
 * Returns whether {@code s} is a suffix of {@code t}, mirroring
 * {@code isSuffixOf(Iterable, Iterable)}.
 *
 * <p>Fix: the old body sliced {@code s} with indices derived from {@code t}
 * ({@code s.substring(t.length() - s.length())}), throwing
 * {@code StringIndexOutOfBoundsException} whenever {@code t} was longer than
 * {@code s}, and comparing in the wrong direction otherwise.
 */
public static boolean isSuffixOf(@NotNull String s, @NotNull String t) {
    return t.endsWith(s);
}
// Yields all contiguous windows of exactly 'size' elements, sliding by one.
// Returns an empty iterable when xs has fewer than 'size' elements.
// NOTE(review): relies on take(size, xs) returning a short list for short inputs;
// confirm the String overload below — take(int, String) may throw instead of clamping.
public static @NotNull <T> Iterable<List<T>> windows(int size, @NotNull Iterable<T> xs) {
    List<T> firstWindow = toList(take(size, xs));
    if (firstWindow.size() < size) return new ArrayList<>();
    return cons(firstWindow, () -> new Iterator<List<T>>() {
        Iterator<T> xsi = drop(size, xs).iterator();
        List<T> previousWindow = firstWindow;
        @Override
        public boolean hasNext() {
            return xsi.hasNext();
        }
        @Override
        public List<T> next() {
            // Slide: drop the head of the previous window, append one new element.
            previousWindow = toList(concat(tail(previousWindow), Arrays.asList(xsi.next())));
            return previousWindow;
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    });
}
// String variant of windows: all contiguous substrings of exactly 'size' characters.
public static @NotNull Iterable<String> windows(int size, @NotNull String s) {
    String firstWindow = take(size, s);
    if (firstWindow.length() < size) return new ArrayList<>();
    return cons(firstWindow, () -> new Iterator<String>() {
        Iterator<Character> xsi = fromString(drop(size, s)).iterator();
        String previousWindow = firstWindow;
        @Override
        public boolean hasNext() {
            return xsi.hasNext();
        }
        @Override
        public String next() {
            previousWindow = concat(tail(previousWindow), Character.toString(xsi.next()));
            return previousWindow;
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    });
}
// Returns whether xs appears as a contiguous subsequence of ys.
public static <T> boolean isInfixOf(@NotNull Iterable<T> xs, @NotNull Iterable<T> ys) {
    return any(zs -> equal(xs, zs), windows(length(xs), ys));
}
// String variant: delegates to String.contains.
public static boolean isInfixOf(@NotNull String s, @NotNull String t) {
    return t.contains(s);
}
// Interleaves the given sequences round-robin (transpose then flatten).
public static @NotNull <T> Iterable<T> mux(@NotNull List<Iterable<T>> xss) {
    return concat(map(list -> list, transpose(xss)));
}
// String variant of mux.
public static @NotNull String muxStrings(@NotNull List<String> xss) {
    return concatStrings(transposeStrings(xss));
}
// Inverse of mux: splits xs into 'lines' sequences, the i-th taking every
// lines-th element starting at offset i (via a cyclic boolean selection mask).
public static @NotNull <T> List<Iterable<T>> demux(int lines, @NotNull Iterable<T> xs) {
    List<Iterable<T>> demuxed = new ArrayList<>();
    for (int i = 0; i < lines; i++) {
        Iterable<Boolean> mask = concat(
            replicate(i, false),
            cycle(cons(true, (Iterable<Boolean>) replicate(lines - 1, false)))
        );
        demuxed.add(select(mask, xs));
    }
    return demuxed;
}
// String variant of demux.
public static @NotNull List<String> demux(int lines, @NotNull String s) {
    List<String> demuxed = new ArrayList<>();
    for (int i = 0; i < lines; i++) {
        Iterable<Boolean> mask = concat(
            replicate(i, false),
            cycle(cons(true, (Iterable<Boolean>) replicate(lines - 1, false)))
        );
        demuxed.add(select(mask, s));
    }
    return demuxed;
}
/** Returns the first element of {@code xs} satisfying {@code p}, if any. */
public static @NotNull <T> Optional<T> find(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
    // Linear scan with an explicit iterator; stops at the first match.
    Iterator<T> it = xs.iterator();
    while (it.hasNext()) {
        T candidate = it.next();
        if (p.test(candidate)) {
            return Optional.of(candidate);
        }
    }
    return Optional.empty();
}
/** Returns the first character of {@code s} satisfying {@code p}, if any. */
public static @NotNull Optional<Character> find(@NotNull Predicate<Character> p, @NotNull String s) {
    // Scan left to right, stopping at the first match.
    int index = 0;
    while (index < s.length()) {
        char candidate = s.charAt(index);
        if (p.test(candidate)) {
            return Optional.of(candidate);
        }
        index++;
    }
    return Optional.empty();
}
// Lazily yields the elements of xs that satisfy p, in order.
public static @NotNull <T> Iterable<T> filter(@NotNull Predicate<T> p, @NotNull Iterable<T> xs) {
    return new Iterable<T>() {
        @Override
        public Iterator<T> iterator() {
            return new Iterator<T>() {
                private final Iterator<T> xsi = xs.iterator();
                private T next;
                private boolean hasNext;
                {
                    advance(); // prime the first matching element
                }
                @Override
                public boolean hasNext() {
                    return hasNext;
                }
                @Override
                public T next() {
                    T current = next;
                    advance();
                    return current;
                }
                // Skips forward to the next element satisfying p, if any.
                private void advance() {
                    while (xsi.hasNext()) {
                        next = xsi.next();
                        if (p.test(next)) {
                            hasNext = true;
                            return;
                        }
                    }
                    hasNext = false;
                }
                @Override
                public void remove() {
                    throw new UnsupportedOperationException("cannot remove from this iterator");
                }
            };
        }
    };
}
/** Returns the characters of {@code s} that satisfy {@code p}, in order. */
public static @NotNull String filter(@NotNull Predicate<Character> p, @NotNull String s) {
    // Presize to the input length; output can only be shorter.
    StringBuilder kept = new StringBuilder(s.length());
    for (char c : s.toCharArray()) {
        if (p.test(c)) {
            kept.append(c);
        }
    }
    return kept.toString();
}
// Splits xs into (elements satisfying p, elements failing p); xs is traversed
// twice, once lazily by each half.
public static @NotNull <T> Pair<Iterable<T>, Iterable<T>> partition(
        @NotNull Predicate<T> p,
        @NotNull Iterable<T> xs) {
    return new Pair<>(filter(p, xs), filter(x -> !p.test(x), xs));
}
/** Splits {@code s} into (characters satisfying {@code p}, characters failing {@code p}), preserving order. */
public static @NotNull Pair<String, String> partition(@NotNull Predicate<Character> p, @NotNull String s) {
    StringBuilder matching = new StringBuilder();
    StringBuilder rest = new StringBuilder();
    // Route each character into exactly one buffer.
    for (int i = 0; i < s.length(); i++) {
        char c = s.charAt(i);
        if (p.test(c)) {
            matching.append(c);
        } else {
            rest.append(c);
        }
    }
    return new Pair<>(matching.toString(), rest.toString());
}
/** Returns the element at 0-based index {@code i}; throws {@code IndexOutOfBoundsException} when out of range. */
public static <T> T get(Iterable<T> xs, int i) {
    if (i < 0)
        throw new IndexOutOfBoundsException();
    Iterator<T> it = xs.iterator();
    T result = null;
    // Consume i + 1 elements; the last one consumed is the answer.
    int remaining = i + 1;
    while (remaining > 0) {
        if (!it.hasNext())
            throw new IndexOutOfBoundsException();
        result = it.next();
        remaining--;
    }
    return result;
}
// BigInteger-indexed element access.
// NOTE(review): the int overload consumes i + 1 elements, but this loop runs once
// per value of range(ONE, i) — if that range is inclusive [1, i] it consumes only
// i elements (returning the element at index i - 1, and null for i = 0). Confirm
// range's semantics; this looks off by one relative to get(Iterable, int).
public static <T> T get(Iterable<T> xs, BigInteger i) {
    if (lt(i, BigInteger.ZERO))
        throw new IndexOutOfBoundsException();
    Iterator<T> xsi = xs.iterator();
    T element = null;
    for (BigInteger j : range(BigInteger.ONE, i)) {
        if (!xsi.hasNext())
            throw new IndexOutOfBoundsException();
        element = xsi.next();
    }
    return element;
}
// Random-access overload for lists.
public static <T> T get(List<T> xs, int i) {
    return xs.get(i);
}
// Random-access overload for strings.
public static char get(String s, int i) {
    return s.charAt(i);
}
// Keeps the elements of xs whose positionally paired boolean in bs is true;
// truncates at the shorter of the two sequences (zip semantics).
public static <T> Iterable<T> select(Iterable<Boolean> bs, Iterable<T> xs) {
    return map(p -> p.b, filter(p -> p.a, (Iterable<Pair<Boolean, T>>) zip(bs, xs)));
}
// String variant of select.
public static <T> String select(Iterable<Boolean> bs, String s) {
    return charsToString(
        map(p -> p.b, filter(p -> p.a, (Iterable<Pair<Boolean, Character>>) zip(bs, fromString(s))))
    );
}
// Lazily pairs up elements of as and bs; stops at the shorter sequence.
public static <A, B> Iterable<Pair<A, B>> zip(Iterable<A> as, Iterable<B> bs) {
    return () -> new Iterator<Pair<A, B>>() {
        private final Iterator<A> asi = as.iterator();
        private final Iterator<B> bsi = bs.iterator();
        @Override
        public boolean hasNext() {
            return asi.hasNext() && bsi.hasNext();
        }
        @Override
        public Pair<A, B> next() {
            return new Pair<>(asi.next(), bsi.next());
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Three-way zip; stops at the shortest sequence.
public static <A, B, C> Iterable<Triple<A, B, C>> zip3(Iterable<A> as, Iterable<B> bs, Iterable<C> cs) {
    return () -> new Iterator<Triple<A, B, C>>() {
        private final Iterator<A> asi = as.iterator();
        private final Iterator<B> bsi = bs.iterator();
        private final Iterator<C> csi = cs.iterator();
        @Override
        public boolean hasNext() {
            return asi.hasNext() && bsi.hasNext() && csi.hasNext();
        }
        @Override
        public Triple<A, B, C> next() {
            return new Triple<>(asi.next(), bsi.next(), csi.next());
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Four-way zip; stops at the shortest sequence.
public static <A, B, C, D> Iterable<Quadruple<A, B, C, D>> zip4(
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds
) {
    return () -> new Iterator<Quadruple<A, B, C, D>>() {
        private final Iterator<A> asi = as.iterator();
        private final Iterator<B> bsi = bs.iterator();
        private final Iterator<C> csi = cs.iterator();
        private final Iterator<D> dsi = ds.iterator();
        @Override
        public boolean hasNext() {
            return asi.hasNext() && bsi.hasNext() && csi.hasNext() && dsi.hasNext();
        }
        @Override
        public Quadruple<A, B, C, D> next() {
            return new Quadruple<>(asi.next(), bsi.next(), csi.next(), dsi.next());
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Five-way zip; stops at the shortest sequence.
public static <A, B, C, D, E> Iterable<Quintuple<A, B, C, D, E>> zip5(
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es
) {
    return () -> new Iterator<Quintuple<A, B, C, D, E>>() {
        private final Iterator<A> asi = as.iterator();
        private final Iterator<B> bsi = bs.iterator();
        private final Iterator<C> csi = cs.iterator();
        private final Iterator<D> dsi = ds.iterator();
        private final Iterator<E> esi = es.iterator();
        @Override
        public boolean hasNext() {
            return asi.hasNext() && bsi.hasNext() && csi.hasNext() && dsi.hasNext() && esi.hasNext();
        }
        @Override
        public Quintuple<A, B, C, D, E> next() {
            return new Quintuple<>(asi.next(), bsi.next(), csi.next(), dsi.next(), esi.next());
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Six-way zip; stops at the shortest sequence.
public static <A, B, C, D, E, F> Iterable<Sextuple<A, B, C, D, E, F>> zip6(
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es,
        Iterable<F> fs
) {
    return () -> new Iterator<Sextuple<A, B, C, D, E, F>>() {
        private final Iterator<A> asi = as.iterator();
        private final Iterator<B> bsi = bs.iterator();
        private final Iterator<C> csi = cs.iterator();
        private final Iterator<D> dsi = ds.iterator();
        private final Iterator<E> esi = es.iterator();
        private final Iterator<F> fsi = fs.iterator();
        @Override
        public boolean hasNext() {
            return asi.hasNext() &&
                    bsi.hasNext() &&
                    csi.hasNext() &&
                    dsi.hasNext() &&
                    esi.hasNext() &&
                    fsi.hasNext();
        }
        @Override
        public Sextuple<A, B, C, D, E, F> next() {
            return new Sextuple<>(asi.next(), bsi.next(), csi.next(), dsi.next(), esi.next(), fsi.next());
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Seven-way zip; stops at the shortest sequence.
public static <A, B, C, D, E, F, G> Iterable<Septuple<A, B, C, D, E, F, G>> zip7(
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es,
        Iterable<F> fs,
        Iterable<G> gs
) {
    return () -> new Iterator<Septuple<A, B, C, D, E, F, G>>() {
        private final Iterator<A> asi = as.iterator();
        private final Iterator<B> bsi = bs.iterator();
        private final Iterator<C> csi = cs.iterator();
        private final Iterator<D> dsi = ds.iterator();
        private final Iterator<E> esi = es.iterator();
        private final Iterator<F> fsi = fs.iterator();
        private final Iterator<G> gsi = gs.iterator();
        @Override
        public boolean hasNext() {
            return asi.hasNext() &&
                    bsi.hasNext() &&
                    csi.hasNext() &&
                    dsi.hasNext() &&
                    esi.hasNext() &&
                    fsi.hasNext() &&
                    gsi.hasNext();
        }
        @Override
        public Septuple<A, B, C, D, E, F, G> next() {
            return new Septuple<>(
                    asi.next(),
                    bsi.next(),
                    csi.next(),
                    dsi.next(),
                    esi.next(),
                    fsi.next(),
                    gsi.next()
            );
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Like zip, but continues to the LONGEST sequence, substituting the given pad
// values once a shorter sequence is exhausted.
public static <A, B> Iterable<Pair<A, B>> zipPadded(A aPad, B bPad, Iterable<A> as, Iterable<B> bs) {
    return () -> new Iterator<Pair<A, B>>() {
        private final Iterator<A> asi = as.iterator();
        private final Iterator<B> bsi = bs.iterator();
        @Override
        public boolean hasNext() {
            return asi.hasNext() || bsi.hasNext();
        }
        @Override
        public Pair<A, B> next() {
            A a = asi.hasNext() ? asi.next() : aPad;
            B b = bsi.hasNext() ? bsi.next() : bPad;
            return new Pair<>(a, b);
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Three-way padded zip; runs to the longest sequence.
public static <A, B, C> Iterable<Triple<A, B, C>> zip3Padded(
        A aPad,
        B bPad,
        C cPad,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs
) {
    return () -> new Iterator<Triple<A, B, C>>() {
        private final Iterator<A> asi = as.iterator();
        private final Iterator<B> bsi = bs.iterator();
        private final Iterator<C> csi = cs.iterator();
        @Override
        public boolean hasNext() {
            return asi.hasNext() || bsi.hasNext() || csi.hasNext();
        }
        @Override
        public Triple<A, B, C> next() {
            A a = asi.hasNext() ? asi.next() : aPad;
            B b = bsi.hasNext() ? bsi.next() : bPad;
            C c = csi.hasNext() ? csi.next() : cPad;
            return new Triple<>(a, b, c);
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Four-way padded zip; runs to the longest sequence.
public static <A, B, C, D> Iterable<Quadruple<A, B, C, D>> zip4Padded(
        A aPad,
        B bPad,
        C cPad,
        D dPad,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds
) {
    return () -> new Iterator<Quadruple<A, B, C, D>>() {
        private final Iterator<A> asi = as.iterator();
        private final Iterator<B> bsi = bs.iterator();
        private final Iterator<C> csi = cs.iterator();
        private final Iterator<D> dsi = ds.iterator();
        @Override
        public boolean hasNext() {
            return asi.hasNext() || bsi.hasNext() || csi.hasNext() || dsi.hasNext();
        }
        @Override
        public Quadruple<A, B, C, D> next() {
            A a = asi.hasNext() ? asi.next() : aPad;
            B b = bsi.hasNext() ? bsi.next() : bPad;
            C c = csi.hasNext() ? csi.next() : cPad;
            D d = dsi.hasNext() ? dsi.next() : dPad;
            return new Quadruple<>(a, b, c, d);
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Five-way padded zip; runs to the longest sequence.
public static <A, B, C, D, E> Iterable<Quintuple<A, B, C, D, E>> zip5Padded(
        A aPad,
        B bPad,
        C cPad,
        D dPad,
        E ePad,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es) {
    return () -> new Iterator<Quintuple<A, B, C, D, E>>() {
        private final Iterator<A> asi = as.iterator();
        private final Iterator<B> bsi = bs.iterator();
        private final Iterator<C> csi = cs.iterator();
        private final Iterator<D> dsi = ds.iterator();
        private final Iterator<E> esi = es.iterator();
        @Override
        public boolean hasNext() {
            return asi.hasNext() || bsi.hasNext() || csi.hasNext() || dsi.hasNext() || esi.hasNext();
        }
        @Override
        public Quintuple<A, B, C, D, E> next() {
            A a = asi.hasNext() ? asi.next() : aPad;
            B b = bsi.hasNext() ? bsi.next() : bPad;
            C c = csi.hasNext() ? csi.next() : cPad;
            D d = dsi.hasNext() ? dsi.next() : dPad;
            E e = esi.hasNext() ? esi.next() : ePad;
            return new Quintuple<>(a, b, c, d, e);
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Six-way padded zip; runs to the longest sequence.
public static <A, B, C, D, E, F> Iterable<Sextuple<A, B, C, D, E, F>> zip6Padded(
        A aPad,
        B bPad,
        C cPad,
        D dPad,
        E ePad,
        F fPad,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es,
        Iterable<F> fs) {
    return () -> new Iterator<Sextuple<A, B, C, D, E, F>>() {
        private final Iterator<A> asi = as.iterator();
        private final Iterator<B> bsi = bs.iterator();
        private final Iterator<C> csi = cs.iterator();
        private final Iterator<D> dsi = ds.iterator();
        private final Iterator<E> esi = es.iterator();
        private final Iterator<F> fsi = fs.iterator();
        @Override
        public boolean hasNext() {
            return asi.hasNext() ||
                    bsi.hasNext() ||
                    csi.hasNext() ||
                    dsi.hasNext() ||
                    esi.hasNext() ||
                    fsi.hasNext();
        }
        @Override
        public Sextuple<A, B, C, D, E, F> next() {
            A a = asi.hasNext() ? asi.next() : aPad;
            B b = bsi.hasNext() ? bsi.next() : bPad;
            C c = csi.hasNext() ? csi.next() : cPad;
            D d = dsi.hasNext() ? dsi.next() : dPad;
            E e = esi.hasNext() ? esi.next() : ePad;
            F f = fsi.hasNext() ? fsi.next() : fPad;
            return new Sextuple<>(a, b, c, d, e, f);
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Seven-way padded zip; runs to the longest sequence.
public static <A, B, C, D, E, F, G> Iterable<Septuple<A, B, C, D, E, F, G>> zip7Padded(
        A aPad,
        B bPad,
        C cPad,
        D dPad,
        E ePad,
        F fPad,
        G gPad,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es,
        Iterable<F> fs,
        Iterable<G> gs) {
    return () -> new Iterator<Septuple<A, B, C, D, E, F, G>>() {
        private final Iterator<A> asi = as.iterator();
        private final Iterator<B> bsi = bs.iterator();
        private final Iterator<C> csi = cs.iterator();
        private final Iterator<D> dsi = ds.iterator();
        private final Iterator<E> esi = es.iterator();
        private final Iterator<F> fsi = fs.iterator();
        private final Iterator<G> gsi = gs.iterator();
        @Override
        public boolean hasNext() {
            return asi.hasNext() ||
                    bsi.hasNext() ||
                    csi.hasNext() ||
                    dsi.hasNext() ||
                    esi.hasNext() ||
                    fsi.hasNext() ||
                    gsi.hasNext();
        }
        @Override
        public Septuple<A, B, C, D, E, F, G> next() {
            A a = asi.hasNext() ? asi.next() : aPad;
            B b = bsi.hasNext() ? bsi.next() : bPad;
            C c = csi.hasNext() ? csi.next() : cPad;
            D d = dsi.hasNext() ? dsi.next() : dPad;
            E e = esi.hasNext() ? esi.next() : ePad;
            F f = fsi.hasNext() ? fsi.next() : fPad;
            G g = gsi.hasNext() ? gsi.next() : gPad;
            return new Septuple<>(a, b, c, d, e, f, g);
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException("cannot remove from this iterator");
        }
    };
}
// Zips two sequences and maps the combining function over the pairs.
public static <A, B, O> Iterable<O> zipWith(
        Function<Pair<A, B>, O> f,
        Iterable<A> as,
        Iterable<B> bs
) {
    return map(f, zip(as, bs));
}
// Three-sequence zipWith.
public static <A, B, C, O> Iterable<O> zipWith3(
        Function<Triple<A, B, C>, O> f,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs
) {
    return map(f, zip3(as, bs, cs));
}
// Four-sequence zipWith.
public static <A, B, C, D, O> Iterable<O> zipWith4(
        Function<Quadruple<A, B, C, D>, O> f,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds
) {
    return map(f, zip4(as, bs, cs, ds));
}
// Five-sequence zipWith.
public static <A, B, C, D, E, O> Iterable<O> zipWith5(
        Function<Quintuple<A, B, C, D, E>, O> f,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es
) {
    return map(f, zip5(as, bs, cs, ds, es));
}
// Six-sequence zipWith.
public static <A, B, C, D, E, F, O> Iterable<O> zipWith6(
        Function<Sextuple<A, B, C, D, E, F>, O> f,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es,
        Iterable<F> fs
) {
    return map(f, zip6(as, bs, cs, ds, es, fs));
}
/**
 * Seven-sequence zipWith: zips seven sequences and maps the combining function
 * over the resulting septuples.
 *
 * <p>Fix: this method was previously misnamed {@code zipWith6} (clashing in
 * spirit with the real six-argument overload above and breaking the naming
 * pattern set by {@code zipWith7Padded}). The old name is retained below as a
 * deprecated delegate so existing callers keep compiling.
 */
public static <A, B, C, D, E, F, G, O> Iterable<O> zipWith7(
        Function<Septuple<A, B, C, D, E, F, G>, O> f,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es,
        Iterable<F> fs,
        Iterable<G> gs
) {
    return map(f, zip7(as, bs, cs, ds, es, fs, gs));
}
/**
 * @deprecated misnamed seven-sequence overload; use
 * {@link #zipWith7(Function, Iterable, Iterable, Iterable, Iterable, Iterable, Iterable, Iterable)}.
 */
@Deprecated
public static <A, B, C, D, E, F, G, O> Iterable<O> zipWith6(
        Function<Septuple<A, B, C, D, E, F, G>, O> f,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es,
        Iterable<F> fs,
        Iterable<G> gs
) {
    return zipWith7(f, as, bs, cs, ds, es, fs, gs);
}
// Padded zipWith: runs to the longest sequence, substituting pads, then maps f.
public static <A, B, O> Iterable<O> zipWithPadded(
        Function<Pair<A, B>, O> f,
        A aPad,
        B bPad,
        Iterable<A> as,
        Iterable<B> bs
) {
    return map(f, zipPadded(aPad, bPad, as, bs));
}
// Three-sequence padded zipWith.
public static <A, B, C, O> Iterable<O> zipWith3Padded(
        Function<Triple<A, B, C>, O> f,
        A aPad,
        B bPad,
        C cPad,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs
) {
    return map(f, zip3Padded(aPad, bPad, cPad, as, bs, cs));
}
// Four-sequence padded zipWith.
public static <A, B, C, D, O> Iterable<O> zipWith4Padded(
        Function<Quadruple<A, B, C, D>, O> f,
        A aPad,
        B bPad,
        C cPad,
        D dPad,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds
) {
    return map(f, zip4Padded(aPad, bPad, cPad, dPad, as, bs, cs, ds));
}
// Five-sequence padded zipWith.
public static <A, B, C, D, E, O> Iterable<O> zipWith5Padded(
        Function<Quintuple<A, B, C, D, E>, O> f,
        A aPad,
        B bPad,
        C cPad,
        D dPad,
        E ePad,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es
) {
    return map(f, zip5Padded(aPad, bPad, cPad, dPad, ePad, as, bs, cs, ds, es));
}
// Six-sequence padded zipWith.
public static <A, B, C, D, E, F, O> Iterable<O> zipWith6Padded(
        Function<Sextuple<A, B, C, D, E, F>, O> f,
        A aPad,
        B bPad,
        C cPad,
        D dPad,
        E ePad,
        F fPad,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es,
        Iterable<F> fs
) {
    return map(f, zip6Padded(aPad, bPad, cPad, dPad, ePad, fPad, as, bs, cs, ds, es, fs));
}
// Seven-sequence padded zipWith.
public static <A, B, C, D, E, F, G, O> Iterable<O> zipWith7Padded(
        Function<Septuple<A, B, C, D, E, F, G>, O> f,
        A aPad,
        B bPad,
        C cPad,
        D dPad,
        E ePad,
        F fPad,
        G gPad,
        Iterable<A> as,
        Iterable<B> bs,
        Iterable<C> cs,
        Iterable<D> ds,
        Iterable<E> es,
        Iterable<F> fs,
        Iterable<G> gs
) {
    return map(f, zip7Padded(aPad, bPad, cPad, dPad, ePad, fPad, gPad, as, bs, cs, ds, es, fs, gs));
}
// Splits a sequence of pairs into two lazy sequences of components.
// Note: ps is traversed once per component sequence.
public static <A, B> Pair<Iterable<A>, Iterable<B>> unzip(Iterable<Pair<A, B>> ps) {
    return new Pair<>(
            map(p -> p.a, ps),
            map(p -> p.b, ps)
    );
}
// Splits a sequence of triples into three lazy component sequences.
public static <A, B, C> Triple<Iterable<A>, Iterable<B>, Iterable<C>> unzip3(Iterable<Triple<A, B, C>> ps) {
    return new Triple<>(
            map(p -> p.a, ps),
            map(p -> p.b, ps),
            map(p -> p.c, ps)
    );
}
// Splits a sequence of quadruples into four lazy component sequences.
public static <A, B, C, D> Quadruple<
        Iterable<A>,
        Iterable<B>,
        Iterable<C>,
        Iterable<D>
        > unzip4(Iterable<Quadruple<A, B, C, D>> ps) {
    return new Quadruple<>(
            map(p -> p.a, ps),
            map(p -> p.b, ps),
            map(p -> p.c, ps),
            map(p -> p.d, ps)
    );
}
// Splits a sequence of quintuples into five lazy component sequences.
public static <A, B, C, D, E> Quintuple<
        Iterable<A>,
        Iterable<B>,
        Iterable<C>,
        Iterable<D>,
        Iterable<E>
        > unzip5(Iterable<Quintuple<A, B, C, D, E>> ps) {
    return new Quintuple<>(
            map(p -> p.a, ps),
            map(p -> p.b, ps),
            map(p -> p.c, ps),
            map(p -> p.d, ps),
            map(p -> p.e, ps)
    );
}
// Splits a sequence of sextuples into six lazy component sequences.
public static <A, B, C, D, E, F> Sextuple<
        Iterable<A>,
        Iterable<B>,
        Iterable<C>,
        Iterable<D>,
        Iterable<E>,
        Iterable<F>
        > unzip6(Iterable<Sextuple<A, B, C, D, E, F>> ps) {
    return new Sextuple<>(
            map(p -> p.a, ps),
            map(p -> p.b, ps),
            map(p -> p.c, ps),
            map(p -> p.d, ps),
            map(p -> p.e, ps),
            map(p -> p.f, ps)
    );
}
// Splits a sequence of septuples into seven lazy component sequences.
public static <A, B, C, D, E, F, G> Septuple<
        Iterable<A>,
        Iterable<B>,
        Iterable<C>,
        Iterable<D>,
        Iterable<E>,
        Iterable<F>,
        Iterable<G>
        > unzip7(Iterable<Septuple<A, B, C, D, E, F, G>> ps) {
    return new Septuple<>(
            map(p -> p.a, ps),
            map(p -> p.b, ps),
            map(p -> p.c, ps),
            map(p -> p.d, ps),
            map(p -> p.e, ps),
            map(p -> p.f, ps),
            map(p -> p.g, ps)
    );
}
/**
 * Lazily removes duplicate elements, keeping the first occurrence of each.
 *
 * <p>Fix: the {@code seen} set used to be a field of the returned Iterable,
 * shared by every iterator it produced — so iterating the result a second time
 * yielded nothing (every element was already "seen"). The set now lives inside
 * each iterator, making the result safely re-iterable.
 */
public static @NotNull <T> Iterable<T> nub(@NotNull Iterable<T> xs) {
    return new Iterable<T>() {
        @Override
        public Iterator<T> iterator() {
            return new Iterator<T>() {
                private final Set<T> seen = new HashSet<>(); // per-iterator state
                private final Iterator<T> xsi = xs.iterator();
                private T next;
                private boolean hasNext;
                {
                    advance(); // prime the first distinct element
                }
                @Override
                public boolean hasNext() {
                    return hasNext;
                }
                @Override
                public T next() {
                    T current = next;
                    advance();
                    return current;
                }
                // Skips forward to the next not-yet-seen element, if any.
                private void advance() {
                    while (xsi.hasNext()) {
                        next = xsi.next();
                        if (seen.add(next)) { // add() is false for duplicates
                            hasNext = true;
                            return;
                        }
                    }
                    hasNext = false;
                }
                @Override
                public void remove() {
                    throw new UnsupportedOperationException("cannot remove from this iterator");
                }
            };
        }
    };
}
/** Returns {@code s} with duplicate characters removed, keeping first occurrences in order. */
public static @NotNull String nub(@NotNull String s) {
    Set<Character> seen = new HashSet<>();
    StringBuilder distinct = new StringBuilder();
    for (int i = 0; i < s.length(); i++) {
        char c = s.charAt(i);
        // Set.add reports whether the character is new.
        if (seen.add(c)) {
            distinct.append(c);
        }
    }
    return distinct.toString();
}
/**
 * Tells whether every element of {@code xs} also occurs in {@code ys}; duplicates and
 * order are ignored.
 * <p>
 * Bug fix: when {@code xs} was empty the original returned false if {@code ys} was also
 * empty (the emptiness check only ran inside the loop over {@code ys}). The empty set is
 * a subset of everything, so emptiness is now checked before scanning {@code ys}.
 *
 * @param xs the candidate subset
 * @param ys the candidate superset
 * @return true iff every element of {@code xs} appears in {@code ys}
 */
public static <T> boolean isSubsetOf(@NotNull Iterable<T> xs, @NotNull Iterable<T> ys) {
    HashSet<T> set = new HashSet<>();
    addTo(xs, set);
    if (set.isEmpty()) return true; // empty set is a subset of anything
    for (T y : ys) {
        set.remove(y);
        if (set.isEmpty()) return true; // all of xs has been accounted for
    }
    return false;
}
/**
 * Tells whether every character of {@code s} also occurs in {@code t}.
 * <p>
 * The original declared an unused type parameter {@code <T>}; it has been removed
 * (no caller can meaningfully supply it for a String/String overload).
 *
 * @param s the candidate subset of characters
 * @param t the candidate superset of characters
 * @return true iff every character of {@code s} appears in {@code t}
 */
public static boolean isSubsetOf(@NotNull String s, @NotNull String t) {
    return isSubsetOf(fromString(s), fromString(t));
}
/**
 * Returns the elements of {@code xss} in ascending natural order, as a fresh list.
 * The source iterable is not modified.
 *
 * @param xss the elements to sort
 * @return a new sorted {@code List}
 */
public static @NotNull <T extends Comparable<T>> List<T> sort(@NotNull Iterable<T> xss) {
    List<T> sorted = toList(xss);
    Collections.sort(sorted);
    return sorted;
}
/**
 * Returns the characters of {@code s} in ascending natural order.
 *
 * @param s the string to sort
 * @return a new string containing the characters of {@code s}, sorted
 */
public static @NotNull String sort(@NotNull String s) {
    List<Character> chars = toList(s);
    Collections.sort(chars);
    return charsToString(chars);
}
/**
 * Removes duplicates from {@code xs} under a caller-supplied equivalence: an element is
 * dropped if it equals an already-kept element, or if {@code p} holds for the pair
 * (element, previously-kept element). The result is lazy.
 * <p>
 * Bug fix: the "seen" set used to be a field of the returned {@code Iterable}, shared by
 * every iterator it produced, so iterating the result a second time yielded nothing.
 * The set is now created per-iterator, making the result safely re-iterable.
 *
 * @param p  extra equivalence test applied between a candidate and each kept element
 * @param xs the source elements
 * @return a lazy view of {@code xs} with duplicates (under the equivalence) removed
 */
public static @NotNull <T> Iterable<T> nub(@NotNull Predicate<Pair<T, T>> p, @NotNull Iterable<T> xs) {
    return new Iterable<T>() {
        @Override
        public Iterator<T> iterator() {
            return new Iterator<T>() {
                // Fresh per iterator so each traversal starts from scratch.
                private final Set<T> seen = new HashSet<>();
                private final Iterator<T> xsi = xs.iterator();
                private T next;
                private boolean hasNext;
                {
                    advance();
                }
                @Override
                public boolean hasNext() {
                    return hasNext;
                }
                @Override
                public T next() {
                    T current = next;
                    advance();
                    return current;
                }
                // Skip forward to the first element not equivalent to any kept one.
                private void advance() {
                    while (xsi.hasNext()) {
                        next = xsi.next();
                        boolean good = !seen.contains(next) && !any(x -> p.test(new Pair<T, T>(next, x)), seen);
                        if (good) {
                            seen.add(next);
                            hasNext = true;
                            return;
                        }
                    }
                    hasNext = false;
                }
                @Override
                public void remove() {
                    throw new UnsupportedOperationException("cannot remove from this iterator");
                }
            };
        }
    };
}
/**
 * Removes duplicate characters from {@code s} under a caller-supplied equivalence: a
 * character is dropped if it was already kept, or if {@code p} holds for the pair
 * (character, previously-kept character).
 *
 * @param p extra equivalence test applied between a candidate and each kept character
 * @param s the source string
 * @return {@code s} with duplicates (under the equivalence) removed
 */
public static @NotNull String nub(@NotNull Predicate<Pair<Character, Character>> p, @NotNull String s) {
    Set<Character> seen = new HashSet<>();
    StringBuilder result = new StringBuilder();
    for (int i = 0; i < s.length(); i++) {
        char c = s.charAt(i);
        // De Morgan of the keep-condition: duplicate if already seen or equivalent to a kept char.
        boolean duplicate = seen.contains(c) || any(x -> p.test(new Pair<Character, Character>(c, x)), seen);
        if (!duplicate) {
            seen.add(c);
            result.append(c);
        }
    }
    return result.toString();
}
/**
 * Returns the elements of {@code xss} ordered by {@code comparator}, as a fresh list.
 * NOTE(review): the {@code Comparable} bound is redundant given the explicit comparator,
 * but is kept so the signature stays compatible with existing callers.
 *
 * @param comparator the ordering to apply
 * @param xss        the elements to sort
 * @return a new {@code List} sorted by {@code comparator}
 */
public static @NotNull <T extends Comparable<T>> List<T> sort(
        @NotNull Comparator<T> comparator,
        @NotNull Iterable<T> xss
) {
    List<T> sorted = toList(xss);
    sorted.sort(comparator);
    return sorted;
}
/**
 * Returns the characters of {@code s} ordered by {@code comparator}.
 *
 * @param comparator the ordering to apply
 * @param s          the string to sort
 * @return a new string with the characters of {@code s} sorted by {@code comparator}
 */
public static @NotNull String sort(@NotNull Comparator<Character> comparator, @NotNull String s) {
    List<Character> chars = toList(s);
    chars.sort(comparator);
    return charsToString(chars);
}
/**
 * Returns the largest element of {@code xs} according to {@code comparator}, by folding
 * the pairwise {@code max} over the elements. Behavior on an empty source follows
 * {@code foldl1} (no identity element is available).
 *
 * @param comparator the ordering to maximize under
 * @param xs         the elements to search
 * @return the maximum element
 */
public static @NotNull <T extends Comparable<T>> T maximum(
        @NotNull Comparator<T> comparator,
        @NotNull Iterable<T> xs
) {
    return foldl1(pair -> max(comparator, pair.a, pair.b), xs);
}
/**
 * Returns the largest character of {@code s} according to {@code comparator}, by folding
 * the pairwise {@code max} over the characters.
 *
 * @param comparator the ordering to maximize under
 * @param s          the characters to search; behavior on "" follows {@code foldl1}
 * @return the maximum character
 */
public static char maximum(@NotNull Comparator<Character> comparator, @NotNull String s) {
    return foldl1(pair -> max(comparator, pair.a, pair.b), fromString(s));
}
/**
 * Returns the smallest element of {@code xs} according to {@code comparator}, by folding
 * the pairwise {@code min} over the elements. Behavior on an empty source follows
 * {@code foldl1} (no identity element is available).
 *
 * @param comparator the ordering to minimize under
 * @param xs         the elements to search
 * @return the minimum element
 */
public static @NotNull <T extends Comparable<T>> T minimum(
        @NotNull Comparator<T> comparator,
        @NotNull Iterable<T> xs
) {
    return foldl1(pair -> min(comparator, pair.a, pair.b), xs);
}
/**
 * Returns the smallest character of {@code s} according to {@code comparator}, by folding
 * the pairwise {@code min} over the characters.
 *
 * @param comparator the ordering to minimize under
 * @param s          the characters to search; behavior on "" follows {@code foldl1}
 * @return the minimum character
 */
public static char minimum(@NotNull Comparator<Character> comparator, @NotNull String s) {
    return foldl1(pair -> min(comparator, pair.a, pair.b), fromString(s));
}
/**
 * Lazily partitions {@code xs} into maximal runs of consecutive elements. A run is grown
 * while {@code p} accepts the pair (first element of the run, candidate element) — note
 * the comparison is always against the run's FIRST element, not the previous one.
 * <p>
 * NOTE(review): if {@code xs} is empty, advance() still builds a list containing a single
 * null, but hasNext is false so that list is never exposed — confirm this is intended.
 */
public static @NotNull <T> Iterable<List<T>> group(
        @NotNull Predicate<Pair<T, T>> p,
        @NotNull Iterable<T> xs
) {
    return new Iterable<List<T>>() {
        @Override
        public Iterator<List<T>> iterator() {
            return new Iterator<List<T>>() {
                private Iterator<T> xsi = xs.iterator();
                private boolean hasNext = xsi.hasNext();
                // Set once the underlying iterator is exhausted; the current `next`
                // group is then the final one.
                private boolean isLast = false;
                // Look-ahead element: first element of the group being built.
                private T nextX = null;
                // The group that will be returned by the next call to next().
                private List<T> next = null;
                {
                    if (hasNext) {
                        nextX = xsi.next();
                    }
                    advance();
                }
                @Override
                public boolean hasNext() {
                    return hasNext;
                }
                @Override
                public List<T> next() {
                    if (isLast) {
                        // Source is exhausted: hand out the final group and stop.
                        hasNext = false;
                        return next;
                    } else {
                        List<T> oldNext = next;
                        advance();
                        return oldNext;
                    }
                }
                // Builds the next group: pulls elements while p accepts them paired
                // with the group's first element. Leaves the first rejected element
                // in nextX as the seed of the following group.
                private void advance() {
                    T original = nextX;
                    List<T> list = new ArrayList<>();
                    do {
                        list.add(nextX);
                        if (!xsi.hasNext()) {
                            isLast = true;
                            break;
                        }
                        nextX = xsi.next();
                    } while (p.test(new Pair<T, T>(original, nextX)));
                    next = list;
                }
                @Override
                public void remove() {
                    throw new UnsupportedOperationException("cannot remove from this iterator");
                }
            };
        }
    };
}
/**
 * Lazily partitions {@code s} into maximal runs of consecutive characters, where a run is
 * grown while {@code p} accepts the pair (first character of the run, candidate).
 *
 * @param p acceptance test between a run's first character and a candidate
 * @param s the string to partition
 * @return the runs of {@code s}, each rendered back as a string
 */
public static @NotNull Iterable<String> group(
        @NotNull Predicate<Pair<Character, Character>> p,
        @NotNull String s
) {
    Iterable<List<Character>> runs = group(p, fromString(s));
    return map(IterableUtils::charsToString, runs);
}
/**
 * Tells whether {@code xs} and {@code ys} contain equal elements in the same order,
 * comparing with {@link Objects#equals} (so null elements are permitted).
 *
 * @param xs the first sequence
 * @param ys the second sequence
 * @return true iff both sequences have the same length and pairwise-equal elements
 */
public static <T> boolean equal(@NotNull Iterable<T> xs, @NotNull Iterable<T> ys) {
    Iterator<T> first = xs.iterator();
    Iterator<T> second = ys.iterator();
    while (first.hasNext() && second.hasNext()) {
        if (!Objects.equals(first.next(), second.next())) return false;
    }
    // Equal only if both sequences ran out at the same time.
    return !first.hasNext() && !second.hasNext();
}
}
|
added inits and tails
|
src/main/java/mho/wheels/iterables/IterableUtils.java
|
added inits and tails
|
|
Java
|
mit
|
a3bfcefdca61daa9a205b9ddc363adaef72b8553
| 0
|
MylesIsCool/ViaVersion,Matsv/ViaVersion
|
package us.myles.ViaVersion.commands;
import lombok.RequiredArgsConstructor;
import org.bukkit.Bukkit;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import us.myles.ViaVersion.ViaVersionPlugin;
import us.myles.ViaVersion.api.ViaVersion;
import java.util.ArrayList;
import java.util.List;
/**
 * Handler for the /viaversion admin command: shows plugin info and toggles runtime
 * options (auto-team, update checks, debug logging).
 * <p>
 * Created by fillefilip8 on 2016-03-03.
 */
@RequiredArgsConstructor
public class ViaVersionCommand implements CommandExecutor {
    // Injected via Lombok's @RequiredArgsConstructor.
    private final ViaVersionPlugin plugin;
    /**
     * Dispatches /viaversion subcommands for senders with the viaversion.admin
     * permission; unknown or missing subcommands print the usage/help text.
     * NOTE(review): always returns false — in Bukkit that normally triggers the
     * plugin.yml usage message; confirm this is intended.
     */
    @Override
    public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args) {
        if (sender.hasPermission("viaversion.admin")) {
            if (args.length == 0) {
                // No subcommand: show version and available commands.
                sender.sendMessage(color("&aViaVersion &c" + ViaVersion.getInstance().getVersion()));
                sender.sendMessage(color("&6Commands:"));
                sender.sendMessage(color("&2/viaversion list &7- &6Shows lists of all 1.9 clients and 1.8 clients."));
                sender.sendMessage(color("&2/viaversion autoteam &7- &6Toggle automatically teaming to prevent colliding."));
                sender.sendMessage(color("&2/viaversion dontbugme &7- &6Toggle checking for updates."));
            } else if (args.length == 1) {
                if (args[0].equalsIgnoreCase("list")) {
                    // Partition online players by whether ViaVersion "ported" them (1.9 protocol).
                    List<String> portedPlayers = new ArrayList<>();
                    List<String> normalPlayers = new ArrayList<>();
                    for (Player p : Bukkit.getOnlinePlayers()) {
                        if (ViaVersion.getInstance().isPorted(p)) {
                            portedPlayers.add(p.getName());
                        } else {
                            normalPlayers.add(p.getName());
                        }
                    }
                    sender.sendMessage(color("&8[&61.9&8]: &b" + portedPlayers.toString()));
                    sender.sendMessage(color("&8[&61.8&8]: &b" + normalPlayers.toString()));
                } else if (args[0].equalsIgnoreCase("debug")) {
                    // In-memory toggle only (not persisted to config).
                    plugin.setDebug(!plugin.isDebug());
                    sender.sendMessage(color("&6Debug mode is now " + (plugin.isDebug() ? "&aenabled" : "&cdisabled")));
                } else if (args[0].equalsIgnoreCase("dontbugme")) {
                    // Toggle and persist the update-check setting.
                    boolean newValue = !plugin.getConfig().getBoolean("checkforupdates", true);
                    plugin.getConfig().set("checkforupdates", newValue);
                    plugin.saveConfig();
                    sender.sendMessage(color("&6We will " + (newValue ? "&anotify you about updates." : "&cnot tell you about updates.")));
                } else if (args[0].equalsIgnoreCase("autoteam")) {
                    // Toggle and persist auto-teaming; takes effect on re-login.
                    boolean newValue = !plugin.getConfig().getBoolean("auto-team", true);
                    plugin.getConfig().set("auto-team", newValue);
                    plugin.saveConfig();
                    sender.sendMessage(color("&6We will " + (newValue ? "&aautomatically team players" : "&cno longer auto team players")));
                    sender.sendMessage(color("&6All players will need to re-login for the change to take place."));
                } else {
                    // Unknown subcommand: fall back to the help text.
                    sender.sendMessage(color("&aViaVersion &c" + ViaVersion.getInstance().getVersion()));
                    sender.sendMessage(color("&6Commands:"));
                    sender.sendMessage(color("&2/viaversion list &7- &6Shows lists of all 1.9 clients and 1.8 clients."));
                    sender.sendMessage(color("&2/viaversion autoteam &7- &6Toggle automatically teaming to prevent colliding."));
                    sender.sendMessage(color("&2/viaversion dontbugme &7- &6Toggle checking for updates."));
                }
            }
        }
        return false;
    }
    /**
     * Translates '&amp;'-style color codes into Minecraft's section-sign codes.
     */
    public String color(String string) {
        return string.replace("&", "§");
    }
}
|
src/main/java/us/myles/ViaVersion/commands/ViaVersionCommand.java
|
package us.myles.ViaVersion.commands;
import lombok.RequiredArgsConstructor;
import org.bukkit.Bukkit;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import us.myles.ViaVersion.ViaVersionPlugin;
import us.myles.ViaVersion.api.ViaVersion;
import java.util.ArrayList;
import java.util.List;
/**
 * Handler for the /viaversion admin command: shows plugin info and toggles runtime
 * options (auto-team, update checks, debug logging).
 * <p>
 * Created by fillefilip8 on 2016-03-03.
 */
@RequiredArgsConstructor
public class ViaVersionCommand implements CommandExecutor {
    // Injected via Lombok's @RequiredArgsConstructor.
    private final ViaVersionPlugin plugin;

    /**
     * Dispatches /viaversion subcommands for senders with the viaversion.admin
     * permission.
     * <p>
     * Improvements over the previous version: the subcommand checks are now a single
     * else-if chain (each argument is matched at most once instead of re-testing every
     * branch), and an unknown subcommand falls back to the usage text instead of
     * silently doing nothing.
     * NOTE(review): always returns false — in Bukkit that normally triggers the
     * plugin.yml usage message; kept as-is for compatibility.
     */
    @Override
    public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args) {
        if (sender.hasPermission("viaversion.admin")) {
            if (args.length == 0) {
                sendUsage(sender);
            } else if (args.length == 1) {
                if (args[0].equalsIgnoreCase("list")) {
                    // Partition online players by whether ViaVersion "ported" them (1.9 protocol).
                    List<String> portedPlayers = new ArrayList<>();
                    List<String> normalPlayers = new ArrayList<>();
                    for (Player p : Bukkit.getOnlinePlayers()) {
                        if (ViaVersion.getInstance().isPorted(p)) {
                            portedPlayers.add(p.getName());
                        } else {
                            normalPlayers.add(p.getName());
                        }
                    }
                    sender.sendMessage(color("&8[&61.9&8]: &b" + portedPlayers.toString()));
                    sender.sendMessage(color("&8[&61.8&8]: &b" + normalPlayers.toString()));
                } else if (args[0].equalsIgnoreCase("debug")) {
                    // In-memory toggle only (not persisted to config).
                    plugin.setDebug(!plugin.isDebug());
                    sender.sendMessage(color("&6Debug mode is now " + (plugin.isDebug() ? "&aenabled" : "&cdisabled")));
                } else if (args[0].equalsIgnoreCase("dontbugme")) {
                    // Toggle and persist the update-check setting.
                    boolean newValue = !plugin.getConfig().getBoolean("checkforupdates", true);
                    plugin.getConfig().set("checkforupdates", newValue);
                    plugin.saveConfig();
                    sender.sendMessage(color("&6We will " + (newValue ? "&anotify you about updates." : "&cnot tell you about updates.")));
                } else if (args[0].equalsIgnoreCase("autoteam")) {
                    // Toggle and persist auto-teaming; takes effect on re-login.
                    boolean newValue = !plugin.getConfig().getBoolean("auto-team", true);
                    plugin.getConfig().set("auto-team", newValue);
                    plugin.saveConfig();
                    sender.sendMessage(color("&6We will " + (newValue ? "&aautomatically team players" : "&cno longer auto team players")));
                    sender.sendMessage(color("&6All players will need to re-login for the change to take place."));
                } else {
                    // Unknown subcommand: show the usage text instead of silently ignoring it.
                    sendUsage(sender);
                }
            }
        }
        return false;
    }

    /** Sends the plugin version and the list of available subcommands. */
    private void sendUsage(CommandSender sender) {
        sender.sendMessage(color("&aViaVersion &c" + ViaVersion.getInstance().getVersion()));
        sender.sendMessage(color("&6Commands:"));
        sender.sendMessage(color("&2/viaversion list &7- &6Shows lists of all 1.9 clients and 1.8 clients."));
        sender.sendMessage(color("&2/viaversion autoteam &7- &6Toggle automatically teaming to prevent colliding."));
        sender.sendMessage(color("&2/viaversion dontbugme &7- &6Toggle checking for updates."));
    }

    /**
     * Translates '&amp;'-style color codes into Minecraft's section-sign codes.
     */
    public String color(String string) {
        return string.replace("&", "§");
    }
}
|
Added fallback options on CommandHandler
|
src/main/java/us/myles/ViaVersion/commands/ViaVersionCommand.java
|
Added fallback options on CommandHandler
|
|
Java
|
mit
|
6503450cdd1ebfb9e2fa874de6676bbf7531c5b1
| 0
|
The-Dream-Team/Tardis,The-Dream-Team/Tardis
|
package me.dreamteam.tardis;
import java.awt.Canvas;
import java.awt.Dimension;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.image.BufferStrategy;
import javax.swing.JFrame;
import javax.swing.JPanel;
/**
 * Main Class: sets up the game window (a Canvas inside a fixed-size JFrame) and runs
 * the main loop. Work-in-progress skeleton — rendering and updating are not wired yet.
 */
public class Game extends Canvas {
    /**
     * Begin the game parameters that will allow us to define certain elements.
     */
    // Double-buffering strategy; provides hardware acceleration.
    // NOTE(review): declared but never initialized here — presumably set up later
    // via createBufferStrategy; confirm before use.
    private BufferStrategy strategy;
    // Is the game running or not? Controls the main loop below.
    private boolean isRunning = true;
    // Version set up so that we can see where we are at (window title pieces).
    private String gameName = "Codename TARDIS ";
    private String build = "Alpha ";
    private String version = "0.1";
    /**
     * Builds the window: a 500x650 JFrame hosting this Canvas, non-resizable, with
     * repainting disabled (we render ourselves in accelerated mode) and an exit-on-close
     * listener.
     */
    public Game() {
        // create a frame to contain our game
        JFrame container = new JFrame(gameName + "- " + build + version);
        // get hold the content of the frame and set up the resolution of the game
        JPanel panel = (JPanel) container.getContentPane();
        panel.setPreferredSize(new Dimension(500,650));
        // Katie feel free to change this to the dimensions as given in the photoshop document
        panel.setLayout(null);
        // setup our canvas size and put it into the content of the frame
        setBounds(0,0,500,650);
        panel.add(this);
        // Tell AWT not to bother repainting our canvas since we're
        // going to do that our self in accelerated mode
        setIgnoreRepaint(true);
        // finally make the window visible
        container.pack();
        container.setResizable(false);
        container.setVisible(true);
        // add a listener to respond to the user closing the window. If they
        // do we'd like to exit the game
        container.addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                System.exit(0);
            }
        });
    }
    /**
     * Garbage collection and looping — not implemented yet.
     */
    private void startGame() {
    }
    /**
     * Main loop: runs while isRunning is true.
     * NOTE(review): the loop body only measures elapsed time (delta is currently unused
     * — presumably groundwork for frame timing) and busy-waits, which pegs a CPU core;
     * consider sleeping/yielding once update/render calls are added.
     */
    public void gameLoop() {
        long lastLoopTime = System.currentTimeMillis();
        while (isRunning) {
            // Milliseconds since the previous iteration (for future frame timing).
            long delta = System.currentTimeMillis() - lastLoopTime;
            lastLoopTime = System.currentTimeMillis();
        }
    }
    /**
     * Game Start: builds the window and enters the main loop.
     */
    public static void main(String argv[]) {
        Game g =new Game();
        // Start the main game loop
        g.gameLoop();
    }
}
|
src/me/dreamteam/tardis/Game.java
|
package me.dreamteam.tardis;
import java.awt.Canvas;
import java.awt.image.BufferStrategy;
/**
 * Main Class: early skeleton of the game — only the core state fields exist so far.
 */
public class Game extends Canvas {
    /**
     * Begin the game parameters that will allow us to define certain elements.
     */
    // Double-buffering strategy; provides hardware acceleration.
    // NOTE(review): never initialized in this snapshot — to be set up with
    // createBufferStrategy later.
    private BufferStrategy strategy;
    // Is the game running or not? Will control the (future) main loop.
    private boolean isRunning = true;
}
|
Build the canvas, add more params, game loop, main!!!
|
src/me/dreamteam/tardis/Game.java
|
Build the canvas, add more params, game loop, main!!!
|
|
Java
|
mit
|
ab873c3401b339324862ec80ade55dfabfb74e86
| 0
|
ltearno/pom-explorer,ltearno/pom-explorer,ltearno/pom-explorer,ltearno/pom-explorer
|
package fr.lteconsulting.pomexplorer;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStreamReader;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Scanner;
import java.util.Set;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.Model;
import org.jboss.shrinkwrap.resolver.api.maven.pom.ParsedPomFile;
import org.jboss.shrinkwrap.resolver.impl.maven.pom.ParsedPomFileImpl;
import fr.lteconsulting.hexa.client.tools.Func1;
import fr.lteconsulting.pomexplorer.changes.Change;
import fr.lteconsulting.pomexplorer.changes.ChangeSetManager;
import fr.lteconsulting.pomexplorer.depanalyze.GavLocation;
import fr.lteconsulting.pomexplorer.depanalyze.Location;
import fr.lteconsulting.pomexplorer.graph.relation.Relation;
/**
 * Static utilities for pom-explorer: GAV parsing/formatting, dependency-location lookup
 * across a project graph, HTML log message builders, and small file/collection helpers.
 * <p>
 * Fixes in this revision:
 * <ul>
 *   <li>{@link #gavAlphabeticalComparator} computed the version comparison but always
 *       returned 0, so equal group/artifact GAVs never sorted by version.</li>
 *   <li>{@link #readFile(File)} leaked its Scanner and threw NoSuchElementException on an
 *       empty file; it now uses try-with-resources and returns "" for empty files.</li>
 *   <li>{@link #readFileLines(String)} could leak its reader on exception; it now uses
 *       try-with-resources.</li>
 * </ul>
 */
public class Tools
{
    /**
     * Parses a "groupId:artifactId:version" string into a {@link GAV}.
     *
     * @param gavString colon-separated coordinates
     * @return the parsed GAV, or null when the input is null or not exactly three parts
     */
    public static GAV string2Gav(String gavString)
    {
        if (gavString == null)
            return null; // robustness: tolerate missing input
        String[] parts = gavString.split(":");
        if (parts.length != 3)
            return null;
        return new GAV(parts[0], parts[1], parts[2]);
    }

    /**
     * Logs every change as HTML, sorted by the absolute path of the owning project's
     * pom file; changes without a project sort first.
     */
    public static void printChangeList(ILogger log, ChangeSetManager changes)
    {
        log.html("<br/>Change list...<br/><br/>");
        List<Change<? extends Location>> changeList = new ArrayList<>();
        for (Change<? extends Location> c : changes)
            changeList.add(c);
        Collections.sort(changeList, new Comparator<Change<? extends Location>>()
        {
            @Override
            public int compare(Change<? extends Location> o1, Change<? extends Location> o2)
            {
                Project p1 = o1.getLocation().getProject();
                Project p2 = o2.getLocation().getProject();
                // Project-less changes sort before everything else.
                if (p1 == null && p2 == null)
                    return 0;
                if (p1 == null)
                    return -1;
                if (p2 == null)
                    return 1;
                return p1.getPomFile().getAbsolutePath().compareTo(p2.getPomFile().getAbsolutePath());
            }
        });
        for (Change<? extends Location> c : changeList)
        {
            log.html(c.toString());
        }
    }

    /***
     * Maven tools
     */

    /**
     * Finds, for each project that directly depends on {@code gav}, the location in that
     * project's pom where the dependency is declared. Projects that cannot be resolved,
     * or whose declaration cannot be located, are logged (when {@code log} is non-null)
     * and skipped.
     */
    public static Set<Location> getDirectDependenciesLocations(WorkingSession session, ILogger log, GAV gav)
    {
        Set<Location> set = new HashSet<>();
        Set<Relation> relations = session.graph().relationsReverse(gav);
        for (Relation relation : relations)
        {
            GAV updatedGav = relation.getSource();
            Project updatedProject = session.projects().forGav(updatedGav);
            if (updatedProject == null)
            {
                if (log != null)
                    log.html(Tools.warningMessage("Cannot find project for GAV " + updatedGav + " which dependency should be modified ! skipping."));
                continue;
            }
            Location dependencyLocation = Tools.findDependencyLocation(session, log, updatedProject, relation);
            if (dependencyLocation == null)
            {
                if (log != null)
                    log.html(Tools.errorMessage("Cannot find the location of dependency to " + relation.getTarget() + " in this project " + updatedProject));
                continue;
            }
            set.add(dependencyLocation);
        }
        return set;
    }

    /**
     * Lists the Maven property names referenced (as "${...}") by the GAV's group id,
     * artifact id, or version.
     *
     * @return the property names, or null when {@code gav} is null
     */
    public static List<String> getMavenProperties(GAV gav)
    {
        if (gav == null)
            return null;
        ArrayList<String> res = new ArrayList<>();
        if (isMavenVariable(gav.getGroupId()))
            res.add(extractMavenProperty(gav.getGroupId()));
        if (isMavenVariable(gav.getArtifactId()))
            res.add(extractMavenProperty(gav.getArtifactId()));
        if (isMavenVariable(gav.getVersion()))
            res.add(extractMavenProperty(gav.getVersion()));
        return res;
    }

    /** Tells whether {@code text} is a Maven property reference of the form "${name}". */
    public static boolean isMavenVariable(String text)
    {
        return text != null && text.startsWith("${") && text.endsWith("}");
    }

    /** Strips the "${" and "}" delimiters from a Maven property reference. */
    private static String extractMavenProperty(String variable)
    {
        assert isMavenVariable(variable);
        return variable.substring(2, variable.length() - 1);
    }

    /**
     * Walks up the parent chain starting at {@code startingProject} to find the project
     * that defines {@code property}. "project.*" properties resolve to the starting
     * project itself.
     *
     * @return the defining project, or null when no ancestor defines the property
     */
    public static Project getPropertyDefinitionProject(WorkingSession session, Project startingProject, String property)
    {
        if (property.startsWith("project."))
            return startingProject;
        // search a property definition in the project. if found, return it
        String value = propertyValue(startingProject, property);
        if (value != null)
            return startingProject;
        // go deeper in hierarchy
        GAV parentGav = session.graph().parent(startingProject.getGav());
        Project parentProject = null;
        if (parentGav != null)
            parentProject = session.projects().forGav(parentGav);
        if (parentProject != null)
        {
            Project definition = getPropertyDefinitionProject(session, parentProject, property);
            if (definition != null)
                return definition;
        }
        return null;
    }

    /** Returns the project's own (unresolved) value for {@code property}, or null. */
    private static String propertyValue(Project startingProject, String property)
    {
        Object res = startingProject.getUnresolvedPom().getProperties().get(property);
        if (res instanceof String)
            return (String) res;
        return null;
    }

    /**
     * Locates where {@code relation}'s target is declared in {@code project}'s pom,
     * dispatching on the relation type (dependency, build/plugin dependency, or parent).
     *
     * @return the location, or null when it cannot be found
     */
    public static Location findDependencyLocation(WorkingSession session, ILogger log, Project project, Relation relation)
    {
        // A self-reference points at the project declaration itself.
        if (project.getGav().equals(relation.getTarget()))
            return new GavLocation(project, PomSection.PROJECT, project.getGav());
        Location dependencyLocation = null;
        switch (relation.getRelationType())
        {
        case DEPENDENCY:
            dependencyLocation = findDependencyLocationInDependencies(session, log, project, relation.getTarget());
            break;
        case BUILD_DEPENDENCY:
            dependencyLocation = findDependencyLocationInPlugins(session, project, relation.getTarget());
            break;
        case PARENT:
            dependencyLocation = new GavLocation(project, PomSection.PARENT, relation.getTarget(), relation.getTarget());
            break;
        }
        return dependencyLocation;
    }

    /** Strips "${...}" delimiters when present; otherwise returns {@code name} unchanged. */
    public static String getPropertyNameFromPropertyReference(String name)
    {
        if (!(name.startsWith("${") && name.endsWith("}")))
            return name;
        return name.substring(2, name.length() - 1);
    }

    /**
     * Locates the declaration of {@code searchedDependency} for {@code project}: first in
     * its own dependencies (only when a version is declared there), then in its dependency
     * management, then recursively in its parent chain.
     *
     * @return the location, or null when it cannot be found
     */
    public static GavLocation findDependencyLocationInDependencies(WorkingSession session, ILogger log, Project project, GAV searchedDependency)
    {
        if (project == null)
            return null;
        // dependencies
        GavLocation info = project.getDependencies().get(searchedDependency);
        if (info != null && info.getUnresolvedGav() != null && info.getUnresolvedGav().getVersion() != null)
            return info;
        // dependency management
        GavLocation locationInDepMngt = findDependencyLocationInDependencyManagement(session, project, searchedDependency);
        if (locationInDepMngt != null)
            return locationInDepMngt;
        // parent
        GAV parentGav = session.graph().parent(project.getGav());
        if (parentGav != null)
        {
            Project parentProject = session.projects().forGav(parentGav);
            if (parentProject == null)
            {
                log.html(Tools.warningMessage("Cannot find the '" + project.getGav() + "' parent project '" + parentGav + "' to examine where the dependency '" + searchedDependency + "' is defined."));
                return null;
            }
            GavLocation locationInParent = findDependencyLocationInDependencies(session, log, parentProject, searchedDependency);
            if (locationInParent != null)
                return locationInParent;
        }
        return null;
    }

    /**
     * Searches the project's own dependencyManagement section for an entry matching the
     * group and artifact ids of {@code searchedDependency}.
     *
     * @return the location, or null when absent
     */
    public static GavLocation findDependencyLocationInDependencyManagement(WorkingSession session, Project project, GAV searchedDependency)
    {
        if (project.getUnresolvedPom().getDependencyManagement() == null)
            return null;
        if (project.getUnresolvedPom().getDependencyManagement().getDependencies() == null)
            return null;
        for (Dependency d : project.getUnresolvedPom().getDependencyManagement().getDependencies())
        {
            // Version is intentionally ignored when matching: management pins the version.
            if (searchedDependency.getGroupId().equals(d.getGroupId()) && searchedDependency.getArtifactId().equals(d.getArtifactId()))
            {
                GAV g = new GAV(d.getGroupId(), d.getArtifactId(), d.getVersion());
                return new GavLocation(project, PomSection.DEPENDENCY_MNGT, searchedDependency, g);
            }
        }
        return null;
    }

    /**
     * Locates the declaration of {@code searchedPlugin} in the project's plugin
     * dependencies, recursing into the parent chain when not found locally.
     *
     * @return the location, or null when it cannot be found
     */
    public static GavLocation findDependencyLocationInPlugins(WorkingSession session, Project project, GAV searchedPlugin)
    {
        if (project == null)
            return null;
        GavLocation info = project.getPluginDependencies().get(searchedPlugin);
        if (info != null)
            return info;
        // TODO search in the plugin management section
        // find in parent
        return findDependencyLocationInPlugins(session, session.projects().forGav(session.graph().parent(project.getGav())), searchedPlugin);
    }

    // Lazily-resolved handle on ParsedPomFileImpl's private "model" field.
    // NOTE(review): the lazy init below is not synchronized — fine for single-threaded
    // use; confirm callers do not race.
    private static Field modelField;

    /**
     * Extracts the underlying Maven {@link Model} from a {@link ParsedPomFile} by
     * reflecting on the private "model" field of {@link ParsedPomFileImpl} (the API does
     * not expose it).
     *
     * @return the model, or null when reflection fails
     */
    public static Model getParsedPomFileModel(ParsedPomFile parsedPomFile)
    {
        if (modelField == null)
        {
            try
            {
                modelField = ParsedPomFileImpl.class.getDeclaredField("model");
                modelField.setAccessible(true);
            }
            catch (NoSuchFieldException | SecurityException | IllegalArgumentException e)
            {
                e.printStackTrace();
                return null;
            }
        }
        try
        {
            Model model = (Model) modelField.get(parsedPomFile);
            return model;
        }
        catch (IllegalArgumentException | IllegalAccessException e)
        {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Collection utilities
     */

    /** Returns the elements of {@code list} accepted by {@code predicate}; never null. */
    public static <T> List<T> filter(Iterable<T> list, Func1<T, Boolean> predicate)
    {
        List<T> res = new ArrayList<>();
        if (list == null)
            return res;
        for (T t : list)
            if (predicate.exec(t))
                res.add(t);
        return res;
    }

    /** Array overload of {@link #filter(Iterable, Func1)}; never null. */
    public static <T> List<T> filter(T[] list, Func1<T, Boolean> predicate)
    {
        List<T> res = new ArrayList<>();
        if (list == null)
            return res;
        for (T t : list)
            if (predicate.exec(t))
                res.add(t);
        return res;
    }

    /**
     * Orders GAVs by group id, then artifact id, then version (null versions first).
     * Fixed: the version comparison result was computed but discarded, so the comparator
     * always returned 0 for GAVs sharing group and artifact ids.
     */
    public static final Comparator<GAV> gavAlphabeticalComparator = new Comparator<GAV>()
    {
        @Override
        public int compare(GAV o1, GAV o2)
        {
            int r = o1.getGroupId().compareTo(o2.getGroupId());
            if (r != 0)
                return r;
            r = o1.getArtifactId().compareTo(o2.getArtifactId());
            if (r != 0)
                return r;
            if (o1.getVersion() == null && o2.getVersion() == null)
                return 0;
            if (o1.getVersion() == null)
                return -1;
            if (o2.getVersion() == null)
                return 1;
            // Was: r = ...compareTo(...); return 0; — the result was thrown away.
            return o1.getVersion().compareTo(o2.getVersion());
        }
    };

    /** Wraps {@code message} in a plain HTML log line. */
    public static String logMessage(String message)
    {
        return "<span style=''>" + message + "</span><br/>";
    }

    /** Wraps {@code message} in an orange (warning) HTML log line. */
    public static String warningMessage(String message)
    {
        return "<span style='color:orange;'>" + message + "</span><br/>";
    }

    /** Wraps {@code message} in a green (success) HTML log line. */
    public static String successMessage(String message)
    {
        return "<span style='color:green;'>" + message + "</span><br/>";
    }

    /** Wraps {@code message} in a grey, smaller (build output) HTML log line. */
    public static String buildMessage(String message)
    {
        return "<span style='color:grey;font-size:90%;'>" + message + "</span><br/>";
    }

    /** Wraps {@code message} in a red (error) HTML log line. */
    public static String errorMessage(String message)
    {
        return "<span style='color:red;'>" + message + "</span><br/>";
    }

    /**
     *
     */
    private final static String SNAPSHOT_SUFFIX = "-SNAPSHOT";

    /** Tells whether the GAV's version is a release (does not end with -SNAPSHOT). */
    public static boolean isReleased(GAV gav)
    {
        return !gav.getVersion().endsWith(SNAPSHOT_SUFFIX);
    }

    /** Returns a copy of {@code gav} with the -SNAPSHOT suffix removed, if present. */
    public static GAV releasedGav(GAV gav)
    {
        if (!isReleased(gav))
            return new GAV(gav.getGroupId(), gav.getArtifactId(), gav.getVersion().substring(0, gav.getVersion().length() - SNAPSHOT_SUFFIX.length()));
        return gav;
    }

    /**
     * For a released GAV, returns the next development ("open") version: the major
     * component is incremented (hard coded policy) and -SNAPSHOT is appended, keeping the
     * original number of version components. Non-numeric components default to 0; a
     * version with more than three parts gets "-open" appended instead. Snapshot GAVs are
     * returned unchanged.
     */
    public static GAV openGavVersion(GAV gav)
    {
        if (!isReleased(gav))
            return gav;
        String version = gav.getVersion();
        int major = 0;
        int minor = 0;
        int patch = 0;
        String[] parts = version.split("\\.");
        if (parts.length > 0)
        {
            try
            {
                major = Integer.parseInt(parts[0]);
            }
            catch (Exception e)
            {
                // ignore: non-numeric major keeps the default 0
            }
        }
        if (parts.length > 1)
        {
            try
            {
                minor = Integer.parseInt(parts[1]);
            }
            catch (Exception e)
            {
                // ignore: non-numeric minor keeps the default 0
            }
        }
        if (parts.length > 2)
        {
            try
            {
                patch = Integer.parseInt(parts[2]);
            }
            catch (Exception e)
            {
                // ignore: non-numeric patch keeps the default 0
            }
        }
        // new version, hard coded major version upgrade !
        major++;
        if (parts.length == 3)
            version = String.format("%1d.%1d.%1d", major, minor, patch);
        else if (parts.length == 2)
            version = String.format("%1d.%1d", major, minor);
        else if (parts.length == 1)
            version = String.format("%1d", major);
        else
            version += "-open";
        return gav.copyWithVersion(version + SNAPSHOT_SUFFIX);
    }

    /**
     * Reads a whole file into a String assuming the file is UTF-8 encoded.
     * Fixed: the Scanner is now closed (try-with-resources) and an empty file yields ""
     * instead of throwing NoSuchElementException.
     *
     * @return the file content, or null if the file does not exist
     */
    public static String readFile(File file)
    {
        try (Scanner scanner = new Scanner(file, "UTF-8"))
        {
            // "\\A" anchors at the beginning of input, so next() returns the whole file.
            scanner.useDelimiter("\\A");
            return scanner.hasNext() ? scanner.next() : "";
        }
        catch (FileNotFoundException e)
        {
            return null;
        }
    }

    /**
     * Reads the UTF-8 file at {@code path} into a list of lines. Missing files yield an
     * empty list; on a read error the lines read so far are returned (best effort).
     * Fixed: the reader is now closed even when an exception occurs.
     */
    public static List<String> readFileLines(String path)
    {
        ArrayList<String> res = new ArrayList<String>();
        File file = new File(path);
        if (!file.exists())
            return res;
        try (BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF8")))
        {
            String str;
            while ((str = in.readLine()) != null)
            {
                res.add(str);
            }
        }
        catch (Exception e)
        {
            // best effort: an unreadable file yields whatever was read before the failure
        }
        return res;
    }

    /**
     * Logs {@code e}'s message and stack trace as HTML lines, unwrapping
     * InvocationTargetException to report the underlying target exception.
     */
    public static void dumpStacktrace(Exception e, ILogger log)
    {
        Throwable t = e;
        if (t instanceof InvocationTargetException)
            t = ((InvocationTargetException) t).getTargetException();
        log.html(t.toString() + "<br/>");
        for (StackTraceElement st : t.getStackTrace())
            log.html(st.toString() + "<br/>");
    }
}
|
pom-explorer/src/main/java/fr/lteconsulting/pomexplorer/Tools.java
|
package fr.lteconsulting.pomexplorer;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStreamReader;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Scanner;
import java.util.Set;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.Model;
import org.jboss.shrinkwrap.resolver.api.maven.pom.ParsedPomFile;
import org.jboss.shrinkwrap.resolver.impl.maven.pom.ParsedPomFileImpl;
import fr.lteconsulting.hexa.client.tools.Func1;
import fr.lteconsulting.pomexplorer.changes.Change;
import fr.lteconsulting.pomexplorer.changes.ChangeSetManager;
import fr.lteconsulting.pomexplorer.depanalyze.GavLocation;
import fr.lteconsulting.pomexplorer.depanalyze.Location;
import fr.lteconsulting.pomexplorer.graph.relation.Relation;
public class Tools
{
/**
 * Parses a "groupId:artifactId:version" string into a {@link GAV}.
 * Robustness: a null input now returns null instead of throwing NullPointerException.
 *
 * @param gavString colon-separated coordinates; may be null
 * @return the parsed GAV, or null when the input is null or not exactly three parts
 */
public static GAV string2Gav( String gavString )
{
    if( gavString == null )
        return null; // tolerate missing input
    String[] parts = gavString.split( ":" );
    if( parts.length != 3 )
        return null;
    return new GAV( parts[0], parts[1], parts[2] );
}
/**
 * Logs every change in {@code changes} as an HTML line, sorted by the absolute path of
 * the pom file of the project each change belongs to; changes without a project sort
 * first.
 */
public static void printChangeList( ILogger log, ChangeSetManager changes )
{
    log.html( "<br/>Change list...<br/><br/>" );
    List<Change<? extends Location>> changeList = new ArrayList<>();
    for( Change<? extends Location> c : changes )
        changeList.add( c );
    Collections.sort( changeList, new Comparator<Change<? extends Location>>()
    {
        @Override
        public int compare( Change<? extends Location> o1, Change<? extends Location> o2 )
        {
            Project p1 = o1.getLocation().getProject();
            Project p2 = o2.getLocation().getProject();
            // Project-less changes sort before everything else.
            if( p1 == null && p2 == null )
                return 0;
            if( p1 == null )
                return -1;
            if( p2 == null )
                return 1;
            return p1.getPomFile().getAbsolutePath().compareTo( p2.getPomFile().getAbsolutePath() );
        }
    } );
    for( Change<? extends Location> c : changeList )
    {
        log.html( c.toString() );
    }
}
/***
 * Maven tools
 */
/**
 * Finds, for each project that directly depends on {@code gav}, the location in that
 * project's pom where the dependency is declared. Projects that cannot be resolved, or
 * whose declaration cannot be located, are logged (when {@code log} is non-null) and
 * skipped.
 */
public static Set<Location> getDirectDependenciesLocations( WorkingSession session, ILogger log, GAV gav )
{
    Set<Location> set = new HashSet<>();
    // Reverse relations: everything that points AT gav.
    Set<Relation> relations = session.graph().relationsReverse(gav);
    for (Relation relation : relations)
    {
        GAV updatedGav = relation.getSource();
        Project updatedProject = session.projects().forGav( updatedGav );
        if( updatedProject == null )
        {
            if( log != null )
                log.html( Tools.warningMessage( "Cannot find project for GAV " + updatedGav + " which dependency should be modified ! skipping." ) );
            continue;
        }
        Location dependencyLocation = Tools.findDependencyLocation( session, log, updatedProject, relation );
        if( dependencyLocation == null )
        {
            if( log != null )
                log.html( Tools.errorMessage( "Cannot find the location of dependency to " + relation.getTarget() + " in this project " + updatedProject ) );
            continue;
        }
        set.add( dependencyLocation );
    }
    return set;
}
/**
 * Lists the Maven property names referenced (as "${...}") by the GAV's group id,
 * artifact id, or version, in that order.
 *
 * @param gav the coordinates to inspect; may be null
 * @return the referenced property names, or null when {@code gav} is null
 */
public static List<String> getMavenProperties( GAV gav )
{
    if( gav == null )
        return null;
    ArrayList<String> res = new ArrayList<>();
    // Check each coordinate part for a "${...}" reference.
    String[] coordinates = { gav.getGroupId(), gav.getArtifactId(), gav.getVersion() };
    for( String part : coordinates )
    {
        if( isMavenVariable( part ) )
            res.add( extractMavenProperty( part ) );
    }
    return res;
}
/**
 * Tells whether {@code text} is a Maven property reference of the form "${name}".
 * Null input is accepted and reported as not-a-variable.
 */
public static boolean isMavenVariable( String text )
{
    if( text == null )
        return false;
    return text.startsWith( "${" ) && text.endsWith( "}" );
}
/**
 * Strips the "${" and "}" delimiters from a Maven property reference.
 * Precondition (asserted): {@code variable} satisfies {@link #isMavenVariable(String)}.
 */
private static String extractMavenProperty( String variable )
{
    assert isMavenVariable( variable );
    int closingBrace = variable.length() - 1;
    return variable.substring( 2, closingBrace );
}
/**
 * Walks up the parent chain starting at {@code startingProject} to find the project that
 * defines {@code property}. Properties named "project.*" resolve to the starting project
 * itself.
 *
 * @return the defining project, or null when no ancestor defines the property
 */
public static Project getPropertyDefinitionProject( WorkingSession session, Project startingProject, String property )
{
    // "project.*" properties are implicit: they belong to the project being resolved.
    if( property.startsWith( "project." ) )
        return startingProject;
    // search a property definition in the project. if found, return it
    String value = propertyValue( startingProject, property );
    if( value != null )
        return startingProject;
    // go deeper in hierarchy
    GAV parentGav = session.graph().parent( startingProject.getGav() );
    Project parentProject = null;
    if( parentGav != null )
        parentProject = session.projects().forGav( parentGav );
    if( parentProject != null )
    {
        // Recurse: the nearest ancestor that defines the property wins.
        Project definition = getPropertyDefinitionProject( session, parentProject, property );
        if( definition != null )
            return definition;
    }
    return null;
}
/**
 * Returns the project's own (unresolved pom) value for {@code property}, or null when
 * the property is absent or not a String.
 */
private static String propertyValue( Project startingProject, String property )
{
    Object value = startingProject.getUnresolvedPom().getProperties().get( property );
    return value instanceof String ? (String) value : null;
}
/**
 * Locates where {@code relation}'s target is declared in {@code project}'s pom,
 * dispatching on the relation type (dependency, build/plugin dependency, or parent).
 *
 * @return the location, or null when it cannot be found
 */
public static Location findDependencyLocation(WorkingSession session, ILogger log, Project project, Relation relation)
{
    // A self-reference points at the project declaration itself.
    if( project.getGav().equals( relation.getTarget() ) )
        return new GavLocation( project, PomSection.PROJECT, project.getGav() );
    Location dependencyLocation = null;
    switch (relation.getRelationType())
    {
    case DEPENDENCY:
        dependencyLocation = findDependencyLocationInDependencies( session, log, project, relation.getTarget() );
        break;
    case BUILD_DEPENDENCY:
        dependencyLocation = findDependencyLocationInPlugins( session, project, relation.getTarget() );
        break;
    case PARENT:
        dependencyLocation = new GavLocation( project, PomSection.PARENT, relation.getTarget(), relation.getTarget() );
        break;
    }
    return dependencyLocation;
}
/**
 * Strips the "${...}" delimiters from a property reference; inputs that are not
 * references are returned unchanged.
 */
public static String getPropertyNameFromPropertyReference( String name )
{
    boolean isReference = name.startsWith( "${" ) && name.endsWith( "}" );
    if( isReference )
        return name.substring( 2, name.length() - 1 );
    return name;
}
/**
 * Locates the declaration of {@code searchedDependency} for {@code project}: first in its
 * own dependencies (only when a version is declared there), then in its dependency
 * management, then recursively in its parent chain.
 *
 * @return the location, or null when it cannot be found
 */
public static GavLocation findDependencyLocationInDependencies( WorkingSession session, ILogger log, Project project, GAV searchedDependency )
{
    if( project == null )
        return null;
    // dependencies
    GavLocation info = project.getDependencies().get( searchedDependency );
    // Only a declaration carrying its own version counts; version-less entries are
    // resolved elsewhere (management or parent).
    if( info != null && info.getUnresolvedGav() != null && info.getUnresolvedGav().getVersion() != null )
        return info;
    // dependency management
    GavLocation locationInDepMngt = findDependencyLocationInDependencyManagement( session, project, searchedDependency );
    if( locationInDepMngt != null )
        return locationInDepMngt;
    // parent
    GAV parentGav = session.graph().parent( project.getGav() );
    if( parentGav != null )
    {
        Project parentProject = session.projects().forGav( parentGav );
        if( parentProject == null )
        {
            log.html( Tools.warningMessage( "Cannot find the '" + project.getGav() + "' parent project '" + parentGav + "' to examine where the dependency '" + searchedDependency + "' is defined." ) );
            return null;
        }
        GavLocation locationInParent = findDependencyLocationInDependencies( session, log, parentProject, searchedDependency );
        if( locationInParent != null )
            return locationInParent;
    }
    return null;
}
/**
 * Searches the project's own dependencyManagement section for the given
 * dependency, matched by groupId + artifactId only (the managed entry
 * supplies the version).
 *
 * @param session the working session (not used by the lookup itself)
 * @param project the project whose unresolved POM is inspected
 * @param searchedDependency the dependency GAV being located
 * @return the managed-dependency location, or {@code null} when absent
 */
public static GavLocation findDependencyLocationInDependencyManagement( WorkingSession session, Project project, GAV searchedDependency )
{
    if( project.getUnresolvedPom().getDependencyManagement() == null )
        return null;
    if( project.getUnresolvedPom().getDependencyManagement().getDependencies() == null )
        return null;
    for( Dependency d : project.getUnresolvedPom().getDependencyManagement().getDependencies() )
    {
        // version is deliberately ignored when matching: management entries pin it
        if( searchedDependency.getGroupId().equals( d.getGroupId() ) && searchedDependency.getArtifactId().equals( d.getArtifactId() ) )
        {
            GAV g = new GAV( d.getGroupId(), d.getArtifactId(), d.getVersion() );
            return new GavLocation( project, PomSection.DEPENDENCY_MNGT, searchedDependency, g );
        }
    }
    return null;
}
/**
 * Finds where the given build plugin is declared, walking up the parent
 * chain. Recursion terminates because a project without a resolvable
 * parent yields a {@code null} project on the next call.
 *
 * @param session the working session giving access to the project graph
 * @param project the project to search; {@code null} returns {@code null}
 * @param searchedPlugin the plugin GAV being located
 * @return the plugin declaration location, or {@code null} when not found
 */
public static GavLocation findDependencyLocationInPlugins( WorkingSession session, Project project, GAV searchedPlugin )
{
    if( project == null )
        return null;
    GavLocation info = project.getPluginDependencies().get( searchedPlugin );
    if( info != null )
        return info;
    // TODO search in the plugin management section
    // not found locally: recurse into the parent project (null parent ends the recursion)
    return findDependencyLocationInPlugins( session, session.projects().forGav( session.graph().parent( project.getGav() ) ), searchedPlugin );
}
// Cached reflective handle on ParsedPomFileImpl's private "model" field.
// NOTE(review): lazy initialization is not thread-safe — confirm callers are single-threaded.
private static Field modelField;
/**
 * Extracts the Maven {@link Model} hidden inside a {@link ParsedPomFile} via
 * reflection, since the implementation does not expose it publicly.
 *
 * @param parsedPomFile the parsed POM wrapper (expected to be a ParsedPomFileImpl)
 * @return the underlying model, or {@code null} when reflection fails
 */
public static Model getParsedPomFileModel( ParsedPomFile parsedPomFile )
{
    if( modelField == null )
    {
        try
        {
            // break encapsulation once; the Field is reused for later calls
            modelField = ParsedPomFileImpl.class.getDeclaredField( "model" );
            modelField.setAccessible( true );
        }
        catch( NoSuchFieldException | SecurityException | IllegalArgumentException e )
        {
            e.printStackTrace();
            return null;
        }
    }
    try
    {
        Model model = (Model) modelField.get( parsedPomFile );
        return model;
    }
    catch( IllegalArgumentException | IllegalAccessException e )
    {
        e.printStackTrace();
        return null;
    }
}
/**
* Collection utilities
*/
/**
 * Collects the elements of {@code list} accepted by {@code predicate}.
 * A {@code null} input yields an empty list.
 *
 * @param list the elements to filter, may be {@code null}
 * @param predicate acceptance test applied to each element
 * @return a new list containing the accepted elements, in iteration order
 */
public static <T> List<T> filter( Iterable<T> list, Func1<T, Boolean> predicate )
{
    List<T> accepted = new ArrayList<>();
    if( list != null )
    {
        for( T item : list )
        {
            if( predicate.exec( item ) )
                accepted.add( item );
        }
    }
    return accepted;
}
/**
 * Collects the elements of the array accepted by {@code predicate}.
 * A {@code null} array yields an empty list.
 *
 * @param list the elements to filter, may be {@code null}
 * @param predicate acceptance test applied to each element
 * @return a new list containing the accepted elements, in array order
 */
public static <T> List<T> filter( T[] list, Func1<T, Boolean> predicate )
{
    List<T> accepted = new ArrayList<>();
    if( list != null )
    {
        for( T item : list )
        {
            if( predicate.exec( item ) )
                accepted.add( item );
        }
    }
    return accepted;
}
/**
 * Orders GAVs alphabetically by groupId, then artifactId, then version.
 * A {@code null} version sorts before any non-null version; two
 * {@code null} versions compare equal.
 */
public static final Comparator<GAV> gavAlphabeticalComparator = new Comparator<GAV>()
{
    @Override
    public int compare( GAV o1, GAV o2 )
    {
        int r = o1.getGroupId().compareTo( o2.getGroupId() );
        if( r != 0 )
            return r;
        r = o1.getArtifactId().compareTo( o2.getArtifactId() );
        if( r != 0 )
            return r;
        if( o1.getVersion() == null && o2.getVersion() == null )
            return 0;
        if( o1.getVersion() == null )
            return -1;
        if( o2.getVersion() == null )
            return 1;
        // BUG FIX: the version comparison result was previously computed but
        // discarded (the method always returned 0 here), making GAVs that
        // differ only by version compare as equal.
        return o1.getVersion().compareTo( o2.getVersion() );
    }
};
/** Wraps {@code message} in an unstyled HTML span followed by a line break. */
public static String logMessage( String message )
{
    StringBuilder html = new StringBuilder();
    html.append( "<span style=''>" ).append( message ).append( "</span><br/>" );
    return html.toString();
}
/** Wraps {@code message} in an orange HTML span followed by a line break. */
public static String warningMessage( String message )
{
    StringBuilder html = new StringBuilder();
    html.append( "<span style='color:orange;'>" ).append( message ).append( "</span><br/>" );
    return html.toString();
}
/** Wraps {@code message} in a green HTML span followed by a line break. */
public static String successMessage( String message )
{
    StringBuilder html = new StringBuilder();
    html.append( "<span style='color:green;'>" ).append( message ).append( "</span><br/>" );
    return html.toString();
}
/** Wraps {@code message} in a small grey HTML span followed by a line break. */
public static String buildMessage( String message )
{
    StringBuilder html = new StringBuilder();
    html.append( "<span style='color:grey;font-size:90%;'>" ).append( message ).append( "</span><br/>" );
    return html.toString();
}
/** Wraps {@code message} in a red HTML span followed by a line break. */
public static String errorMessage( String message )
{
    StringBuilder html = new StringBuilder();
    html.append( "<span style='color:red;'>" ).append( message ).append( "</span><br/>" );
    return html.toString();
}
/**
*
*/
/** Suffix Maven appends to versions of unreleased (snapshot) builds. */
private final static String SNAPSHOT_SUFFIX = "-SNAPSHOT";
/**
 * Tells whether the gav's version denotes a released build, i.e. its
 * version does not end with the {@code -SNAPSHOT} suffix.
 *
 * @param gav the gav to check; its version must be non-null
 * @return {@code true} for a released version
 */
public static boolean isReleased( GAV gav )
{
    return !gav.getVersion().endsWith( SNAPSHOT_SUFFIX );
}
/**
 * Returns the released form of the gav: a snapshot version has its
 * {@code -SNAPSHOT} suffix removed, an already-released gav is returned as is.
 *
 * @param gav the gav to convert
 * @return a gav with a release version
 */
public static GAV releasedGav( GAV gav )
{
    if( isReleased( gav ) )
        return gav;
    String version = gav.getVersion();
    String releasedVersion = version.substring( 0, version.length() - SNAPSHOT_SUFFIX.length() );
    return new GAV( gav.getGroupId(), gav.getArtifactId(), releasedVersion );
}
/**
 * Opens the next development version for a released gav: the major version
 * component is incremented (hard-coded policy) and the {@code -SNAPSHOT}
 * suffix is appended, preserving the number of dotted components. A gav that
 * is already a snapshot is returned unchanged. Non-numeric components parse
 * as 0; a version with more than three dots gets a literal "-open" marker.
 *
 * @param gav the gav to open
 * @return the opened snapshot gav, or the input when already a snapshot
 */
public static GAV openGavVersion( GAV gav )
{
    if( !isReleased( gav ) )
        return gav;
    String version = gav.getVersion();
    String[] parts = version.split( "\\." );
    int major = parseVersionPart( parts, 0 );
    int minor = parseVersionPart( parts, 1 );
    int patch = parseVersionPart( parts, 2 );
    // new version, hard coded major version upgrade !
    major++;
    if( parts.length == 3 )
        version = String.format( "%1d.%1d.%1d", major, minor, patch );
    else if( parts.length == 2 )
        version = String.format( "%1d.%1d", major, minor );
    else if( parts.length == 1 )
        version = String.format( "%1d", major );
    else
        version += "-open";
    return gav.copyWithVersion( version + SNAPSHOT_SUFFIX );
}

/**
 * Parses the {@code index}-th dotted version component, returning 0 when the
 * component is absent or not a number (same tolerance as the previous
 * three duplicated try/catch blocks, now factored out).
 */
private static int parseVersionPart( String[] parts, int index )
{
    if( index >= parts.length )
        return 0;
    try
    {
        return Integer.parseInt( parts[index] );
    }
    catch( NumberFormatException e )
    {
        return 0;
    }
}
/**
* Reads a whole file into a String assuming the file is UTF-8 encoded
*/
/**
 * Reads a whole file into a String assuming the file is UTF-8 encoded.
 *
 * @param file the file to read
 * @return the file content ("" for an empty file), or {@code null} when the
 *         file does not exist or cannot be opened
 */
public static String readFile( File file )
{
    // try-with-resources: the previous version never closed the Scanner (fd leak)
    try( Scanner scanner = new Scanner( file, "UTF-8" ) )
    {
        scanner.useDelimiter( "\\A" );
        // hasNext() guards against NoSuchElementException on an empty file
        return scanner.hasNext() ? scanner.next() : "";
    }
    catch( FileNotFoundException e )
    {
        return null;
    }
}
/**
 * Reads a UTF-8 text file line by line.
 *
 * @param path path of the file to read
 * @return the file's lines in order; an empty list when the file is missing.
 *         On a read error, the lines read so far are returned (best effort,
 *         matching the previous silent-failure behavior).
 */
public static List<String> readFileLines( String path )
{
    ArrayList<String> res = new ArrayList<String>();
    File file = new File( path );
    if( !file.exists() )
        return res;
    // try-with-resources: the previous version leaked the reader when
    // readLine() threw, since close() was not in a finally block
    try( BufferedReader in = new BufferedReader( new InputStreamReader( new FileInputStream( file ), "UTF8" ) ) )
    {
        String str;
        while( (str = in.readLine()) != null )
        {
            res.add( str );
        }
    }
    catch( IOException e )
    {
        // best effort: keep whatever was read before the failure
    }
    return res;
}
/**
 * Writes the exception's stacktrace to the HTML logger inside a
 * {@code <pre>} block. Reflective-call wrappers are unwrapped first so the
 * real target exception is the one reported ("better stacktrace dump").
 *
 * @param e the exception to dump
 * @param log the HTML logger to write to
 */
public static void dumpStacktrace(Exception e, ILogger log)
{
    Throwable t = e;
    // unwrap InvocationTargetException to reach the underlying cause
    if (t instanceof InvocationTargetException)
        t = ((InvocationTargetException)t).getTargetException();
    log.html("<pre>" + t.toString() + "\r\n");
    for (StackTraceElement st : t.getStackTrace())
    {
        log.html(st.toString() + "\r\n");
    }
    log.html("</pre>");
}
}
|
better stacktrace dump
|
pom-explorer/src/main/java/fr/lteconsulting/pomexplorer/Tools.java
|
better stacktrace dump
|
|
Java
|
mit
|
3dd97bdb019e6c4eec58bf34120e519fb1441757
| 0
|
AgriCraft/AgriCore
|
package com.agricraft.agricore.util;
import com.agricraft.agricore.core.AgriCore;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import org.reflections.Reflections;
import org.reflections.scanners.ResourcesScanner;
public class ResourceHelper {

    /**
     * Copies every class-path resource whose name and path pass the given
     * predicates to the destination computed by {@code toFunction}.
     *
     * @param nameFilter the predicate for the file name
     * @param dirFilter the predicate for the file (directory) structure
     * @param toFunction function mapping each resource to its destination path
     * @param overwrite whether already-existing files should be overwritten
     */
    public static void copyResources(Predicate<String> nameFilter, Predicate<String> dirFilter, Function<String, Path> toFunction, boolean overwrite) {
        ResourceHelper helper = new ResourceHelper();
        for (String resource : helper.findResources(nameFilter)) {
            if (dirFilter.test(resource)) {
                helper.copyResource(resource, toFunction.apply(resource), overwrite);
            }
        }
    }

    /**
     * Reflections instance used to find jsons
     */
    private final Reflections reflections;

    /**
     * The Reflections object can use quite some memory, therefore we instantiate a new one when we need it,
     * and discard it afterwards
     */
    protected ResourceHelper() {
        this.reflections = new Reflections(null, new ResourcesScanner());
    }

    /**
     * Finds all resources in the class path matching the given predicate
     * @param nameFilter file name predicate
     * @return set of all filenames matching the name filter
     */
    protected Set<String> findResources(Predicate<String> nameFilter) {
        return this.reflections.getResources(nameFilter::test);
    }

    /**
     * Copies a file from inside the jar to the specified location outside the
     * jar, retaining the file name. The default copy action is to not overwrite
     * an existing file.
     *
     * @param from the location of the internal resource.
     * @param to the location to copy the resource to.
     * @param overwrite if the copy task should overwrite existing files.
     */
    protected void copyResource(String from, Path to, boolean overwrite) {
        try {
            if (!overwrite && Files.exists(to)) {
                return;
            }
            Files.createDirectories(to.getParent());
            Files.copy(this.getResourceAsStream(from), to, StandardCopyOption.REPLACE_EXISTING);
        } catch (Exception e) {
            AgriCore.getLogger("AgriCraft").error(
                    "Unable to copy Jar resource: \"{0}\" to: \"{1}\"!",
                    from,
                    to
            );
            e.printStackTrace();
        }
    }

    /**
     * Retrieves the requested resource by using the current thread's class
     * loader or the AgriCore class loader.
     *
     * @param location the location of the desired resource stream.
     * @return the resource, as a stream.
     */
    protected InputStream getResourceAsStream(String location) {
        InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream(location);
        if (stream == null) {
            stream = ResourceHelper.class.getResourceAsStream(location);
        }
        return stream;
    }
}
|
src/main/java/com/agricraft/agricore/util/ResourceHelper.java
|
package com.agricraft.agricore.util;
import com.agricraft.agricore.core.AgriCore;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Set;
import java.util.function.Predicate;
import org.reflections.Reflections;
import org.reflections.scanners.ResourcesScanner;
/**
 * Static helpers for locating class-path resources and copying them out of
 * the jar onto the file system.
 */
public class ResourceHelper {
// Reflections scanner held for the lifetime of the class; used to enumerate class-path resources.
private static final Reflections REFLECTIONS = new Reflections(null, new ResourcesScanner());
/**
 * Finds all class-path resources whose names match the given predicate.
 *
 * @param nameFilter file name predicate
 * @return the set of matching resource names
 */
public static Set<String> findResources(Predicate<String> nameFilter) {
return REFLECTIONS.getResources(nameFilter::test);
}
/**
 * Copies a file from inside the jar to the specified location outside the
 * jar, retaining the file name. The default copy action is to not overwrite
 * an existing file.
 *
 * @param from the location of the internal resource.
 * @param to the location to copy the resource to.
 * @param overwrite if the copy task should overwrite existing files.
 */
public static void copyResource(String from, Path to, boolean overwrite) {
try {
if (overwrite || !Files.exists(to)) {
// create parent directories first so the copy cannot fail on a missing path
Files.createDirectories(to.getParent());
Files.copy(getResourceAsStream(from), to, StandardCopyOption.REPLACE_EXISTING);
}
} catch (Exception e) {
AgriCore.getLogger("AgriCraft").error(
"Unable to copy Jar resource: \"{0}\" to: \"{1}\"!",
from,
to
);
e.printStackTrace();
}
}
/**
 * Retrieves the requested resource by using the current thread's class
 * loader or the AgriCore class loader.
 *
 * @param location the location of the desired resource stream.
 * @return the resource, as a stream.
 */
public static InputStream getResourceAsStream(String location) {
InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream(location);
return in != null ? in : ResourceHelper.class.getResourceAsStream(location);
}
}
|
Optimize ResourceHelper memory usage
Fixes #10
|
src/main/java/com/agricraft/agricore/util/ResourceHelper.java
|
Optimize ResourceHelper memory usage
|
|
Java
|
mit
|
927f086f6adecda5182ed15e13e7e79180b9b1ea
| 0
|
gscrot/gscrot-watermark
|
package gscrot.processor.watermark;
import gscrot.processor.watermark.WatermarkPlugin.Mode;
import gscrot.processor.watermark.WatermarkPlugin.Position;
import iconlib.IconUtils;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.File;
import javax.imageio.ImageIO;
import com.redpois0n.gscrot.GraphicsImageProcessor;
/**
 * Image processor that stamps a watermark — either a text string or an image
 * file, as configured in {@link WatermarkPlugin} — into one of the four
 * corners of the processed picture.
 */
public class WatermarkProcessor extends GraphicsImageProcessor {
public WatermarkProcessor() {
super("Watermark", IconUtils.getIcon("watermark", WatermarkProcessor.class));
}
@Override
public void process(Graphics2D g, int width, int height) {
if (WatermarkPlugin.mode == Mode.TEXT) {
// text watermark: render the configured string in the configured corner
g.setFont(WatermarkPlugin.font);
g.setColor(WatermarkPlugin.foreground);
String s = WatermarkPlugin.string;
if (WatermarkPlugin.position == Position.TOPLEFT) {
g.drawString(s, 10, 10 + g.getFontMetrics().getHeight());
} else if (WatermarkPlugin.position == Position.TOPRIGHT) {
g.drawString(s, width - g.getFontMetrics().stringWidth(s) - 10, 10);
} else if (WatermarkPlugin.position == Position.BOTTOMLEFT) {
g.drawString(s, 10, height - g.getFontMetrics().getHeight());
} else if (WatermarkPlugin.position == Position.BOTTOMRIGHT) {
g.drawString(s, width - g.getFontMetrics().stringWidth(s) - 10, height - g.getFontMetrics().getHeight());
}
} else if (WatermarkPlugin.mode == Mode.IMAGE) {
// image watermark: the file is re-read on every call — NOTE(review): consider caching; confirm call frequency
try {
File file = WatermarkPlugin.file;
if (file != null) {
BufferedImage image = ImageIO.read(file);
if (WatermarkPlugin.position == Position.TOPLEFT) {
g.drawImage(image, 10, 10, null);
} else if (WatermarkPlugin.position == Position.TOPRIGHT) {
g.drawImage(image, width - image.getWidth() - 10, 10, null);
} else if (WatermarkPlugin.position == Position.BOTTOMLEFT) {
g.drawImage(image, 10, height - image.getHeight() - 10, null);
} else if (WatermarkPlugin.position == Position.BOTTOMRIGHT) {
g.drawImage(image, width - image.getWidth() - 10, height - image.getHeight() - 10, null);
}
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
}
}
|
src/gscrot/processor/watermark/WatermarkProcessor.java
|
package gscrot.processor.watermark;
import gscrot.processor.watermark.WatermarkPlugin.Mode;
import gscrot.processor.watermark.WatermarkPlugin.Position;
import iconlib.IconUtils;
import java.awt.Graphics2D;
import com.redpois0n.gscrot.GraphicsImageProcessor;
/**
 * Image processor that stamps a text watermark into one of the four corners
 * of the processed picture, as configured in {@link WatermarkPlugin}.
 */
public class WatermarkProcessor extends GraphicsImageProcessor {
public WatermarkProcessor() {
super("Watermark", IconUtils.getIcon("watermark", WatermarkProcessor.class));
}
@Override
public void process(Graphics2D g, int width, int height) {
if (WatermarkPlugin.mode == Mode.TEXT) {
g.setFont(WatermarkPlugin.font);
g.setColor(WatermarkPlugin.foreground);
String s = WatermarkPlugin.string;
if (WatermarkPlugin.position == Position.TOPLEFT) {
g.drawString(s, 10, 10 + g.getFontMetrics().getHeight());
} else if (WatermarkPlugin.position == Position.TOPRIGHT) {
g.drawString(s, width - g.getFontMetrics().stringWidth(s) - 10, 10);
} else if (WatermarkPlugin.position == Position.BOTTOMLEFT) {
g.drawString(s, 10, height - g.getFontMetrics().getHeight());
} else if (WatermarkPlugin.position == Position.BOTTOMRIGHT) {
g.drawString(s, width - g.getFontMetrics().stringWidth(s) - 10, height - g.getFontMetrics().getHeight());
}
} else if (WatermarkPlugin.mode == Mode.IMAGE) {
// image watermark mode is not implemented yet: intentionally a no-op
}
}
}
|
Draw image
|
src/gscrot/processor/watermark/WatermarkProcessor.java
|
Draw image
|
|
Java
|
mit
|
6fb59020510c6b7927d13b395b7fb15d6e7534f1
| 0
|
GuntherDW/inventory-tweaks-liteloader,TerraGamingNetwork/inventory-tweaks,asiekierka/inventory-tweaks,TGNThump/inventory-tweaks,14mRh4X0r/inventory-tweaks,PrinceOfAmber/inventory-tweaks,TerraGamingNetwork/inventory-tweaks,PrinceOfAmber/inventory-tweaks,GuntherDW/inventory-tweaks-liteloader,asiekierka/inventory-tweaks,Vexatos/inventory-tweaks,TGNThump/inventory-tweaks,Kobata/inventory-tweaks,Vexatos/inventory-tweaks,14mRh4X0r/inventory-tweaks,mrammy/inventory-tweaks,mrammy/inventory-tweaks
|
package invtweaks;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.lang.reflect.Method;
import invtweaks.api.ContainerSection;
import invtweaks.api.ContainerGUI;
import invtweaks.api.InventoryGUI;
import net.minecraft.inventory.Container;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.src.InvTweaksObfuscation;
import net.minecraft.inventory.Slot;
/**
 * Compatibility layer recognizing third-party mods' container GUIs, both via
 * the InvTweaks API annotations ({@link ContainerGUI}/{@link InventoryGUI})
 * and via hard-coded class-name heuristics for known mods.
 */
public class InvTweaksModCompatibility {
// Obfuscation bridge used to reach Minecraft internals in a mapping-independent way.
private InvTweaksObfuscation obf;
public InvTweaksModCompatibility(InvTweaksObfuscation obf) {
this.obf = obf;
}
/**
 * Returns true if the screen is a chest/dispenser,
 * despite not being a GuiChest or a GuiDispenser.
 * @param guiScreen the screen to classify
 * @return true when the screen should be treated as a chest-like container
 */
public boolean isSpecialChest(GuiScreen guiScreen) {
return getContainerGUIAnnotation(guiScreen.getClass()) != null // API-marked classes
|| is(guiScreen, "GuiAlchChest") // Equivalent Exchange
|| is(guiScreen, "GuiCondenser") // Equivalent Exchange
|| is(guiScreen, "GUIChest") // Iron chests (formerly IC2)
|| is(guiScreen, "GuiMultiPageChest") // Multi Page chest
|| is(guiScreen, "GuiGoldSafe") // More Storage
|| is(guiScreen, "GuiLocker")
|| is(guiScreen, "GuiDualLocker")
|| is(guiScreen, "GuiSafe")
|| is(guiScreen, "GuiCabinet")
|| is(guiScreen, "GuiTower")
|| is(guiScreen, "GuiBufferChest") // Red Power 2
|| is(guiScreen, "GuiRetriever") // Red Power 2
|| is(guiScreen, "GuiItemDetect") // Red Power 2
|| is(guiScreen, "GuiAlloyFurnace") // Red Power 2
|| is(guiScreen, "GuiDeploy") // Red Power 2
|| is(guiScreen, "GuiSorter") // Red Power 2
|| is(guiScreen, "GuiFilter") // Red Power 2
|| is(guiScreen, "GuiAdvBench") // Red Power 2
|| is(guiScreen, "GuiEject") // Red Power 2
|| is(guiScreen, "GuiBag") // Red Power 2
|| is(guiScreen, "GuiPersonalChest")
|| is(guiScreen, "GuiNuclearReactor") // IC2
|| is(guiScreen, "GuiEnderChest") // EnderChest
|| is(guiScreen, "GuiColorBox")
|| is(guiScreen, "GuiLinkedColorBox") // ColorBox
|| is(guiScreen, "FC_GuiChest") // Metallurgy
|| is(guiScreen, "FM_GuiMintStorage") // Metallurgy
|| is(guiScreen, "GuiChestTFC") // TerraFirmaCraft
|| is(guiScreen, "GuiBackpack") // Backpacks
;
}
/**
 * Returns a special chest row size.
 * Given guiContainer must be checked first with isSpecialChest().
 * API-annotated classes win: their row-size callback (if any) is invoked,
 * falling back to the annotation's static rowSize value.
 * @param guiContainer the chest-like screen
 * @param defaultValue returned when no special rule applies
 * @return the row size to use for sorting
 */
public int getSpecialChestRowSize(GuiContainer guiContainer, int defaultValue) {
ContainerGUI annotation = getContainerGUIAnnotation(guiContainer.getClass());
if(annotation != null) {
Method m = getAnnotatedMethod(guiContainer.getClass(), new Class[] { ContainerGUI.RowSizeCallback.class }, 0, int.class);
if(m != null) {
try {
return (Integer)m.invoke(guiContainer);
} catch(Exception e) {
// TODO: Do something here to tell mod authors they're doing it wrong.
return annotation.rowSize();
}
} else {
return annotation.rowSize();
}
} else if(is(guiContainer, "GuiAlchChest")
|| is(guiContainer, "GuiCondenser")) { // Equivalent Exchange
return 13;
} else if (is(guiContainer, "GUIChest")) { // Iron chests (formerly IC2)
try {
return (Integer)guiContainer.getClass().getMethod("getRowLength").invoke(guiContainer);
} catch (Exception e) {
// Skip it
}
} else if (is(guiContainer, "GuiMultiPageChest")) { // Multi Page chest
return 13;
} else if (is(guiContainer, "GuiLocker") // More Storage
|| is(guiContainer, "GuiDualLocker")
|| is(guiContainer, "GuiTower")) {
return 8;
} else if (is(guiContainer, "GuiBufferChest")) { // Red Power 2
return 4;
} else if (is(guiContainer, "GuiSorter")) {
return 8;
} else if (is(guiContainer, "GuiRetriever")
|| is(guiContainer, "GuiItemDetect")
|| is(guiContainer, "GuiAlloyFurnace")
|| is(guiContainer, "GuiDeploy")
|| is(guiContainer, "GuiFilter")
|| is(guiContainer, "GuiEject")) {
return 3;
} else if (is(guiContainer, "GuiNuclearReactor")) { // IC2
// reactor grid: total slots minus the 36 player-inventory slots, 6 rows
return (obf.getSlots(obf.getContainer(guiContainer)).size() - 36) / 6;
}
return defaultValue;
}
/**
 * Tells whether the chest GUI is known to be too large for standard handling.
 * @param guiScreen the screen to check
 * @return true for known oversized chests
 */
public boolean isChestWayTooBig(GuiScreen guiScreen) {
return is(guiScreen, "GuiAlchChest") // Equivalent Exchange
|| is(guiScreen, "GuiMultiPageChest") // MultiPage Chest
|| is(guiScreen, "GUIChest") // IronChests
|| is(guiScreen, "FC_GuiChest") // Metallurgy
;
}
/**
 * Returns true if the screen is the inventory screen, despite not being a GuiInventory.
 * @param guiScreen the screen to classify
 * @return true when the screen should be treated as the player inventory
 */
public boolean isSpecialInventory(GuiScreen guiScreen) {
if(getInventoryGUIAnnotation(guiScreen.getClass()) != null) {
return true;
}
try {
// heuristic: more slots than the vanilla inventory, and not the creative screen
return obf.getSlots(obf.getContainer(obf.asGuiContainer(guiScreen))).size() > InvTweaksConst.INVENTORY_SIZE
&& !obf.isGuiInventoryCreative(guiScreen);
} catch (Exception e) {
return false;
}
}
/**
 * Maps the container's slots into logical sections (chest, crafting grid, ...),
 * first via the API callback when present, then via per-mod hard-coded layouts.
 * @param guiScreen the screen owning the container
 * @param container the container whose slots are partitioned
 * @return slot lists keyed by section; empty map when no special layout is known
 */
@SuppressWarnings("unchecked")
public Map<ContainerSection, List<Slot>> getSpecialContainerSlots(GuiScreen guiScreen, Container container) {
Class<? extends GuiScreen> clazz = guiScreen.getClass();
if(isAPIClass(clazz)) {
Method m = getAnnotatedMethod(clazz, new Class[] { ContainerGUI.ContainerSectionCallback.class, InventoryGUI.ContainerSectionCallback.class }, 0, Map.class);
if(m != null) {
try {
return (Map<ContainerSection, List<Slot>>)m.invoke(guiScreen);
} catch(Exception e) {
// TODO: Do something here to tell mod authors they're doing it wrong.
}
}
}
Map<ContainerSection, List<Slot>> result = new HashMap<ContainerSection, List<Slot>>();
List<Slot> slots = (List<Slot>) obf.getSlots(container);
if (is(guiScreen, "GuiCondenser")) { // EE
result.put(ContainerSection.CHEST, slots.subList(1, slots.size() - 36));
}
else if (is(guiScreen, "GuiAdvBench")) { // RedPower 2
result.put(ContainerSection.CRAFTING_IN, slots.subList(0, 9));
result.put(ContainerSection.CRAFTING_OUT, slots.subList(10, 11));
result.put(ContainerSection.CHEST, slots.subList(11, 29));
} else if(is(guiScreen, "GuiArcaneWorkbench") || is(guiScreen, "GuiInfusionWorkbench")) { // Thaumcraft 3
result.put(ContainerSection.CRAFTING_OUT, slots.subList(0, 1));
result.put(ContainerSection.CRAFTING_IN, slots.subList(2, 11));
}
return result;
}
// Heuristic mod detection: substring match on the screen's simple class name.
private static final boolean is(GuiScreen guiScreen, String className) {
try {
return guiScreen.getClass().getSimpleName().contains(className);
}
catch (Exception e) {
return false;
}
}
// Returns the API ContainerGUI annotation, or null when absent.
private static final ContainerGUI getContainerGUIAnnotation(Class<? extends GuiScreen> clazz) {
ContainerGUI annotation = clazz.getAnnotation(ContainerGUI.class);
return annotation;
}
// Returns the API InventoryGUI annotation, or null when absent.
private static final InventoryGUI getInventoryGUIAnnotation(Class<? extends GuiScreen> clazz) {
InventoryGUI annotation = clazz.getAnnotation(InventoryGUI.class);
return annotation;
}
// True when the screen class is marked with either InvTweaks API annotation.
private static final boolean isAPIClass(Class<? extends GuiScreen> clazz) {
return (getContainerGUIAnnotation(clazz) != null) || (getInventoryGUIAnnotation(clazz) != null);
}
// Finds the first public method carrying any of the given annotations with the
// requested parameter count and a compatible return type.
private static final Method getAnnotatedMethod(Class clazz, Class[] annotations, int numParams, Class retClass) {
Method[] methods = clazz.getMethods();
for(Method m : methods) {
for(Class annotation : annotations) {
if(m.getAnnotation(annotation) != null) {
if(m.getParameterTypes().length == numParams && retClass.isAssignableFrom(m.getReturnType())) {
return m;
}
}
}
}
return null;
}
}
|
src/minecraft/invtweaks/InvTweaksModCompatibility.java
|
package invtweaks;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.lang.reflect.Method;
import invtweaks.api.ContainerSection;
import invtweaks.api.ContainerGUI;
import invtweaks.api.InventoryGUI;
import net.minecraft.inventory.Container;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.src.InvTweaksObfuscation;
import net.minecraft.inventory.Slot;
/**
 * Compatibility layer recognizing third-party mods' container GUIs, both via
 * the InvTweaks API annotations ({@link ContainerGUI}/{@link InventoryGUI})
 * and via hard-coded class-name heuristics for known mods.
 */
public class InvTweaksModCompatibility {
// Obfuscation bridge used to reach Minecraft internals in a mapping-independent way.
private InvTweaksObfuscation obf;
public InvTweaksModCompatibility(InvTweaksObfuscation obf) {
this.obf = obf;
}
/**
 * Returns true if the screen is a chest/dispenser,
 * despite not being a GuiChest or a GuiDispenser.
 * @param guiScreen the screen to classify
 * @return true when the screen should be treated as a chest-like container
 */
public boolean isSpecialChest(GuiScreen guiScreen) {
return getContainerGUIAnnotation(guiScreen.getClass()) != null // API-marked classes
|| is(guiScreen, "GuiAlchChest") // Equivalent Exchange
|| is(guiScreen, "GuiCondenser") // Equivalent Exchange
|| is(guiScreen, "GUIChest") // Iron chests (formerly IC2)
|| is(guiScreen, "GuiMultiPageChest") // Multi Page chest
|| is(guiScreen, "GuiGoldSafe") // More Storage
|| is(guiScreen, "GuiLocker")
|| is(guiScreen, "GuiDualLocker")
|| is(guiScreen, "GuiSafe")
|| is(guiScreen, "GuiCabinet")
|| is(guiScreen, "GuiTower")
|| is(guiScreen, "GuiBufferChest") // Red Power 2
|| is(guiScreen, "GuiRetriever") // Red Power 2
|| is(guiScreen, "GuiItemDetect") // Red Power 2
|| is(guiScreen, "GuiAlloyFurnace") // Red Power 2
|| is(guiScreen, "GuiDeploy") // Red Power 2
|| is(guiScreen, "GuiSorter") // Red Power 2
|| is(guiScreen, "GuiFilter") // Red Power 2
|| is(guiScreen, "GuiAdvBench") // Red Power 2
|| is(guiScreen, "GuiEject") // Red Power 2
|| is(guiScreen, "GuiBag") // Red Power 2
|| is(guiScreen, "GuiPersonalChest")
|| is(guiScreen, "GuiNuclearReactor") // IC2
|| is(guiScreen, "GuiEnderChest") // EnderChest
|| is(guiScreen, "GuiColorBox")
|| is(guiScreen, "GuiLinkedColorBox") // ColorBox
|| is(guiScreen, "FC_GuiChest") // Metallurgy
|| is(guiScreen, "FM_GuiMintStorage") // Metallurgy
|| is(guiScreen, "GuiChestTFC") // TerraFirmaCraft
|| is(guiScreen, "GuiBackpack") // Backpacks
;
}
/**
 * Returns a special chest row size.
 * Given guiContainer must be checked first with isSpecialChest().
 * API-annotated classes win: their row-size callback (if any) is invoked,
 * falling back to the annotation's static rowSize value.
 * @param guiContainer the chest-like screen
 * @param defaultValue returned when no special rule applies
 * @return the row size to use for sorting
 */
public int getSpecialChestRowSize(GuiContainer guiContainer, int defaultValue) {
ContainerGUI annotation = getContainerGUIAnnotation(guiContainer.getClass());
if(annotation != null) {
Method m = getAnnotatedMethod(guiContainer.getClass(), new Class[] { ContainerGUI.RowSizeCallback.class }, 0, int.class);
if(m != null) {
try {
// FIX: cast through Integer instead of (int) — a direct Object-to-primitive
// cast is a compile error at the Java 6 source level this project targets;
// the Integer result unboxes on return.
return (Integer)m.invoke(guiContainer);
} catch(Exception e) {
// TODO: Do something here to tell mod authors they're doing it wrong.
return annotation.rowSize();
}
} else {
return annotation.rowSize();
}
} else if(is(guiContainer, "GuiAlchChest")
|| is(guiContainer, "GuiCondenser")) { // Equivalent Exchange
return 13;
} else if (is(guiContainer, "GUIChest")) { // Iron chests (formerly IC2)
try {
return (Integer)guiContainer.getClass().getMethod("getRowLength").invoke(guiContainer);
} catch (Exception e) {
// Skip it
}
} else if (is(guiContainer, "GuiMultiPageChest")) { // Multi Page chest
return 13;
} else if (is(guiContainer, "GuiLocker") // More Storage
|| is(guiContainer, "GuiDualLocker")
|| is(guiContainer, "GuiTower")) {
return 8;
} else if (is(guiContainer, "GuiBufferChest")) { // Red Power 2
return 4;
} else if (is(guiContainer, "GuiSorter")) {
return 8;
} else if (is(guiContainer, "GuiRetriever")
|| is(guiContainer, "GuiItemDetect")
|| is(guiContainer, "GuiAlloyFurnace")
|| is(guiContainer, "GuiDeploy")
|| is(guiContainer, "GuiFilter")
|| is(guiContainer, "GuiEject")) {
return 3;
} else if (is(guiContainer, "GuiNuclearReactor")) { // IC2
// reactor grid: total slots minus the 36 player-inventory slots, 6 rows
return (obf.getSlots(obf.getContainer(guiContainer)).size() - 36) / 6;
}
return defaultValue;
}
/**
 * Tells whether the chest GUI is known to be too large for standard handling.
 * @param guiScreen the screen to check
 * @return true for known oversized chests
 */
public boolean isChestWayTooBig(GuiScreen guiScreen) {
return is(guiScreen, "GuiAlchChest") // Equivalent Exchange
|| is(guiScreen, "GuiMultiPageChest") // MultiPage Chest
|| is(guiScreen, "GUIChest") // IronChests
|| is(guiScreen, "FC_GuiChest") // Metallurgy
;
}
/**
 * Returns true if the screen is the inventory screen, despite not being a GuiInventory.
 * @param guiScreen the screen to classify
 * @return true when the screen should be treated as the player inventory
 */
public boolean isSpecialInventory(GuiScreen guiScreen) {
if(getInventoryGUIAnnotation(guiScreen.getClass()) != null) {
return true;
}
try {
// heuristic: more slots than the vanilla inventory, and not the creative screen
return obf.getSlots(obf.getContainer(obf.asGuiContainer(guiScreen))).size() > InvTweaksConst.INVENTORY_SIZE
&& !obf.isGuiInventoryCreative(guiScreen);
} catch (Exception e) {
return false;
}
}
/**
 * Maps the container's slots into logical sections (chest, crafting grid, ...),
 * first via the API callback when present, then via per-mod hard-coded layouts.
 * @param guiScreen the screen owning the container
 * @param container the container whose slots are partitioned
 * @return slot lists keyed by section; empty map when no special layout is known
 */
@SuppressWarnings("unchecked")
public Map<ContainerSection, List<Slot>> getSpecialContainerSlots(GuiScreen guiScreen, Container container) {
Class<? extends GuiScreen> clazz = guiScreen.getClass();
if(isAPIClass(clazz)) {
Method m = getAnnotatedMethod(clazz, new Class[] { ContainerGUI.ContainerSectionCallback.class, InventoryGUI.ContainerSectionCallback.class }, 0, Map.class);
if(m != null) {
try {
return (Map<ContainerSection, List<Slot>>)m.invoke(guiScreen);
} catch(Exception e) {
// TODO: Do something here to tell mod authors they're doing it wrong.
}
}
}
Map<ContainerSection, List<Slot>> result = new HashMap<ContainerSection, List<Slot>>();
List<Slot> slots = (List<Slot>) obf.getSlots(container);
if (is(guiScreen, "GuiCondenser")) { // EE
result.put(ContainerSection.CHEST, slots.subList(1, slots.size() - 36));
}
else if (is(guiScreen, "GuiAdvBench")) { // RedPower 2
result.put(ContainerSection.CRAFTING_IN, slots.subList(0, 9));
result.put(ContainerSection.CRAFTING_OUT, slots.subList(10, 11));
result.put(ContainerSection.CHEST, slots.subList(11, 29));
} else if(is(guiScreen, "GuiArcaneWorkbench") || is(guiScreen, "GuiInfusionWorkbench")) { // Thaumcraft 3
result.put(ContainerSection.CRAFTING_OUT, slots.subList(0, 1));
result.put(ContainerSection.CRAFTING_IN, slots.subList(2, 11));
}
return result;
}
// Heuristic mod detection: substring match on the screen's simple class name.
private static final boolean is(GuiScreen guiScreen, String className) {
try {
return guiScreen.getClass().getSimpleName().contains(className);
}
catch (Exception e) {
return false;
}
}
// Returns the API ContainerGUI annotation, or null when absent.
private static final ContainerGUI getContainerGUIAnnotation(Class<? extends GuiScreen> clazz) {
ContainerGUI annotation = clazz.getAnnotation(ContainerGUI.class);
return annotation;
}
// Returns the API InventoryGUI annotation, or null when absent.
private static final InventoryGUI getInventoryGUIAnnotation(Class<? extends GuiScreen> clazz) {
InventoryGUI annotation = clazz.getAnnotation(InventoryGUI.class);
return annotation;
}
// True when the screen class is marked with either InvTweaks API annotation.
private static final boolean isAPIClass(Class<? extends GuiScreen> clazz) {
return (getContainerGUIAnnotation(clazz) != null) || (getInventoryGUIAnnotation(clazz) != null);
}
// Finds the first public method carrying any of the given annotations with the
// requested parameter count and a compatible return type.
private static final Method getAnnotatedMethod(Class clazz, Class[] annotations, int numParams, Class retClass) {
Method[] methods = clazz.getMethods();
for(Method m : methods) {
for(Class annotation : annotations) {
if(m.getAnnotation(annotation) != null) {
if(m.getParameterTypes().length == numParams && retClass.isAssignableFrom(m.getReturnType())) {
return m;
}
}
}
}
return null;
}
}
|
Set language-level to 6.0, fix the one strange compile error caused by that.
|
src/minecraft/invtweaks/InvTweaksModCompatibility.java
|
Set language-level to 6.0, fix the one strange compile error caused by that.
|
|
Java
|
mit
|
2a0c6596db9cfc7ec10e7e6ee17c5909b793a7b8
| 0
|
msemys/esjc,msemys/esjc
|
package lt.msemys.esjc.util;
import static java.nio.charset.StandardCharsets.UTF_8;
/** Small null-safe String/byte[] helpers (UTF-8 throughout). */
public class Strings {

    /** Shared empty string constant. */
    public static final String EMPTY = "";

    /** True when the given string is {@code null} or has zero length. */
    public static boolean isNullOrEmpty(String string) {
        return string == null || string.isEmpty();
    }

    /** Decodes bytes as UTF-8; {@code null} or empty input yields {@link #EMPTY}. */
    public static String newString(byte[] bytes) {
        if (bytes == null || bytes.length == 0) {
            return EMPTY;
        }
        return new String(bytes, UTF_8);
    }

    /** Encodes as UTF-8; {@code null} stays {@code null}, "" maps to the shared empty array. */
    public static byte[] toBytes(String string) {
        if (string == null) {
            return null;
        }
        return string.isEmpty() ? EmptyArrays.EMPTY_BYTES : string.getBytes(UTF_8);
    }

    /** Returns {@code defaultString} when {@code string} is null or empty. */
    public static String defaultIfEmpty(String string, String defaultString) {
        return isNullOrEmpty(string) ? defaultString : string;
    }
}
|
src/main/java/lt/msemys/esjc/util/Strings.java
|
package lt.msemys.esjc.util;
import static java.nio.charset.StandardCharsets.UTF_8;
/** Small null-safe String helpers (UTF-8 decoding). */
public class Strings {

    /** Shared empty string constant. */
    public static final String EMPTY = "";

    /** True when the given string is {@code null} or has zero length. */
    public static boolean isNullOrEmpty(String string) {
        return string == null || string.isEmpty();
    }

    /** Decodes bytes as UTF-8; {@code null} or empty input yields {@link #EMPTY}. */
    public static String newString(byte[] bytes) {
        if (bytes == null || bytes.length == 0) {
            return EMPTY;
        }
        return new String(bytes, UTF_8);
    }

    /** Returns {@code defaultString} when {@code string} is null or empty. */
    public static String defaultIfEmpty(String string, String defaultString) {
        return isNullOrEmpty(string) ? defaultString : string;
    }
}
|
added: string to bytes converter
|
src/main/java/lt/msemys/esjc/util/Strings.java
|
added: string to bytes converter
|
|
Java
|
mit
|
fdc430dd1b6e838bace33ab3bca4278f2dc1e827
| 0
|
lightblueseas/swing-components,lightblueseas/swing-components
|
/**
* The MIT License
*
* Copyright (C) 2015 Asterios Raptis
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package de.alpharogroup.swing.img;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import javax.imageio.ImageIO;
import javax.swing.ImageIcon;
import org.imgscalr.Scalr;
import org.imgscalr.Scalr.Method;
import org.imgscalr.Scalr.Mode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.itextpdf.text.Document;
import com.itextpdf.text.DocumentException;
import com.itextpdf.text.Image;
import com.itextpdf.text.pdf.PdfWriter;
import com.itextpdf.text.BaseColor;
import com.itextpdf.text.Phrase;
import com.itextpdf.text.pdf.PdfPCell;
import com.itextpdf.text.pdf.PdfPTable;
import de.alpharogroup.random.RandomExtensions;
/**
* The class {@link ImageExtensions}.
*/
/**
 * The class {@link ImageExtensions} provides static helper methods around
 * {@link BufferedImage}: reading, writing, resizing, concatenating images,
 * exporting images to a pdf document and a simple steganographic
 * weave/unweave of a short text message into pixel data.
 */
public class ImageExtensions
{

	/**
	 * The Enum Direction.
	 */
	public enum Direction
	{

		/** Indicates the horizontal direction. */
		horizontal,

		/** Indicates the vertical direction. */
		vertical
	}

	/** The logger constant. */
	private static final Logger LOG = LoggerFactory.getLogger(ImageExtensions.class.getName());

	/**
	 * Factory method for create a new {@link PdfPTable} with the given count of columns and the
	 * column header names
	 *
	 * @param numColumns
	 *            the count of columns of the table
	 * @param headerNames
	 *            the column header names
	 * @return the new {@link PdfPTable}
	 */
	public static PdfPTable newPdfPTable(int numColumns, List<String> headerNames)
	{
		PdfPTable table = new PdfPTable(numColumns);
		// iterate directly; creating a stream only for forEach is unnecessary
		headerNames.forEach(columnHeaderName -> {
			PdfPCell header = new PdfPCell();
			header.setBackgroundColor(BaseColor.LIGHT_GRAY);
			header.setBorderWidth(2);
			header.setPhrase(new Phrase(columnHeaderName));
			table.addCell(header);
		});
		return table;
	}

	/**
	 * Concatenate the given list of BufferedImage objects to one image and returns the concatenated
	 * BufferedImage object.
	 *
	 * @param imgCollection
	 *            the BufferedImage collection
	 * @param width
	 *            the width of the image that will be returned.
	 * @param height
	 *            the height of the image that will be returned.
	 * @param imageType
	 *            type of the created image
	 * @param concatenationDirection
	 *            the direction of the concatenation.
	 * @return the buffered image
	 */
	public static BufferedImage concatenateImages(final List<BufferedImage> imgCollection,
		final int width, final int height, final int imageType,
		final Direction concatenationDirection)
	{
		final BufferedImage img = new BufferedImage(width, height, imageType);
		// create the graphics context once and dispose it when done, instead of
		// leaking a new Graphics object on every loop iteration
		final java.awt.Graphics2D graphics = img.createGraphics();
		try
		{
			int x = 0;
			int y = 0;
			for (final BufferedImage bi : imgCollection)
			{
				final boolean imageDrawn = graphics.drawImage(bi, x, y, null);
				if (!imageDrawn)
				{
					throw new RuntimeException("BufferedImage could not be drawn:" + bi.toString());
				}
				if (concatenationDirection.equals(Direction.vertical))
				{
					y += bi.getHeight();
				}
				else
				{
					x += bi.getWidth();
				}
			}
		}
		finally
		{
			graphics.dispose();
		}
		return img;
	}

	/**
	 * Creates from the given Collection of images an pdf file.
	 *
	 * @param result
	 *            the output stream from the pdf file where the images shell be written.
	 * @param images
	 *            the BufferedImage collection to be written in the pdf file.
	 * @throws DocumentException
	 *             is thrown if an error occurs when trying to get an instance of {@link PdfWriter}.
	 * @throws IOException
	 *             Signals that an I/O exception has occurred.
	 */
	public static void createPdf(final OutputStream result, final List<BufferedImage> images)
		throws DocumentException, IOException
	{
		final Document document = new Document();
		PdfWriter.getInstance(document, result);
		for (final BufferedImage image : images)
		{
			final ByteArrayOutputStream baos = new ByteArrayOutputStream();
			ImageIO.write(image, "png", baos);
			final Image img = Image.getInstance(baos.toByteArray());
			// each image gets its own page sized to the image
			document.setPageSize(img);
			document.newPage();
			img.setAbsolutePosition(0, 0);
			document.add(img);
		}
		document.close();
	}

	/**
	 * Resize the given BufferedImage and returns the resized BufferedImage.
	 *
	 * @param originalImage
	 *            the original image
	 * @param scalingMethod
	 *            the scaling method
	 * @param resizeMode
	 *            the resize mode
	 * @param formatName
	 *            the format name
	 * @param targetWidth
	 *            the target width
	 * @param targetHeight
	 *            the target height
	 * @return the resized image, or null if resizing failed
	 * @throws IOException
	 *             Signals that an I/O exception has occurred.
	 */
	public static BufferedImage getResized(final BufferedImage originalImage,
		final Method scalingMethod, final Mode resizeMode, final String formatName,
		final int targetWidth, final int targetHeight) throws IOException
	{
		return read(resize(originalImage, scalingMethod, resizeMode, formatName, targetWidth,
			targetHeight));
	}

	/**
	 * Resize the given BufferedImage and returns the resized BufferedImage.
	 *
	 * @param originalImage
	 *            the original image
	 * @param formatName
	 *            the format name
	 * @param targetWidth
	 *            the target width
	 * @param targetHeight
	 *            the target height
	 * @return the resized image, or null if resizing failed
	 * @throws IOException
	 *             Signals that an I/O exception has occurred.
	 */
	public static BufferedImage getResized(final BufferedImage originalImage,
		final String formatName, final int targetWidth, final int targetHeight) throws IOException
	{
		return read(resize(originalImage, formatName, targetWidth, targetHeight));
	}

	/**
	 * Factory method for create a new {@link ImageIcon}.
	 *
	 * @param image
	 *            the file that contains the image
	 * @return the new {@link ImageIcon}
	 */
	public static ImageIcon newImageIcon(File image)
	{
		return new ImageIcon(image.getAbsolutePath());
	}

	/**
	 * Generates a random {@link BufferedImage} with the given parameters.
	 *
	 * @param width
	 *            the width
	 * @param height
	 *            the height
	 * @param imageType
	 *            the type of the image
	 *
	 * @return The generated {@link BufferedImage}.
	 */
	public static BufferedImage randomBufferedImage(final int width, final int height,
		final int imageType)
	{
		final BufferedImage img = new BufferedImage(width, height, imageType);
		for (int y = 0; y < height; y++)
		{
			for (int x = 0; x < width; x++)
			{
				img.setRGB(x, y, RandomExtensions.newRandomPixel());
			}
		}
		return img;
	}

	/**
	 * Gets the buffered image from the given byte array.
	 *
	 * @param byteArray
	 *            the byte array
	 * @return the buffered image
	 * @throws IOException
	 *             Signals that an I/O exception has occurred.
	 */
	public static BufferedImage read(final byte[] byteArray) throws IOException
	{
		return ImageIO.read(new ByteArrayInputStream(byteArray));
	}

	/**
	 * Gets the buffered image from the given byte array quietly.
	 *
	 * @param byteArray
	 *            the byte array
	 * @return the buffered image or null if the read process failed.
	 */
	public static BufferedImage readQuietly(final byte[] byteArray)
	{
		BufferedImage img = null;
		try
		{
			img = read(byteArray);
		}
		catch (IOException e)
		{
			LOG.error("Reading image failed.", e);
		}
		return img;
	}

	/**
	 * Gets the buffered image from the given input stream quietly.
	 *
	 * @param input
	 *            the input
	 * @return the buffered image or null if the read process failed.
	 */
	public static BufferedImage readQuietly(final InputStream input)
	{
		BufferedImage img = null;
		try
		{
			img = ImageIO.read(input);
		}
		catch (IOException e)
		{
			LOG.error("Reading image failed.", e);
		}
		return img;
	}

	/**
	 * Resize the given image.
	 *
	 * @param originalImage
	 *            the original image
	 * @param scalingMethod
	 *            the scaling method
	 * @param resizeMode
	 *            the resize mode
	 * @param formatName
	 *            the format name
	 * @param targetWidth
	 *            the target width
	 * @param targetHeight
	 *            the target height
	 * @return the resized image as byte array, or null if resizing failed
	 */
	public static byte[] resize(final BufferedImage originalImage, final Method scalingMethod,
		final Mode resizeMode, final String formatName, final int targetWidth,
		final int targetHeight)
	{
		try
		{
			final BufferedImage resizedImage = Scalr.resize(originalImage, scalingMethod,
				resizeMode, targetWidth, targetHeight);
			return toByteArray(resizedImage, formatName);
		}
		catch (final Exception e)
		{
			// keep the original contract of returning null on failure, but do not
			// swallow the error silently anymore
			LOG.error("Resizing image failed.", e);
			return null;
		}
	}

	/**
	 * Resize the given BufferedImage with quality scaling and exact-fit mode.
	 *
	 * @param originalImage
	 *            the original image
	 * @param formatName
	 *            the format name
	 * @param targetWidth
	 *            the target width
	 * @param targetHeight
	 *            the target height
	 * @return the resized image as byte array, or null if resizing failed
	 */
	public static byte[] resize(final BufferedImage originalImage, final String formatName,
		final int targetWidth, final int targetHeight)
	{
		return resize(originalImage, Scalr.Method.QUALITY, Scalr.Mode.FIT_EXACT, formatName,
			targetWidth, targetHeight);
	}

	/**
	 * Converts the given BufferedImage to a byte array.
	 *
	 * @param bi
	 *            the buffered image
	 * @param formatName
	 *            the format name
	 * @return the byte[]
	 * @throws IOException
	 *             Signals that an I/O exception has occurred.
	 */
	public static byte[] toByteArray(final BufferedImage bi, final String formatName)
		throws IOException
	{
		try (ByteArrayOutputStream baos = new ByteArrayOutputStream())
		{
			ImageIO.write(bi, formatName, baos);
			baos.flush();
			return baos.toByteArray();
		}
	}

	/**
	 * Unweave a secret message from the given {@link BufferedImage}.
	 *
	 * @param bufferedImage
	 *            the buffered image
	 * @return the secret message
	 */
	public static String unweaveFrom(final BufferedImage bufferedImage)
	{
		final int width = bufferedImage.getWidth();
		final int height = bufferedImage.getHeight();
		// the message length is stored in the low byte of the first pixel (see weaveInto)
		final int messageLength = bufferedImage.getRGB(0, 0) & 0xff;
		final StringBuilder sb = new StringBuilder();
		for (int row = 0, j = 0, i = 1; row < height; row++)
		{
			for (int column = 0; column < width && j < messageLength; column++, i++)
			{
				// every 11th pixel carries one character (matches the stride in weaveInto)
				if (i % 11 == 0)
				{
					final int result = bufferedImage.getRGB(column, row);
					// reassemble the 8 bit character: 3 bits from red, 3 from green, 2 from blue
					int charAtPosition = (result >> 16 & 0x7) << 5;
					charAtPosition |= (result >> 8 & 0x7) << 2;
					charAtPosition |= result & 0x3;
					sb.append((char)charAtPosition);
					j++;
				}
			}
		}
		return sb.toString();
	}

	/**
	 * Weave the given secret message into the given {@link BufferedImage}.
	 *
	 * @param bufferedImage
	 *            the buffered image
	 * @param message
	 *            the secret message (max 255 characters)
	 * @return the buffered image with the secret message weaved in.
	 * @throws IllegalArgumentException
	 *             if the message is longer than 255 characters or the image is too small
	 */
	public static BufferedImage weaveInto(final BufferedImage bufferedImage, final String message)
	{
		final int width = bufferedImage.getWidth();
		final int height = bufferedImage.getHeight();
		if (message.length() > 255)
		{
			throw new IllegalArgumentException("Given message is to large(max 255 characters)");
		}
		if (message.length() * 11 > width * height)
		{
			throw new IllegalArgumentException("Given image is to small");
		}
		// NOTE(review): getBytes() uses the platform default charset; characters
		// outside a single-byte encoding may not round-trip through unweaveFrom - confirm
		final byte[] messageBytes = message.getBytes();
		// store the message length in the low byte of the first pixel
		int messageLengthDecode = bufferedImage.getRGB(0, 0) >> 8 << 8;
		messageLengthDecode |= message.length();
		bufferedImage.setRGB(0, 0, messageLengthDecode);
		for (int i = 1, messagePosition = 0, row = 0, j = 0; row < height; row++)
		{
			for (int column = 0; column < width && j < messageBytes.length; column++, i++)
			{
				// every 11th pixel carries one message byte, split over the low bits
				// of the red (3 bits), green (3 bits) and blue (2 bits) channels
				if (i % 11 == 0)
				{
					int rgb = bufferedImage.getRGB(column, row);
					final int a = rgb >> 24 & 0xff;
					int r = (rgb >> 16 & 0xff) >> 3 << 3;
					r = r | messageBytes[messagePosition] >> 5;
					int g = (rgb >> 8 & 0xff) >> 3 << 3;
					g = g | messageBytes[messagePosition] >> 2 & 7;
					int b = (rgb & 0xff) >> 2 << 2;
					b = b | messageBytes[messagePosition] & 0x3;
					rgb = 0;
					rgb = rgb | a << 24;
					rgb = rgb | r << 16;
					rgb = rgb | g << 8;
					rgb = rgb | b;
					bufferedImage.setRGB(column, row, rgb);
					messagePosition++;
					j++;
				}
			}
		}
		return bufferedImage;
	}

	/**
	 * Convenience method to write the given {@link BufferedImage} object to the given {@link File}
	 * object.
	 *
	 * @param bufferedImage
	 *            the {@link BufferedImage} object to be written.
	 * @param formatName
	 *            the format name
	 * @param outputfile
	 *            the output file
	 * @return the file
	 * @throws IOException
	 *             Signals that an I/O exception has occurred.
	 */
	public static File write(final BufferedImage bufferedImage, final String formatName,
		final File outputfile) throws IOException
	{
		// NOTE(review): ImageIO.write returns false when no writer exists for
		// formatName; that return value is ignored here - confirm callers expect this
		ImageIO.write(bufferedImage, formatName, outputfile);
		return outputfile;
	}
}
|
src/main/java/de/alpharogroup/swing/img/ImageExtensions.java
|
/**
* The MIT License
*
* Copyright (C) 2015 Asterios Raptis
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package de.alpharogroup.swing.img;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import javax.imageio.ImageIO;
import javax.swing.ImageIcon;
import org.imgscalr.Scalr;
import org.imgscalr.Scalr.Method;
import org.imgscalr.Scalr.Mode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.itextpdf.text.Document;
import com.itextpdf.text.DocumentException;
import com.itextpdf.text.Image;
import com.itextpdf.text.pdf.PdfWriter;
import de.alpharogroup.random.RandomExtensions;
/**
* The class {@link ImageExtensions}.
*/
public class ImageExtensions
{
/**
* The Enum Direction.
*/
public enum Direction
{
/** Indicates the horizontal direction. */
horizontal,
/** Indicates the vertical direction. */
vertical
}
/** The logger constant. */
private static final Logger LOG = LoggerFactory.getLogger(ImageExtensions.class.getName());
/**
* Concatenate the given list of BufferedImage objects to one image and returns the concatenated
* BufferedImage object.
*
* @param imgCollection
* the BufferedImage collection
* @param width
* the width of the image that will be returned.
* @param height
* the height of the image that will be returned.
* @param imageType
* type of the created image
* @param concatenationDirection
* the direction of the concatenation.
* @return the buffered image
*/
public static BufferedImage concatenateImages(final List<BufferedImage> imgCollection,
final int width, final int height, final int imageType,
final Direction concatenationDirection)
{
final BufferedImage img = new BufferedImage(width, height, imageType);
int x = 0;
int y = 0;
for (final BufferedImage bi : imgCollection)
{
final boolean imageDrawn = img.createGraphics().drawImage(bi, x, y, null);
if (!imageDrawn)
{
throw new RuntimeException("BufferedImage could not be drawn:" + bi.toString());
}
if (concatenationDirection.equals(Direction.vertical))
{
y += bi.getHeight();
}
else
{
x += bi.getWidth();
}
}
return img;
}
/**
* Creates from the given Collection of images an pdf file.
*
* @param result
* the output stream from the pdf file where the images shell be written.
* @param images
* the BufferedImage collection to be written in the pdf file.
* @throws DocumentException
* is thrown if an error occurs when trying to get an instance of {@link PdfWriter}.
* @throws IOException
* Signals that an I/O exception has occurred.
*/
public static void createPdf(final OutputStream result, final List<BufferedImage> images)
throws DocumentException, IOException
{
final Document document = new Document();
PdfWriter.getInstance(document, result);
for (final BufferedImage image : images)
{
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
ImageIO.write(image, "png", baos);
final Image img = Image.getInstance(baos.toByteArray());
document.setPageSize(img);
document.newPage();
img.setAbsolutePosition(0, 0);
document.add(img);
}
document.close();
}
/**
* Resize the given BufferedImage and returns the resized BufferedImage.
*
* @param originalImage
* the original image
* @param scalingMethod
* the scaling method
* @param resizeMode
* the resize mode
* @param formatName
* the format name
* @param targetWidth
* the target width
* @param targetHeight
* the target height
* @return the resized
* @throws IOException
* Signals that an I/O exception has occurred.
*/
public static BufferedImage getResized(final BufferedImage originalImage,
final Method scalingMethod, final Mode resizeMode, final String formatName,
final int targetWidth, final int targetHeight) throws IOException
{
return read(resize(originalImage, scalingMethod, resizeMode, formatName, targetWidth,
targetHeight));
}
/**
* Resize the given BufferedImage and returns the resized BufferedImage.
*
* @param originalImage
* the original image
* @param formatName
* the format name
* @param targetWidth
* the target width
* @param targetHeight
* the target height
* @return the resized
* @throws IOException
* Signals that an I/O exception has occurred.
*/
public static BufferedImage getResized(final BufferedImage originalImage,
final String formatName, final int targetWidth, final int targetHeight) throws IOException
{
return read(resize(originalImage, formatName, targetWidth, targetHeight));
}
/**
* Factory method for create a new {@link ImageIcon}.
*
* @param image
* the file that contains the image
* @return the new {@link ImageIcon}
*/
public static ImageIcon newImageIcon(File image)
{
ImageIcon img = new ImageIcon(image.getAbsolutePath());
return img;
}
/**
* Generates a random {@link BufferedImage} with the given parameters.
*
* @param width
* the width
* @param height
* the height
* @param imageType
* the type of the image
*
* @return The generated {@link BufferedImage}.
*/
public static BufferedImage randomBufferedImage(final int width, final int height,
final int imageType)
{
final BufferedImage img = new BufferedImage(width, height, imageType);
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
img.setRGB(x, y, RandomExtensions.newRandomPixel());
}
}
return img;
}
/**
* Gets the buffered image from the given byte array.
*
* @param byteArray
* the byte array
* @return the buffered image
* @throws IOException
* Signals that an I/O exception has occurred.
*/
public static BufferedImage read(final byte[] byteArray) throws IOException
{
return ImageIO.read(new ByteArrayInputStream(byteArray));
}
/**
* Gets the buffered image from the given byte array quietly.
*
* @param byteArray
* the byte array
* @return the buffered image or null if the read process failed.
*/
public static BufferedImage readQuietly(final byte[] byteArray)
{
BufferedImage img = null;
try
{
img = read(byteArray);
}
catch (IOException e)
{
LOG.error("Reading image failed.", e);
}
return img;
}
/**
* Gets the buffered image from the given byte array quietly.
*
* @param input
* the input
* @return the buffered image or null if the read process failed.
*/
public static BufferedImage readQuietly(final InputStream input)
{
BufferedImage img = null;
try
{
img = ImageIO.read(input);
}
catch (IOException e)
{
LOG.error("Reading image failed.", e);
}
return img;
}
/**
* Resize the given image.
*
* @param originalImage
* the original image
* @param scalingMethod
* the scaling method
* @param resizeMode
* the resize mode
* @param formatName
* the format name
* @param targetWidth
* the target width
* @param targetHeight
* the target height
* @return the byte[]
*/
public static byte[] resize(final BufferedImage originalImage, final Method scalingMethod,
final Mode resizeMode, final String formatName, final int targetWidth,
final int targetHeight)
{
try
{
final BufferedImage resizedImage = Scalr.resize(originalImage, scalingMethod,
resizeMode, targetWidth, targetHeight);
return toByteArray(resizedImage, formatName);
}
catch (final Exception e)
{
return null;
}
}
/**
* Resize the given BufferedImage.
*
* @param originalImage
* the original image
* @param formatName
* the format name
* @param targetWidth
* the target width
* @param targetHeight
* the target height
* @return the byte[]
*/
public static byte[] resize(final BufferedImage originalImage, final String formatName,
final int targetWidth, final int targetHeight)
{
return resize(originalImage, Scalr.Method.QUALITY, Scalr.Mode.FIT_EXACT, formatName,
targetWidth, targetHeight);
}
/**
* Converts the given BufferedImage to a byte array.
*
* @param bi
* the bi
* @param formatName
* the format name
* @return the byte[]
* @throws IOException
* Signals that an I/O exception has occurred.
*/
public static byte[] toByteArray(final BufferedImage bi, final String formatName)
throws IOException
{
try (ByteArrayOutputStream baos = new ByteArrayOutputStream())
{
ImageIO.write(bi, formatName, baos);
baos.flush();
final byte[] byteArray = baos.toByteArray();
return byteArray;
}
}
/**
* Unweave a secret message from the given {@link BufferedImage}.
*
* @param bufferedImage
* the buffered image
* @return the secret message
*/
public static String unweaveFrom(final BufferedImage bufferedImage)
{
final int width = bufferedImage.getWidth();
final int height = bufferedImage.getHeight();
final int messageLength = bufferedImage.getRGB(0, 0) & 0xff;
final StringBuilder sb = new StringBuilder();
for (int row = 0, j = 0, i = 1; row < height; row++)
{
for (int column = 0; column < width && j < messageLength; column++, i++)
{
if (i % 11 == 0)
{
final int result = bufferedImage.getRGB(column, row);
int charAtPosition = (result >> 16 & 0x7) << 5;
charAtPosition |= (result >> 8 & 0x7) << 2;
charAtPosition |= result & 0x3;
sb.append((char)charAtPosition);
j++;
}
}
}
return sb.toString();
}
/**
* Weave the given secret message into the given {@link BufferedImage}.
*
* @param bufferedImage
* the buffered image
* @param message
* the secret message
* @return the buffered image with the secret message weaved in.
*/
public static BufferedImage weaveInto(final BufferedImage bufferedImage, final String message)
{
final int width = bufferedImage.getWidth();
final int height = bufferedImage.getHeight();
if (message.length() > 255)
{
throw new IllegalArgumentException("Given message is to large(max 255 characters)");
}
if (message.length() * 11 > width * height)
{
throw new IllegalArgumentException("Given image is to small");
}
final byte[] messageBytes = message.getBytes();
int messageLengthDecode = bufferedImage.getRGB(0, 0) >> 8 << 8;
messageLengthDecode |= message.length();
bufferedImage.setRGB(0, 0, messageLengthDecode);
for (int i = 1, messagePosition = 0, row = 0, j = 0; row < height; row++)
{
for (int column = 0; column < width && j < messageBytes.length; column++, i++)
{
if (i % 11 == 0)
{
int rgb = bufferedImage.getRGB(column, row);
final int a = rgb >> 24 & 0xff;
int r = (rgb >> 16 & 0xff) >> 3 << 3;
r = r | messageBytes[messagePosition] >> 5;
int g = (rgb >> 8 & 0xff) >> 3 << 3;
g = g | messageBytes[messagePosition] >> 2 & 7;
int b = (rgb & 0xff) >> 2 << 2;
b = b | messageBytes[messagePosition] & 0x3;
rgb = 0;
rgb = rgb | a << 24;
rgb = rgb | r << 16;
rgb = rgb | g << 8;
rgb = rgb | b;
bufferedImage.setRGB(column, row, rgb);
messagePosition++;
j++;
}
}
}
return bufferedImage;
}
/**
* Convenience method to write the given {@link BufferedImage} object to the given {@link File}
* object.
*
* @param bufferedImage
* the {@link BufferedImage} object to be written.
* @param formatName
* the format name
* @param outputfile
* the output file
* @return the file
* @throws IOException
* Signals that an I/O exception has occurred.
*/
public static File write(final BufferedImage bufferedImage, final String formatName,
final File outputfile) throws IOException
{
ImageIO.write(bufferedImage, formatName, outputfile);
return outputfile;
}
}
|
Update ImageExtensions.java
|
src/main/java/de/alpharogroup/swing/img/ImageExtensions.java
|
Update ImageExtensions.java
|
|
Java
|
mit
|
c383a13390a5ee5d94c335ff9d1071c6808a2c78
| 0
|
y20k/transistor,meonwax/transistor
|
/**
* MainActivityFragment.java
* Implements the main fragment of the main activity
* This fragment is a list view of radio stations
*
* This file is part of
* TRANSISTOR - Radio App for Android
*
* Copyright (c) 2015 - Y20K.org
* Licensed under the MIT-License
* http://opensource.org/licenses/MIT
*/
package org.y20k.transistor;
import android.Manifest;
import android.app.Activity;
import android.app.Application;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.os.Parcelable;
import android.os.Vibrator;
import android.preference.PreferenceManager;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.Fragment;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ListView;
import android.widget.Toast;
import org.y20k.transistor.core.Collection;
import org.y20k.transistor.helpers.CollectionAdapter;
import org.y20k.transistor.helpers.DialogAddStation;
import org.y20k.transistor.helpers.ImageHelper;
import org.y20k.transistor.helpers.StationDownloader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.LinkedList;
/**
* MainActivityFragment class
*/
public final class MainActivityFragment extends Fragment {
/* Define log tag */
private static final String LOG_TAG = MainActivityFragment.class.getSimpleName();
/* Keys */
private static final String ACTION_COLLECTION_CHANGED = "org.y20k.transistor.action.COLLECTION_CHANGED";
private static final String ACTION_PLAYBACK_STARTED = "org.y20k.transistor.action.PLAYBACK_STARTED";
private static final String ACTION_PLAYBACK_STOPPED = "org.y20k.transistor.action.PLAYBACK_STOPPED";
private static final String ACTION_IMAGE_CHANGE_REQUESTED = "org.y20k.transistor.action.IMAGE_CHANGE_REQUESTED";
private static final String LIST_STATE = "ListState";
private static final String STREAM_URI = "streamUri";
private static final String STATION_NAME = "stationName";
private static final String STATION_ID = "stationID";
private static final String STATION_ID_CURRENT = "stationIDCurrent";
private static final String STATION_ID_LAST = "stationIDLast";
private static final String PLAYBACK = "playback";
private static final String TITLE = "title";
private static final String CONTENT = "content";
private static final int REQUEST_LOAD_IMAGE = 1;
private static final int PERMISSION_REQUEST_READ_EXTERNAL_STORAGE = 1;
/* Main class variables */
private Application mApplication;
private Activity mActivity;
private Collection mCollection;
private CollectionAdapter mCollectionAdapter = null;
private File mFolder;
private LinkedList<String> mStationNames;
private LinkedList<Bitmap> mStationImages;
private View mRootView;
private ListView mListView;
private Parcelable mListState;
private int mTempStationImageID;
private PlayerService mPlayerService;
/* Constructor (default) - public no-arg constructor; Android fragments are
   typically re-instantiated through this, so it stays intentionally empty. */
public MainActivityFragment() {
}
/**
 * Initializes the fragment: grabs activity/application contexts, creates the
 * playback service object, resolves the collection folder on external storage
 * (finishing the activity when that storage is unavailable), builds the
 * collection adapter and registers the broadcast receivers.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // get activity and application contexts
    mActivity = getActivity();
    mApplication = mActivity.getApplication();
    // initiate playback service
    mPlayerService = new PlayerService();
    // set list state null
    mListState = null;
    // initialize temporary station image id (-1 means "no image change pending")
    mTempStationImageID = -1;
    try {
        // get collection folder from external storage
        mFolder = new File(mActivity.getExternalFilesDir("Collection").toString());
    } catch (NullPointerException e) {
        // getExternalFilesDir returns null when external storage is not mounted:
        // notify user and log exception
        Toast.makeText(mActivity, R.string.toastalert_no_external_storage, Toast.LENGTH_LONG).show();
        Log.e(LOG_TAG, "Unable to access external storage.");
        // finish activity
        mActivity.finish();
    }
    // fragment has options menu
    setHasOptionsMenu(true);
    // create adapter for collection
    mStationNames = new LinkedList<>();
    mStationImages = new LinkedList<>();
    mCollectionAdapter = new CollectionAdapter(mActivity, mStationNames, mStationImages);
    // listen for data change in mCollection adapter
    mCollectionAdapter.setCollectionChangedListener(new CollectionAdapter.CollectionChangedListener() {
        @Override
        public void collectionChanged() {
            refreshStationList();
        }
    });
    // initialize broadcast receivers
    initializeBroadcastReceivers();
}
/**
 * Inflates the station list view, attaches the collection adapter and wires
 * the tap (open player for the tapped station) and long-press (context
 * actions via {@code handleLongClick}) listeners.
 *
 * @return the inflated root view of the fragment
 */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    // get list state from saved instance
    if (savedInstanceState != null) {
        mListState = savedInstanceState.getParcelable(MainActivityFragment.LIST_STATE);
    }
    // inflate rootview from xml
    mRootView = inflater.inflate(R.layout.fragment_main, container, false);
    // get reference to list view from inflated root view
    mListView = (ListView) mRootView.findViewById(R.id.main_listview_collection);
    // attach adapter to list view
    mListView.setAdapter(mCollectionAdapter);
    // attach OnItemClickListener to mListView (single tap)
    mListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
        // inner method override for OnItemClickListener
        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            if (mCollection != null) {
                // get station name and URL from position (adapter item maps position -> station index)
                String stationName = mCollection.getStations().get((Integer) mCollectionAdapter.getItem(position)).getStationName();
                String streamUri = mCollection.getStations().get((Integer) mCollectionAdapter.getItem(position)).getStreamUri().toString();
                // add name, url and id of station to intent
                Intent intent = new Intent(mActivity, PlayerActivity.class);
                intent.putExtra(STATION_NAME, stationName);
                intent.putExtra(STREAM_URI, streamUri);
                intent.putExtra(STATION_ID, position);
                // start activity with intent
                startActivity(intent);
            }
        }
    });
    // attach OnItemLongClickListener to mListView (tap and hold)
    mListView.setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() {
        @Override
        public boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id) {
            handleLongClick(position);
            return true;
        }
    });
    // return list view
    return mRootView;
}
/**
 * Processes a streaming link that may have been handed to the activity via
 * its intent whenever the fragment returns to the foreground.
 */
@Override
public void onResume() {
    super.onResume();
    // handle incoming intent
    handleNewStationIntent();
}
/**
 * Refreshes the station list every time the fragment becomes visible, so
 * changes made while it was stopped are picked up.
 */
@Override
public void onStart() {
    super.onStart();
    // fill collection adapter with stations
    refreshStationList();
}
/**
 * Handles action bar selections: "add" opens the add-station dialog; "about"
 * and "how to" open the info sheet activity with the matching title and html
 * content. All other items fall through to the superclass.
 *
 * @param item the selected menu item
 * @return true if the selection was handled here
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();
    // action bar - add
    if (id == R.id.menu_add) {
        DialogAddStation dialog = new DialogAddStation(mActivity);
        dialog.show();
        return true;
    }
    // action bar menu - about
    else if (id == R.id.menu_about) {
        // get title and content
        String title = mActivity.getString(R.string.header_about);
        String content = mActivity.getString(R.string.html_about);
        // create intent
        Intent intent = new Intent(mActivity, InfosheetActivity.class);
        // put title and content to intent
        intent.putExtra(TITLE, title);
        intent.putExtra(CONTENT, content);
        // start activity
        startActivity(intent);
        return true;
    }
    // action bar menu - how to
    else if (id == R.id.menu_howto) {
        // get title and content
        String title = mActivity.getString(R.string.header_howto);
        String content = mActivity.getString(R.string.html_howto);
        // create intent
        Intent intent = new Intent(mActivity, InfosheetActivity.class);
        // put title and content to intent
        intent.putExtra(TITLE, title);
        intent.putExtra(CONTENT, content);
        // start activity
        startActivity(intent);
        return true;
    }
    return super.onOptionsItemSelected(item);
}
/**
 * Saves the scroll position of the station list so it can be restored in
 * {@code onCreateView} after re-creation.
 *
 * @param outState bundle receiving the parcelable list state
 */
@Override
public void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    // save list view position
    mListState = mListView.onSaveInstanceState();
    outState.putParcelable(LIST_STATE, mListState);
}
/**
 * Rebuilds {@code mCollection} from the collection folder and appends every
 * station's name and image to the adapter's backing lists. Stations without
 * an image file get the default note symbol; all images are processed via
 * {@code ImageHelper.createCircularFramedImage(192)}.
 */
private void fillCollectionAdapter() {
    Bitmap stationImage;
    Bitmap stationImageSmall;
    String stationName;
    ImageHelper imageHelper;
    // create collection
    Log.v(LOG_TAG, "Create collection of stations (folder:" + mFolder.toString() + ").");
    mCollection = new Collection(mFolder);
    // put stations into collection adapter
    for (int i = 0; i < mCollection.getStations().size(); i++) {
        // set name of station
        stationName = mCollection.getStations().get(i).getStationName();
        // add name to linked list of names
        mStationNames.add(stationName);
        // set image for station
        if (mCollection.getStations().get(i).getStationImageFile().exists()) {
            // station image
            stationImageSmall = BitmapFactory.decodeFile(mCollection.getStations().get(i).getStationImageFile().toString());
        } else {
            // default image
            stationImageSmall = BitmapFactory.decodeResource(mActivity.getResources(), R.drawable.ic_notesymbol);
        }
        imageHelper = new ImageHelper(stationImageSmall, mActivity);
        imageHelper.setBackgroundColor(R.color.transistor_grey_lighter);
        stationImage = imageHelper.createCircularFramedImage(192);
        // add image to linked list of images
        mStationImages.add(stationImage);
    }
    mCollectionAdapter.setCollection(mCollection);
    mCollectionAdapter.notifyDataSetChanged();
}
/* (Re-)fills collection adapter with stations */
private void refreshStationList() {
// clear and refill mCollection adapter
if (!mStationNames.isEmpty() && !mStationImages.isEmpty()) {
mStationNames.clear();
mStationImages.clear();
}
fillCollectionAdapter();
// show call to action, if necessary
View actioncall = mRootView.findViewById(R.id.main_actioncall_layout);
if (mCollectionAdapter.isEmpty()) {
actioncall.setVisibility(View.VISIBLE);
} else {
actioncall.setVisibility(View.GONE);
}
}
/* Handles external taps on streaming links */
private void handleNewStationIntent() {
    // get the intent the hosting activity was started with
    Intent intent = mActivity.getIntent();
    // only react to intents of type VIEW
    if (Intent.ACTION_VIEW.equals(intent.getAction())) {
        // the data string is the station URL regardless of the mime type
        // (the previous code branched on intent.getType() but both branches
        // performed the exact same assignment)
        String newStationURL = intent.getDataString();
        // clear the intent action so it is not processed again on resume
        intent.setAction("");
        // check for null
        if (newStationURL != null) {
            // download and add new station
            StationDownloader stationDownloader = new StationDownloader(newStationURL, mActivity);
            stationDownloader.execute();
            // notify interested components that the collection changed
            Intent i = new Intent();
            i.setAction(ACTION_COLLECTION_CHANGED);
            LocalBroadcastManager.getInstance(mActivity).sendBroadcast(i);
        }
        // unsuccessful - log failure
        else {
            Log.v(LOG_TAG, "Received an empty intent");
        }
    }
}
/* Initializes broadcast receivers fot onCreate */
private void initializeBroadcastReceivers() {
// broadcast receiver: player service stopped playback
BroadcastReceiver playbackStoppedReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
refreshStationList();
}
};
IntentFilter playbackStoppedIntentFilter = new IntentFilter(ACTION_PLAYBACK_STOPPED);
LocalBroadcastManager.getInstance(mApplication).registerReceiver(playbackStoppedReceiver, playbackStoppedIntentFilter);
// broadcast receiver: player service stopped playback
BroadcastReceiver playbackStartedReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
refreshStationList();
}
};
IntentFilter playbackStartedIntentFilter = new IntentFilter(ACTION_PLAYBACK_STARTED);
LocalBroadcastManager.getInstance(mApplication).registerReceiver(playbackStartedReceiver, playbackStartedIntentFilter);
// broadcast receiver: station added, deleted, or changed
BroadcastReceiver collectionChangedReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
refreshStationList();
}
};
IntentFilter collectionChangedIntentFilter = new IntentFilter(ACTION_COLLECTION_CHANGED);
LocalBroadcastManager.getInstance(mApplication).registerReceiver(collectionChangedReceiver, collectionChangedIntentFilter);
// broadcast receiver: listen for request to change station image
BroadcastReceiver imageChangeRequestReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
// get station id and save it
mTempStationImageID = intent.getIntExtra(STATION_ID, -1);
// start image picker
selectFromImagePicker();
}
};
IntentFilter imageChangeRequesIntentFilter = new IntentFilter(ACTION_IMAGE_CHANGE_REQUESTED);
LocalBroadcastManager.getInstance(mApplication).registerReceiver(imageChangeRequestReceiver, imageChangeRequesIntentFilter);
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
switch (requestCode) {
case PERMISSION_REQUEST_READ_EXTERNAL_STORAGE: {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// permission granted - get system picker for images
Intent pickImageIntent = new Intent(Intent.ACTION_PICK, android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(pickImageIntent, REQUEST_LOAD_IMAGE);
} else {
// permission denied
}
}
}
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == REQUEST_LOAD_IMAGE && resultCode == Activity.RESULT_OK && null != data) {
// retrieve selected image from image picker
processNewImage(data.getData());
}
}
/* Processes new image and saves it to storage */
private void processNewImage(Uri newImageUri) {
ImageHelper imageHelper = new ImageHelper(newImageUri, mActivity);
Bitmap newImage = imageHelper.getInputImage();
if (newImage != null) {
// write image to storage
File stationImageFile = mCollection.getStations().get(mTempStationImageID).getStationImageFile();
try (FileOutputStream out = new FileOutputStream(stationImageFile)) {
newImage.compress(Bitmap.CompressFormat.PNG, 100, out);
} catch (IOException e) {
Log.e(LOG_TAG, "Unable to save: " + newImage.toString());
}
} else {
Log.e(LOG_TAG, "Unable to get image from media picker: " + newImageUri.toString());
// TODO handle error here
}
}
/* Check permissions and start image picker */
private void selectFromImagePicker() {
// permission to read external storage granted
if (ActivityCompat.checkSelfPermission(mActivity,
Manifest.permission.READ_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED) {
// get system picker for images
Intent pickImageIntent = new Intent(Intent.ACTION_PICK, android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
mActivity.startActivityForResult(pickImageIntent, REQUEST_LOAD_IMAGE);
}
// permission to read external storage not granted
else {
if (ActivityCompat.shouldShowRequestPermissionRationale(mActivity, Manifest.permission.READ_EXTERNAL_STORAGE)) {
// ask for permission and explain why
Snackbar snackbar = Snackbar.make(mRootView, R.string.snackbar_request_storage_access, Snackbar.LENGTH_INDEFINITE);
snackbar.setAction(R.string.dialog_generic_button_okay, new View.OnClickListener() {
@Override
public void onClick(View view) {
ActivityCompat.requestPermissions(mActivity, new String[]{Manifest.permission.READ_EXTERNAL_STORAGE},
PERMISSION_REQUEST_READ_EXTERNAL_STORAGE);
}
});
snackbar.show();
} else {
// ask for permission without explanation
ActivityCompat.requestPermissions(mActivity, new String[]{Manifest.permission.READ_EXTERNAL_STORAGE},
PERMISSION_REQUEST_READ_EXTERNAL_STORAGE);
}
}
}
/* Handles long click on list item */
private void handleLongClick(int position) {
int stationIDCurrent;
int stationIDLast;
// get current playback state
SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
stationIDCurrent = settings.getInt(STATION_ID_CURRENT, -1);
boolean playback = settings.getBoolean(PLAYBACK, false);
if (playback && position == stationIDCurrent ) {
// stop playback service
mPlayerService.startActionStop(mActivity);
stationIDLast = stationIDCurrent;
playback = false;
Toast.makeText(mActivity, R.string.toastmessage_long_press_playback_stopped, Toast.LENGTH_LONG).show();
} else {
// start playback service
String stationName = mCollection.getStations().get((Integer) mCollectionAdapter.getItem(position)).getStationName();
String streamUri = mCollection.getStations().get((Integer) mCollectionAdapter.getItem(position)).getStreamUri().toString();
mPlayerService.startActionPlay(mActivity, streamUri, stationName);
stationIDLast = stationIDCurrent;
stationIDCurrent = position;
playback = true;
Toast.makeText(mActivity, R.string.toastmessage_long_press_playback_started, Toast.LENGTH_LONG).show();
}
// vibrate 100 milliseconds
Vibrator v = (Vibrator) mActivity.getSystemService(Context.VIBRATOR_SERVICE);
v.vibrate(100);
// Save station name and ID and playback state
SharedPreferences.Editor editor = settings.edit();
editor.putInt(STATION_ID_CURRENT, stationIDCurrent);
editor.putInt(STATION_ID_LAST, stationIDLast);
editor.putBoolean(PLAYBACK, playback);
editor.apply();
// refresh view
refreshStationList();
}
}
|
app/src/main/java/org/y20k/transistor/MainActivityFragment.java
|
/**
* MainActivityFragment.java
* Implements the main fragment of the main activity
* This fragment is a list view of radio stations
*
* This file is part of
* TRANSISTOR - Radio App for Android
*
* Copyright (c) 2015 - Y20K.org
* Licensed under the MIT-License
* http://opensource.org/licenses/MIT
*/
package org.y20k.transistor;
import android.Manifest;
import android.app.Activity;
import android.app.Application;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.os.Parcelable;
import android.os.Vibrator;
import android.preference.PreferenceManager;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.Fragment;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ListView;
import android.widget.Toast;
import org.y20k.transistor.core.Collection;
import org.y20k.transistor.helpers.CollectionAdapter;
import org.y20k.transistor.helpers.DialogAddStation;
import org.y20k.transistor.helpers.ImageHelper;
import org.y20k.transistor.helpers.StationDownloader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.LinkedList;
/**
* MainActivityFragment class
*/
public final class MainActivityFragment extends Fragment {
/* Define log tag */
private static final String LOG_TAG = MainActivityFragment.class.getSimpleName();
/* Keys */
private static final String ACTION_COLLECTION_CHANGED = "org.y20k.transistor.action.COLLECTION_CHANGED";
private static final String ACTION_PLAYBACK_STARTED = "org.y20k.transistor.action.PLAYBACK_STARTED";
private static final String ACTION_PLAYBACK_STOPPED = "org.y20k.transistor.action.PLAYBACK_STOPPED";
private static final String ACTION_IMAGE_CHANGE_REQUESTED = "org.y20k.transistor.action.IMAGE_CHANGE_REQUESTED";
private static final String LIST_STATE = "ListState";
private static final String STREAM_URI = "streamUri";
private static final String STATION_NAME = "stationName";
private static final String STATION_ID = "stationID";
private static final String STATION_ID_CURRENT = "stationIDCurrent";
private static final String STATION_ID_LAST = "stationIDLast";
private static final String PLAYBACK = "playback";
private static final String TITLE = "title";
private static final String CONTENT = "content";
private static final int REQUEST_LOAD_IMAGE = 1;
private static final int PERMISSION_REQUEST_READ_EXTERNAL_STORAGE = 1;
/* Main class variables */
private Application mApplication;
private Activity mActivity;
private Collection mCollection;
private CollectionAdapter mCollectionAdapter = null;
private File mFolder;
private LinkedList<String> mStationNames;
private LinkedList<Bitmap> mStationImages;
private View mRootView;
private ListView mListView;
private Parcelable mListState;
private int mTempStationImageID;
private PlayerService mPlayerService;
/* Constructor (default) */
public MainActivityFragment() {
}
/* Sets up fragment state: contexts, collection folder, adapter, and broadcast receivers */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // get activity and application contexts
    mActivity = getActivity();
    mApplication = mActivity.getApplication();
    // no saved list scroll position yet; restored in onCreateView if available
    mListState = null;
    // -1 marks "no station image change pending"
    mTempStationImageID = -1;
    try {
        // get collection folder from external storage
        mFolder = new File(mActivity.getExternalFilesDir("Collection").toString());
    } catch (NullPointerException e) {
        // getExternalFilesDir returns null when external storage is unavailable:
        // notify user, log the problem, and close the activity
        Toast.makeText(mActivity, R.string.toastalert_no_external_storage, Toast.LENGTH_LONG).show();
        Log.e(LOG_TAG, "Unable to access external storage.");
        // finish activity
        mActivity.finish();
    }
    // fragment contributes items to the options menu
    setHasOptionsMenu(true);
    // create backing lists and the adapter for the station collection
    mStationNames = new LinkedList<>();
    mStationImages = new LinkedList<>();
    mCollectionAdapter = new CollectionAdapter(mActivity, mStationNames, mStationImages);
    // re-load the station list whenever the adapter reports a data change
    mCollectionAdapter.setCollectionChangedListener(new CollectionAdapter.CollectionChangedListener() {
        @Override
        public void collectionChanged() {
            refreshStationList();
        }
    });
    // register receivers for playback / collection / image-change broadcasts
    initializeBroadcastReceivers();
}
/* Inflates the root view, wires up the station list and its click listeners */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    // restore list scroll position from saved instance, if present
    if (savedInstanceState != null) {
        mListState = savedInstanceState.getParcelable(MainActivityFragment.LIST_STATE);
    }
    // initiate playback service
    mPlayerService = new PlayerService();
    // inflate rootview from xml
    mRootView = inflater.inflate(R.layout.fragment_main, container, false);
    // get reference to list view from inflated root view
    mListView = (ListView) mRootView.findViewById(R.id.main_listview_collection);
    // attach adapter to list view
    mListView.setAdapter(mCollectionAdapter);
    // single tap: open the player screen for the tapped station
    mListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
        // inner method override for OnItemClickListener
        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            if (mCollection != null) {
                // resolve station name and stream URL via the adapter's item mapping
                String stationName = mCollection.getStations().get((Integer) mCollectionAdapter.getItem(position)).getStationName();
                String streamUri = mCollection.getStations().get((Integer) mCollectionAdapter.getItem(position)).getStreamUri().toString();
                // add name, url and id of station to intent
                Intent intent = new Intent(mActivity, PlayerActivity.class);
                intent.putExtra(STATION_NAME, stationName);
                intent.putExtra(STREAM_URI, streamUri);
                intent.putExtra(STATION_ID, position);
                // start activity with intent
                startActivity(intent);
            }
        }
    });
    // tap and hold: toggle playback for the held station
    mListView.setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() {
        @Override
        public boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id) {
            handleLongClick(position);
            return true;
        }
    });
    // return list view
    return mRootView;
}
@Override
public void onResume() {
    super.onResume();
    // process a possible ACTION_VIEW intent carrying a new station URL
    handleNewStationIntent();
}
@Override
public void onStart() {
    super.onStart();
    // (re-)fill the collection adapter with stations from storage
    refreshStationList();
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();
    // action bar - add: open the "add station" dialog
    if (id == R.id.menu_add) {
        DialogAddStation dialog = new DialogAddStation(mActivity);
        dialog.show();
        return true;
    }
    // action bar menu - about: show the "about" info sheet
    else if (id == R.id.menu_about) {
        showInfosheet(R.string.header_about, R.string.html_about);
        return true;
    }
    // action bar menu - how to: show the "how to" info sheet
    else if (id == R.id.menu_howto) {
        showInfosheet(R.string.header_howto, R.string.html_howto);
        return true;
    }
    return super.onOptionsItemSelected(item);
}

/* Starts InfosheetActivity with the given title and content string resources
   (shared by the "about" and "how to" menu actions, which were previously duplicated) */
private void showInfosheet(int titleResId, int contentResId) {
    // resolve title and content
    String title = mActivity.getString(titleResId);
    String content = mActivity.getString(contentResId);
    // create intent and attach title and content
    Intent intent = new Intent(mActivity, InfosheetActivity.class);
    intent.putExtra(TITLE, title);
    intent.putExtra(CONTENT, content);
    // start activity
    startActivity(intent);
}
@Override
public void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    // persist the list view scroll position so it survives configuration changes
    mListState = mListView.onSaveInstanceState();
    outState.putParcelable(LIST_STATE, mListState);
}
/* Fills collection adapter: re-reads the collection from mFolder and builds
   the per-station name and image lists the adapter displays */
private void fillCollectionAdapter() {
    Bitmap stationImage;
    Bitmap stationImageSmall;
    String stationName;
    ImageHelper imageHelper;
    // create collection
    Log.v(LOG_TAG, "Create collection of stations (folder:" + mFolder.toString() + ").");
    mCollection = new Collection(mFolder);
    // put stations into collection adapter
    for (int i = 0; i < mCollection.getStations().size(); i++) {
        // set name of station
        stationName = mCollection.getStations().get(i).getStationName();
        // add name to linked list of names
        mStationNames.add(stationName);
        // set image for station
        if (mCollection.getStations().get(i).getStationImageFile().exists()) {
            // station image
            // NOTE(review): decodeFile can return null for a corrupt image file;
            // presumably ImageHelper tolerates a null input - confirm
            stationImageSmall = BitmapFactory.decodeFile(mCollection.getStations().get(i).getStationImageFile().toString());
        } else {
            // default image
            stationImageSmall = BitmapFactory.decodeResource(mActivity.getResources(), R.drawable.ic_notesymbol);
        }
        imageHelper = new ImageHelper(stationImageSmall, mActivity);
        imageHelper.setBackgroundColor(R.color.transistor_grey_lighter);
        // 192px circular framed version used by the list
        stationImage = imageHelper.createCircularFramedImage(192);
        // add image to linked list of images
        mStationImages.add(stationImage);
    }
    // hand the freshly built collection to the adapter and redraw
    mCollectionAdapter.setCollection(mCollection);
    mCollectionAdapter.notifyDataSetChanged();
}
/* Clears and re-creates the station list shown in the collection adapter,
   then toggles the call-to-action view for an empty collection */
private void refreshStationList() {
    // drop previously loaded names and images before re-filling
    boolean hasEntries = !mStationNames.isEmpty() && !mStationImages.isEmpty();
    if (hasEntries) {
        mStationNames.clear();
        mStationImages.clear();
    }
    fillCollectionAdapter();
    // show the call to action only when the collection is empty
    View actioncall = mRootView.findViewById(R.id.main_actioncall_layout);
    int visibility = mCollectionAdapter.isEmpty() ? View.VISIBLE : View.GONE;
    actioncall.setVisibility(visibility);
}
/* Handles external taps on streaming links */
private void handleNewStationIntent() {
    // get the intent the hosting activity was started with
    Intent intent = mActivity.getIntent();
    // only react to intents of type VIEW
    if (Intent.ACTION_VIEW.equals(intent.getAction())) {
        // the data string is the station URL regardless of the mime type
        // (the previous code branched on intent.getType() but both branches
        // performed the exact same assignment)
        String newStationURL = intent.getDataString();
        // clear the intent action so it is not processed again on resume
        intent.setAction("");
        // check for null
        if (newStationURL != null) {
            // download and add new station
            StationDownloader stationDownloader = new StationDownloader(newStationURL, mActivity);
            stationDownloader.execute();
            // notify interested components that the collection changed
            Intent i = new Intent();
            i.setAction(ACTION_COLLECTION_CHANGED);
            LocalBroadcastManager.getInstance(mActivity).sendBroadcast(i);
        }
        // unsuccessful - log failure
        else {
            Log.v(LOG_TAG, "Received an empty intent");
        }
    }
}
/* Initializes broadcast receivers for onCreate.
   NOTE(review): all four receivers are registered on the application-scoped
   LocalBroadcastManager but never unregistered - looks like they leak the
   fragment across destroy/re-create; confirm and unregister in onDestroy. */
private void initializeBroadcastReceivers() {
    // broadcast receiver: player service stopped playback
    BroadcastReceiver playbackStoppedReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            refreshStationList();
        }
    };
    IntentFilter playbackStoppedIntentFilter = new IntentFilter(ACTION_PLAYBACK_STOPPED);
    LocalBroadcastManager.getInstance(mApplication).registerReceiver(playbackStoppedReceiver, playbackStoppedIntentFilter);
    // broadcast receiver: player service started playback
    BroadcastReceiver playbackStartedReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            refreshStationList();
        }
    };
    IntentFilter playbackStartedIntentFilter = new IntentFilter(ACTION_PLAYBACK_STARTED);
    LocalBroadcastManager.getInstance(mApplication).registerReceiver(playbackStartedReceiver, playbackStartedIntentFilter);
    // broadcast receiver: station added, deleted, or changed
    BroadcastReceiver collectionChangedReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            refreshStationList();
        }
    };
    IntentFilter collectionChangedIntentFilter = new IntentFilter(ACTION_COLLECTION_CHANGED);
    LocalBroadcastManager.getInstance(mApplication).registerReceiver(collectionChangedReceiver, collectionChangedIntentFilter);
    // broadcast receiver: listen for request to change station image
    BroadcastReceiver imageChangeRequestReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            // remember which station's image should be replaced (-1 = none)
            mTempStationImageID = intent.getIntExtra(STATION_ID, -1);
            // start image picker
            selectFromImagePicker();
        }
    };
    IntentFilter imageChangeRequesIntentFilter = new IntentFilter(ACTION_IMAGE_CHANGE_REQUESTED);
    LocalBroadcastManager.getInstance(mApplication).registerReceiver(imageChangeRequestReceiver, imageChangeRequesIntentFilter);
}
/* Handles the result of the READ_EXTERNAL_STORAGE permission request
   issued by selectFromImagePicker */
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    switch (requestCode) {
        case PERMISSION_REQUEST_READ_EXTERNAL_STORAGE: {
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                // permission granted - launch the system image picker
                Intent pickImageIntent = new Intent(Intent.ACTION_PICK, android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
                startActivityForResult(pickImageIntent, REQUEST_LOAD_IMAGE);
            } else {
                // permission denied - log instead of silently ignoring; the user
                // can re-trigger the request via the image-change action
                Log.v(LOG_TAG, "READ_EXTERNAL_STORAGE permission denied.");
            }
            // missing break in the original - harmless with a single case, but
            // would fall through once more request codes are handled
            break;
        }
        default:
            break;
    }
}
/* Receives the result of the system image picker */
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    // only handle a successful return from the image picker
    boolean imagePicked = requestCode == REQUEST_LOAD_IMAGE
            && resultCode == Activity.RESULT_OK
            && data != null;
    if (imagePicked) {
        // hand the selected image over for processing and storage
        processNewImage(data.getData());
    }
}
/* Processes new image and saves it to storage as the image of the station
   whose id was stashed in mTempStationImageID by the image-change broadcast */
private void processNewImage(Uri newImageUri) {
    ImageHelper imageHelper = new ImageHelper(newImageUri, mActivity);
    Bitmap newImage = imageHelper.getInputImage();
    if (newImage != null) {
        // overwrite the station's image file with the picked bitmap as PNG
        // NOTE(review): assumes mTempStationImageID is a valid index into the
        // station list at this point - confirm it cannot be -1 here
        File stationImageFile = mCollection.getStations().get(mTempStationImageID).getStationImageFile();
        try (FileOutputStream out = new FileOutputStream(stationImageFile)) {
            newImage.compress(Bitmap.CompressFormat.PNG, 100, out);
        } catch (IOException e) {
            Log.e(LOG_TAG, "Unable to save: " + newImage.toString());
        }
    } else {
        // decoding the picked content Uri failed
        Log.e(LOG_TAG, "Unable to get image from media picker: " + newImageUri.toString());
        // TODO handle error here
    }
}
/* Check permissions and start image picker for a new station image */
private void selectFromImagePicker() {
    // permission to read external storage granted
    if (ActivityCompat.checkSelfPermission(mActivity,
            Manifest.permission.READ_EXTERNAL_STORAGE)
            == PackageManager.PERMISSION_GRANTED) {
        // get system picker for images
        Intent pickImageIntent = new Intent(Intent.ACTION_PICK, android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
        // NOTE(review): this calls startActivityForResult on the ACTIVITY,
        // while the permission-granted path in onRequestPermissionsResult uses
        // the FRAGMENT's startActivityForResult - the result may be delivered
        // to different onActivityResult callbacks; confirm which is intended
        mActivity.startActivityForResult(pickImageIntent, REQUEST_LOAD_IMAGE);
    }
    // permission to read external storage not granted
    else {
        if (ActivityCompat.shouldShowRequestPermissionRationale(mActivity, Manifest.permission.READ_EXTERNAL_STORAGE)) {
            // ask for permission and explain why via a snackbar
            Snackbar snackbar = Snackbar.make(mRootView, R.string.snackbar_request_storage_access, Snackbar.LENGTH_INDEFINITE);
            snackbar.setAction(R.string.dialog_generic_button_okay, new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    ActivityCompat.requestPermissions(mActivity, new String[]{Manifest.permission.READ_EXTERNAL_STORAGE},
                            PERMISSION_REQUEST_READ_EXTERNAL_STORAGE);
                }
            });
            snackbar.show();
        } else {
            // ask for permission without explanation
            ActivityCompat.requestPermissions(mActivity, new String[]{Manifest.permission.READ_EXTERNAL_STORAGE},
                    PERMISSION_REQUEST_READ_EXTERNAL_STORAGE);
        }
    }
}
/* Handles long click on a list item: toggles playback of the held station
   and persists the new playback state in shared preferences */
private void handleLongClick(int position) {
    int stationIDCurrent;
    int stationIDLast;
    // read the currently playing station id and playback flag
    SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
    stationIDCurrent = settings.getInt(STATION_ID_CURRENT, -1);
    boolean playback = settings.getBoolean(PLAYBACK, false);
    if (playback && position == stationIDCurrent ) {
        // the held station is already playing - stop playback service
        mPlayerService.startActionStop(mActivity);
        stationIDLast = stationIDCurrent;
        playback = false;
        Toast.makeText(mActivity, R.string.toastmessage_long_press_playback_stopped, Toast.LENGTH_LONG).show();
    } else {
        // start playback service for the held station
        // NOTE(review): the station is resolved via mCollectionAdapter.getItem(position),
        // but the raw list position is stored as STATION_ID_CURRENT below - if the
        // adapter's item mapping is not the identity these diverge; confirm
        String stationName = mCollection.getStations().get((Integer) mCollectionAdapter.getItem(position)).getStationName();
        String streamUri = mCollection.getStations().get((Integer) mCollectionAdapter.getItem(position)).getStreamUri().toString();
        mPlayerService.startActionPlay(mActivity, streamUri, stationName);
        stationIDLast = stationIDCurrent;
        stationIDCurrent = position;
        playback = true;
        Toast.makeText(mActivity, R.string.toastmessage_long_press_playback_started, Toast.LENGTH_LONG).show();
    }
    // vibrate 100 milliseconds as haptic feedback
    Vibrator v = (Vibrator) mActivity.getSystemService(Context.VIBRATOR_SERVICE);
    v.vibrate(100);
    // persist current/last station id and playback state
    SharedPreferences.Editor editor = settings.edit();
    editor.putInt(STATION_ID_CURRENT, stationIDCurrent);
    editor.putInt(STATION_ID_LAST, stationIDLast);
    editor.putBoolean(PLAYBACK, playback);
    editor.apply();
    // refresh view
    refreshStationList();
}
}
|
possible fix for bug "Simultaneous playback of several radiostations" (#31)
|
app/src/main/java/org/y20k/transistor/MainActivityFragment.java
|
possible fix for bug "Simultaneous playback of several radiostations" (#31)
|
|
Java
|
mit
|
9498dbfd93eadf25052ed59888d442d6e21cc130
| 0
|
itinance/react-native-fs,johanneslumpe/react-native-fs,itinance/react-native-fs,johanneslumpe/react-native-fs,johanneslumpe/react-native-fs,itinance/react-native-fs,itinance/react-native-fs
|
package com.rnfs;
import java.io.IOException;
import java.util.Map;
import java.util.HashMap;
import android.os.Environment;
import android.os.StatFs;
import android.util.Base64;
import android.support.annotation.Nullable;
import android.util.SparseArray;
import java.io.File;
import java.io.OutputStream;
import java.io.InputStream;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.net.URL;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.modules.core.RCTNativeAppEventEmitter;
public class RNFSManager extends ReactContextBaseJavaModule {
private static final String NSDocumentDirectoryPath = "NSDocumentDirectoryPath";
private static final String NSExternalDirectoryPath = "NSExternalDirectoryPath";
private static final String NSPicturesDirectoryPath = "NSPicturesDirectoryPath";
private static final String NSTemporaryDirectoryPath = "NSTemporaryDirectoryPath";
private static final String NSCachesDirectoryPath = "NSCachesDirectoryPath";
private static final String NSDocumentDirectory = "NSDocumentDirectory";
private static final String NSFileTypeRegular = "NSFileTypeRegular";
private static final String NSFileTypeDirectory = "NSFileTypeDirectory";
private SparseArray<Downloader> downloaders = new SparseArray<Downloader>();
/* Creates the native module; the React context is held by the base class */
public RNFSManager(ReactApplicationContext reactContext) {
    super(reactContext);
}
@Override
public String getName() {
    // name under which this module is exposed to JavaScript (NativeModules.RNFSManager)
    return "RNFSManager";
}
/* Decodes base64Content and writes it to filepath, truncating any existing file.
   Resolves with null on success, rejects with the thrown exception otherwise. */
@ReactMethod
public void writeFile(String filepath, String base64Content, Promise promise) {
    try {
        byte[] bytes = Base64.decode(base64Content, Base64.DEFAULT);
        FileOutputStream outputStream = new FileOutputStream(filepath, false);
        try {
            outputStream.write(bytes);
        } finally {
            // close even when write() throws so the file descriptor is not leaked
            outputStream.close();
        }
        promise.resolve(null);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
/* Decodes base64Content and appends it to the end of filepath.
   Resolves with null on success, rejects with the thrown exception otherwise. */
@ReactMethod
public void appendFile(String filepath, String base64Content, Promise promise) {
    try {
        byte[] bytes = Base64.decode(base64Content, Base64.DEFAULT);
        // second constructor argument true = append mode
        FileOutputStream outputStream = new FileOutputStream(filepath, true);
        try {
            outputStream.write(bytes);
        } finally {
            // close even when write() throws so the file descriptor is not leaked
            outputStream.close();
        }
        promise.resolve(null);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
/* Resolves with true when a file or directory exists at filepath, false otherwise */
@ReactMethod
public void exists(String filepath, Promise promise) {
    try {
        boolean present = new File(filepath).exists();
        promise.resolve(present);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
/* Reads the whole file at filepath and resolves with its contents base64-encoded.
   Rejects when the path is a directory, missing, or reading fails. */
@ReactMethod
public void readFile(String filepath, Promise promise) {
    try {
        File file = new File(filepath);
        if (file.isDirectory()) {
            rejectFileIsDirectory(promise);
            return;
        }
        if (!file.exists()) {
            rejectFileNotFound(promise, filepath);
            return;
        }
        FileInputStream inputStream = new FileInputStream(filepath);
        byte[] buffer = new byte[(int)file.length()];
        try {
            // InputStream.read() may return fewer bytes than requested -
            // loop until the buffer is full (the original single read()
            // could silently truncate large files)
            int offset = 0;
            while (offset < buffer.length) {
                int read = inputStream.read(buffer, offset, buffer.length - offset);
                if (read < 0) {
                    break; // unexpected EOF - encode what was read
                }
                offset += read;
            }
        } finally {
            // close even on failure so the file descriptor is not leaked
            inputStream.close();
        }
        String base64Content = Base64.encodeToString(buffer, Base64.NO_WRAP);
        promise.resolve(base64Content);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
/* Moves a file: tries an atomic rename first, falls back to copy-and-delete
   (rename fails across filesystems/mount points on Android) */
@ReactMethod
public void moveFile(String filepath, String destPath, Promise promise) {
    try {
        File inFile = new File(filepath);
        if (!inFile.renameTo(new File(destPath))) {
            // cross-device move: copy then remove the source
            copyFile(filepath, destPath);
            // NOTE(review): delete() result is ignored - a failed delete leaves
            // both copies behind silently; confirm whether that should reject
            inFile.delete();
        }
        // NOTE(review): resolves with true while the other mutations resolve
        // with null - confirm the JS side relies on this value
        promise.resolve(true);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
/* JS-facing copy: delegates to the private copyFile helper and resolves with null */
@ReactMethod
public void copyFile(String filepath, String destPath, Promise promise) {
    try {
        copyFile(filepath, destPath);
        promise.resolve(null);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
/* Copies the file at filepath to destPath in 1 KiB chunks.
   Both streams are closed even when the copy fails part-way (the original
   leaked both file descriptors on an IOException mid-copy). */
private void copyFile(String filepath, String destPath) throws IOException {
    InputStream in = new FileInputStream(filepath);
    try {
        OutputStream out = new FileOutputStream(destPath);
        try {
            byte[] buffer = new byte[1024];
            int length;
            while ((length = in.read(buffer)) > 0) {
                out.write(buffer, 0, length);
            }
        } finally {
            out.close();
        }
    } finally {
        in.close();
    }
}
/* Lists a directory's entries, resolving with an array of
   {name, path, size, type} maps (type: 1 = directory, 0 = file). */
@ReactMethod
public void readDir(String directory, Promise promise) {
    try {
        File file = new File(directory);
        if (!file.exists()) throw new Exception("Folder does not exist");
        File[] files = file.listFiles();
        // listFiles() returns null when the path is not a directory or cannot
        // be read - the original then failed with an opaque NullPointerException
        if (files == null) throw new Exception("Path is not a readable directory");
        WritableArray fileMaps = Arguments.createArray();
        for (File childFile : files) {
            WritableMap fileMap = Arguments.createMap();
            fileMap.putString("name", childFile.getName());
            fileMap.putString("path", childFile.getAbsolutePath());
            fileMap.putInt("size", (int)childFile.length());
            fileMap.putInt("type", childFile.isDirectory() ? 1 : 0);
            fileMaps.pushMap(fileMap);
        }
        promise.resolve(fileMaps);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, directory, ex);
    }
}
/* Resolves with {ctime, mtime, size, type} for the file at filepath
   (times in epoch seconds; type: 1 = directory, 0 = file) */
@ReactMethod
public void stat(String filepath, Promise promise) {
    try {
        File file = new File(filepath);
        if (!file.exists()) throw new Exception("File does not exist");
        WritableMap statMap = Arguments.createMap();
        // NOTE(review): java.io.File exposes no creation time, so ctime is
        // reported as the modification time; also the int casts truncate
        // sizes over 2 GiB and times after 2038 - confirm acceptable
        statMap.putInt("ctime", (int)(file.lastModified() / 1000));
        statMap.putInt("mtime", (int)(file.lastModified() / 1000));
        statMap.putInt("size", (int)file.length());
        statMap.putInt("type", file.isDirectory() ? 1 : 0);
        promise.resolve(statMap);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
@ReactMethod
public void unlink(String filepath, Promise promise) {
    // Removes a file, or a directory together with all of its contents.
    try {
        File target = new File(filepath);
        if (!target.exists()) {
            throw new Exception("File does not exist");
        }
        DeleteRecursive(target);
        promise.resolve(null);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
/**
 * Depth-first delete: removes all children of a directory before removing
 * the node itself. Guards against listFiles() returning null (unreadable
 * directory), which made the original loop throw a NullPointerException.
 */
private void DeleteRecursive(File fileOrDirectory) {
    if (fileOrDirectory.isDirectory()) {
        File[] children = fileOrDirectory.listFiles();
        if (children != null) {
            for (File child : children) {
                DeleteRecursive(child);
            }
        }
    }
    fileOrDirectory.delete();
}
@ReactMethod
public void mkdir(String filepath, ReadableMap options, Promise promise) {
    // Creates the directory and any missing parents; fails if nothing exists
    // at the path afterwards. The options map is currently unused.
    try {
        File dir = new File(filepath);
        dir.mkdirs();
        if (!dir.exists()) {
            throw new Exception("Directory could not be created");
        }
        promise.resolve(null);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
// Emits a native event with the given name and payload to the JavaScript side.
private void sendEvent(ReactContext reactContext, String eventName, @Nullable WritableMap params) {
reactContext
.getJSModule(RCTNativeAppEventEmitter.class)
.emit(eventName, params);
}
@ReactMethod
// Starts an asynchronous download of options.fromUrl into options.toFile.
// Progress and begin notifications are pushed as "DownloadBegin-<jobId>" and
// "DownloadProgress-<jobId>" events; the promise settles only on completion.
// The Downloader is kept in this.downloaders so stopDownload(jobId) can cancel it.
public void downloadFile(final ReadableMap options, final Promise promise) {
try {
File file = new File(options.getString("toFile"));
URL url = new URL(options.getString("fromUrl"));
final int jobId = options.getInt("jobId");
ReadableMap headers = options.getMap("headers");
int progressDivider = options.getInt("progressDivider");
DownloadParams params = new DownloadParams();
params.src = url;
params.dest = file;
params.headers = headers;
params.progressDivider = progressDivider;
// Completion: resolve with job metadata, or map the failure onto the promise.
params.onTaskCompleted = new DownloadParams.OnTaskCompleted() {
public void onTaskCompleted(DownloadResult res) {
if (res.exception == null) {
WritableMap infoMap = Arguments.createMap();
infoMap.putInt("jobId", jobId);
infoMap.putInt("statusCode", res.statusCode);
infoMap.putInt("bytesWritten", res.bytesWritten);
promise.resolve(infoMap);
} else {
reject(promise, options.getString("toFile"), res.exception);
}
}
};
// First response received: forward status, length and response headers to JS.
params.onDownloadBegin = new DownloadParams.OnDownloadBegin() {
public void onDownloadBegin(int statusCode, int contentLength, Map<String, String> headers) {
WritableMap headersMap = Arguments.createMap();
for (Map.Entry<String, String> entry : headers.entrySet()) {
headersMap.putString(entry.getKey(), entry.getValue());
}
WritableMap data = Arguments.createMap();
data.putInt("jobId", jobId);
data.putInt("statusCode", statusCode);
data.putInt("contentLength", contentLength);
data.putMap("headers", headersMap);
sendEvent(getReactApplicationContext(), "DownloadBegin-" + jobId, data);
}
};
// Periodic progress callback (throttled by progressDivider in the Downloader).
params.onDownloadProgress = new DownloadParams.OnDownloadProgress() {
public void onDownloadProgress(int contentLength, int bytesWritten) {
WritableMap data = Arguments.createMap();
data.putInt("jobId", jobId);
data.putInt("contentLength", contentLength);
data.putInt("bytesWritten", bytesWritten);
sendEvent(getReactApplicationContext(), "DownloadProgress-" + jobId, data);
}
};
Downloader downloader = new Downloader();
downloader.execute(params);
this.downloaders.put(jobId, downloader);
} catch (Exception ex) {
ex.printStackTrace();
reject(promise, options.getString("toFile"), ex);
}
}
@ReactMethod
public void stopDownload(int jobId) {
    // Cancels an in-flight download; a no-op for unknown job ids.
    Downloader downloader = this.downloaders.get(jobId);
    if (downloader == null) {
        return;
    }
    downloader.stop();
}
@ReactMethod
public void pathForBundle(String bundleNamed, Promise promise) {
// TODO: Not sure what the Android equivalent of an iOS bundle path would be.
// NOTE(review): the promise is never resolved nor rejected here, so the JS
// caller will wait forever - confirm whether this stub should reject instead.
}
@ReactMethod
public void getFSInfo(Promise promise) {
    // Reports total and free bytes of the internal data partition. Values are
    // sent as doubles because the bridge's putInt is 32-bit and would overflow.
    StatFs stat = new StatFs(Environment.getDataDirectory().getPath());
    long totalSpace;
    long freeSpace;
    if (android.os.Build.VERSION.SDK_INT >= 18) {
        // API 18+ exposes 64-bit accessors directly.
        totalSpace = stat.getTotalBytes();
        freeSpace = stat.getFreeBytes();
    } else {
        // Older APIs: derive byte counts from block size times block counts.
        long blockSize = stat.getBlockSize();
        totalSpace = blockSize * stat.getBlockCount();
        freeSpace = blockSize * stat.getAvailableBlocks();
    }
    WritableMap info = Arguments.createMap();
    info.putDouble("totalSpace", (double) totalSpace);
    info.putDouble("freeSpace", (double) freeSpace);
    promise.resolve(info);
}
// Maps a native exception onto the promise, translating FileNotFoundException
// into a node-style ENOENT rejection so JS sees consistent error codes.
private void reject(Promise promise, String filepath, Exception ex) {
if (ex instanceof FileNotFoundException) {
rejectFileNotFound(promise, filepath);
return;
}
promise.reject(null, ex.getMessage());
}
// Rejects with a node-style ENOENT error for a missing file.
private void rejectFileNotFound(Promise promise, String filepath) {
promise.reject("ENOENT", "ENOENT: no such file or directory, open '" + filepath + "'");
}
// Rejects with a node-style EISDIR error when a read targets a directory.
private void rejectFileIsDirectory(Promise promise) {
promise.reject("EISDIR", "EISDIR: illegal operation on a directory, read");
}
@Override
public Map<String, Object> getConstants() {
    // Constants exported to JavaScript: well-known directory paths plus the
    // numeric file-type codes used by stat/readDir.
    final Map<String, Object> constants = new HashMap<>();
    constants.put(NSDocumentDirectory, 0);
    constants.put(NSDocumentDirectoryPath, this.getReactApplicationContext().getFilesDir().getAbsolutePath());
    constants.put(NSTemporaryDirectoryPath, null);
    // External storage may be absent (no SD card / unmounted): export null then.
    File externalDirectory = this.getReactApplicationContext().getExternalFilesDir(null);
    constants.put(NSExternalDirectoryPath, externalDirectory == null ? null : externalDirectory.getAbsolutePath());
    constants.put(NSPicturesDirectoryPath, Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath());
    constants.put(NSCachesDirectoryPath, this.getReactApplicationContext().getCacheDir().getAbsolutePath());
    constants.put(NSFileTypeRegular, 0);
    constants.put(NSFileTypeDirectory, 1);
    return constants;
}
}
|
android/src/main/java/com/rnfs/RNFSManager.java
|
package com.rnfs;
import java.util.Map;
import java.util.HashMap;
import android.os.Environment;
import android.os.StatFs;
import android.util.Base64;
import android.support.annotation.Nullable;
import android.util.SparseArray;
import java.io.File;
import java.io.OutputStream;
import java.io.InputStream;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.net.URL;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.modules.core.RCTNativeAppEventEmitter;
public class RNFSManager extends ReactContextBaseJavaModule {
private static final String NSDocumentDirectoryPath = "NSDocumentDirectoryPath";
private static final String NSExternalDirectoryPath = "NSExternalDirectoryPath";
private static final String NSPicturesDirectoryPath = "NSPicturesDirectoryPath";
private static final String NSTemporaryDirectoryPath = "NSTemporaryDirectoryPath";
private static final String NSCachesDirectoryPath = "NSCachesDirectoryPath";
private static final String NSDocumentDirectory = "NSDocumentDirectory";
private static final String NSFileTypeRegular = "NSFileTypeRegular";
private static final String NSFileTypeDirectory = "NSFileTypeDirectory";
private SparseArray<Downloader> downloaders = new SparseArray<Downloader>();
// Creates the module; the React context is held by the superclass.
public RNFSManager(ReactApplicationContext reactContext) {
super(reactContext);
}
@Override
public String getName() {
// Name under which this module is exposed to JS (NativeModules.RNFSManager).
return "RNFSManager";
}
@ReactMethod
public void writeFile(String filepath, String base64Content, Promise promise) {
    // Decodes the base64 payload and (over)writes it to the given path.
    try {
        byte[] bytes = Base64.decode(base64Content, Base64.DEFAULT);
        FileOutputStream outputStream = new FileOutputStream(filepath, false);
        try {
            outputStream.write(bytes);
        } finally {
            // Close in finally so the descriptor is not leaked when write() fails.
            outputStream.close();
        }
        promise.resolve(null);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
@ReactMethod
public void appendFile(String filepath, String base64Content, Promise promise) {
    // Decodes the base64 payload and appends it to the file (append mode).
    try {
        byte[] bytes = Base64.decode(base64Content, Base64.DEFAULT);
        FileOutputStream outputStream = new FileOutputStream(filepath, true);
        try {
            outputStream.write(bytes);
        } finally {
            // Close in finally so the descriptor is not leaked when write() fails.
            outputStream.close();
        }
        promise.resolve(null);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
@ReactMethod
public void exists(String filepath, Promise promise) {
    // Resolves true when something (file or directory) exists at the path.
    try {
        promise.resolve(new File(filepath).exists());
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
@ReactMethod
public void readFile(String filepath, Promise promise) {
    // Reads the whole file and resolves its contents as a base64 string.
    try {
        File file = new File(filepath);
        if (file.isDirectory()) {
            rejectFileIsDirectory(promise);
            return;
        }
        if (!file.exists()) {
            rejectFileNotFound(promise, filepath);
            return;
        }
        FileInputStream inputStream = new FileInputStream(filepath);
        byte[] buffer = new byte[(int)file.length()];
        try {
            // A single read() is not guaranteed to fill the buffer, so loop
            // until EOF. The original also never closed the stream (leak).
            int offset = 0;
            while (offset < buffer.length) {
                int read = inputStream.read(buffer, offset, buffer.length - offset);
                if (read < 0) break;
                offset += read;
            }
        } finally {
            inputStream.close();
        }
        String base64Content = Base64.encodeToString(buffer, Base64.NO_WRAP);
        promise.resolve(base64Content);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
@ReactMethod
public void moveFile(String filepath, String destPath, Promise promise) {
    // Moves a file. File.renameTo returns false instead of throwing (e.g. when
    // source and destination are on different mount points); the original code
    // ignored that result and resolved true even when nothing had moved.
    // Fall back to a byte copy followed by deleting the source.
    try {
        File from = new File(filepath);
        File to = new File(destPath);
        if (!from.renameTo(to)) {
            InputStream in = new FileInputStream(from);
            try {
                OutputStream out = new FileOutputStream(to);
                try {
                    byte[] buffer = new byte[1024];
                    int length;
                    while ((length = in.read(buffer)) > 0) {
                        out.write(buffer, 0, length);
                    }
                } finally {
                    out.close();
                }
            } finally {
                in.close();
            }
            from.delete();
        }
        promise.resolve(true);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
@ReactMethod
public void copyFile(String filepath, String destPath, Promise promise) {
    // Copies filepath to destPath byte-for-byte. Streams are closed in finally
    // blocks; the original leaked both when the copy failed part-way.
    try {
        InputStream in = new FileInputStream(filepath);
        try {
            OutputStream out = new FileOutputStream(destPath);
            try {
                byte[] buffer = new byte[1024];
                int length;
                while ((length = in.read(buffer)) > 0) {
                    out.write(buffer, 0, length);
                }
            } finally {
                out.close();
            }
        } finally {
            in.close();
        }
        promise.resolve(null);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
@ReactMethod
public void readDir(String directory, Promise promise) {
    // Lists the direct children of a directory as maps of
    // { name, path, size, type } where type is 1 for directories, 0 for files.
    try {
        File file = new File(directory);
        if (!file.exists()) throw new Exception("Folder does not exist");
        // listFiles() returns null when the path is not a directory or cannot
        // be read; the original code NPE'd on the for-loop in that case.
        File[] files = file.listFiles();
        if (files == null) throw new Exception("Path is not a readable directory: " + directory);
        WritableArray fileMaps = Arguments.createArray();
        for (File childFile : files) {
            WritableMap fileMap = Arguments.createMap();
            fileMap.putString("name", childFile.getName());
            fileMap.putString("path", childFile.getAbsolutePath());
            fileMap.putInt("size", (int)childFile.length()); // NOTE: truncates above 2 GB
            fileMap.putInt("type", childFile.isDirectory() ? 1 : 0);
            fileMaps.pushMap(fileMap);
        }
        promise.resolve(fileMaps);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, directory, ex);
    }
}
@ReactMethod
// Resolves a map of { ctime, mtime, size, type } for the given path.
// java.io.File exposes no creation time, so last-modified (seconds since
// epoch) is reported for both ctime and mtime.
public void stat(String filepath, Promise promise) {
try {
File file = new File(filepath);
if (!file.exists()) throw new Exception("File does not exist");
WritableMap statMap = Arguments.createMap();
statMap.putInt("ctime", (int)(file.lastModified() / 1000));
statMap.putInt("mtime", (int)(file.lastModified() / 1000));
// NOTE(review): the int cast truncates sizes above 2 GB.
statMap.putInt("size", (int)file.length());
statMap.putInt("type", file.isDirectory() ? 1 : 0); // 1 = directory, 0 = regular file
promise.resolve(statMap);
} catch (Exception ex) {
ex.printStackTrace();
reject(promise, filepath, ex);
}
}
@ReactMethod
public void unlink(String filepath, Promise promise) {
    // Removes a file, or a directory together with everything inside it.
    try {
        File target = new File(filepath);
        if (!target.exists()) {
            throw new Exception("File does not exist");
        }
        DeleteRecursive(target);
        promise.resolve(null);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
/**
 * Depth-first delete: removes all children of a directory before removing
 * the node itself. Guards against listFiles() returning null (unreadable
 * directory), which made the original loop throw a NullPointerException.
 */
private void DeleteRecursive(File fileOrDirectory) {
    if (fileOrDirectory.isDirectory()) {
        File[] children = fileOrDirectory.listFiles();
        if (children != null) {
            for (File child : children) {
                DeleteRecursive(child);
            }
        }
    }
    fileOrDirectory.delete();
}
@ReactMethod
public void mkdir(String filepath, ReadableMap options, Promise promise) {
    // Creates the directory and any missing parents; fails if nothing exists
    // at the path afterwards. The options map is currently unused.
    try {
        File dir = new File(filepath);
        dir.mkdirs();
        if (!dir.exists()) {
            throw new Exception("Directory could not be created");
        }
        promise.resolve(null);
    } catch (Exception ex) {
        ex.printStackTrace();
        reject(promise, filepath, ex);
    }
}
// Emits a native event with the given name and payload to the JavaScript side.
private void sendEvent(ReactContext reactContext, String eventName, @Nullable WritableMap params) {
reactContext
.getJSModule(RCTNativeAppEventEmitter.class)
.emit(eventName, params);
}
@ReactMethod
// Starts an asynchronous download of options.fromUrl into options.toFile.
// Progress and begin notifications are pushed as "DownloadBegin-<jobId>" and
// "DownloadProgress-<jobId>" events; the promise settles only on completion.
// The Downloader is kept in this.downloaders so stopDownload(jobId) can cancel it.
public void downloadFile(final ReadableMap options, final Promise promise) {
try {
File file = new File(options.getString("toFile"));
URL url = new URL(options.getString("fromUrl"));
final int jobId = options.getInt("jobId");
ReadableMap headers = options.getMap("headers");
int progressDivider = options.getInt("progressDivider");
DownloadParams params = new DownloadParams();
params.src = url;
params.dest = file;
params.headers = headers;
params.progressDivider = progressDivider;
// Completion: resolve with job metadata, or map the failure onto the promise.
params.onTaskCompleted = new DownloadParams.OnTaskCompleted() {
public void onTaskCompleted(DownloadResult res) {
if (res.exception == null) {
WritableMap infoMap = Arguments.createMap();
infoMap.putInt("jobId", jobId);
infoMap.putInt("statusCode", res.statusCode);
infoMap.putInt("bytesWritten", res.bytesWritten);
promise.resolve(infoMap);
} else {
reject(promise, options.getString("toFile"), res.exception);
}
}
};
// First response received: forward status, length and response headers to JS.
params.onDownloadBegin = new DownloadParams.OnDownloadBegin() {
public void onDownloadBegin(int statusCode, int contentLength, Map<String, String> headers) {
WritableMap headersMap = Arguments.createMap();
for (Map.Entry<String, String> entry : headers.entrySet()) {
headersMap.putString(entry.getKey(), entry.getValue());
}
WritableMap data = Arguments.createMap();
data.putInt("jobId", jobId);
data.putInt("statusCode", statusCode);
data.putInt("contentLength", contentLength);
data.putMap("headers", headersMap);
sendEvent(getReactApplicationContext(), "DownloadBegin-" + jobId, data);
}
};
// Periodic progress callback (throttled by progressDivider in the Downloader).
params.onDownloadProgress = new DownloadParams.OnDownloadProgress() {
public void onDownloadProgress(int contentLength, int bytesWritten) {
WritableMap data = Arguments.createMap();
data.putInt("jobId", jobId);
data.putInt("contentLength", contentLength);
data.putInt("bytesWritten", bytesWritten);
sendEvent(getReactApplicationContext(), "DownloadProgress-" + jobId, data);
}
};
Downloader downloader = new Downloader();
downloader.execute(params);
this.downloaders.put(jobId, downloader);
} catch (Exception ex) {
ex.printStackTrace();
reject(promise, options.getString("toFile"), ex);
}
}
@ReactMethod
// Cancels an in-flight download started by downloadFile; no-op for unknown ids.
public void stopDownload(int jobId) {
Downloader downloader = this.downloaders.get(jobId);
if (downloader != null) {
downloader.stop();
}
}
@ReactMethod
public void pathForBundle(String bundleNamed, Promise promise) {
// TODO: Not sure what the Android equivalent of an iOS bundle path would be.
// NOTE(review): the promise is never resolved nor rejected here, so the JS
// caller will wait forever - confirm whether this stub should reject instead.
}
@ReactMethod
// Reports total and free bytes of the internal data partition as doubles
// (the bridge's putInt is 32-bit and would overflow on modern devices).
public void getFSInfo(Promise promise) {
File path = Environment.getDataDirectory();
StatFs stat = new StatFs(path.getPath());
long totalSpace;
long freeSpace;
if (android.os.Build.VERSION.SDK_INT >= 18) {
// API 18+ exposes 64-bit accessors directly.
totalSpace = stat.getTotalBytes();
freeSpace = stat.getFreeBytes();
} else {
// Older APIs: derive byte counts from block size times block counts.
long blockSize = stat.getBlockSize();
totalSpace = blockSize * stat.getBlockCount();
freeSpace = blockSize * stat.getAvailableBlocks();
}
WritableMap info = Arguments.createMap();
info.putDouble("totalSpace", (double)totalSpace); // Int32 too small, must use Double
info.putDouble("freeSpace", (double)freeSpace);
promise.resolve(info);
}
// Maps a native exception onto the promise, translating FileNotFoundException
// into a node-style ENOENT rejection so JS sees consistent error codes.
private void reject(Promise promise, String filepath, Exception ex) {
if (ex instanceof FileNotFoundException) {
rejectFileNotFound(promise, filepath);
return;
}
promise.reject(null, ex.getMessage());
}
// Rejects with a node-style ENOENT error for a missing file.
private void rejectFileNotFound(Promise promise, String filepath) {
promise.reject("ENOENT", "ENOENT: no such file or directory, open '" + filepath + "'");
}
// Rejects with a node-style EISDIR error when a read targets a directory.
private void rejectFileIsDirectory(Promise promise) {
promise.reject("EISDIR", "EISDIR: illegal operation on a directory, read");
}
@Override
// Constants exported to JavaScript: well-known directory paths plus the
// numeric file-type codes used by stat/readDir.
public Map<String, Object> getConstants() {
final Map<String, Object> constants = new HashMap<>();
constants.put(NSDocumentDirectory, 0);
constants.put(NSDocumentDirectoryPath, this.getReactApplicationContext().getFilesDir().getAbsolutePath());
constants.put(NSTemporaryDirectoryPath, null);
// External storage may be absent (no SD card / unmounted): export null then.
File externalDirectory = this.getReactApplicationContext().getExternalFilesDir(null);
if (externalDirectory != null) {
constants.put(NSExternalDirectoryPath, externalDirectory.getAbsolutePath());
} else {
constants.put(NSExternalDirectoryPath, null);
}
constants.put(NSPicturesDirectoryPath, Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath());
constants.put(NSCachesDirectoryPath, this.getReactApplicationContext().getCacheDir().getAbsolutePath());
constants.put(NSFileTypeRegular, 0);
constants.put(NSFileTypeDirectory, 1);
return constants;
}
}
|
move-file fallback
|
android/src/main/java/com/rnfs/RNFSManager.java
|
move-file fallback
|
|
Java
|
mit
|
b3a6496049483b42ebcc34db2e06329032d363c1
| 0
|
AlmasB/FXGL,AlmasB/FXGL,AlmasB/FXGL,AlmasB/FXGL
|
/*
* FXGL - JavaFX Game Library. The MIT License (MIT).
* Copyright (c) AlmasB (almaslvl@gmail.com).
* See LICENSE for details.
*/
package sandbox.particles;
import com.almasb.fxgl.animation.AnimatedValue;
import com.almasb.fxgl.animation.Interpolators;
import com.almasb.fxgl.app.GameApplication;
import com.almasb.fxgl.app.GameSettings;
import com.almasb.fxgl.core.math.FXGLMath;
import com.almasb.fxgl.core.math.Vec2;
import com.almasb.fxgl.texture.ImagesKt;
import javafx.geometry.Point2D;
import javafx.geometry.Rectangle2D;
import javafx.scene.Node;
import javafx.scene.input.KeyCode;
import javafx.scene.paint.Color;
import javafx.scene.shape.CubicCurve;
import javafx.scene.shape.Rectangle;
import javafx.util.Duration;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import static com.almasb.fxgl.dsl.FXGL.*;
/**
* @author Almas Baimagambetov (almaslvl@gmail.com)
*/
/**
 * Sandbox demo: explodes a sprite into its individual pixels and, on pressing
 * F, animates every pixel through a scale -> noise-driven drift -> fly-to-point
 * sequence of chained animations.
 */
public class CrystalApp2 extends GameApplication {
@Override
protected void initSettings(GameSettings settings) {
settings.setTitle("Crystal Chase");
settings.setWidthFromRatio(16/9.0);
settings.setManualResizeEnabled(true);
}
// One 1x1 Rectangle node per non-transparent pixel of the source texture.
private List<Node> pixels;
// Staggers the final fly-to-point animation; incremented per pixel, reset on each press.
double delayIndex = 0;
@Override
protected void initInput() {
onKeyDown(KeyCode.F, () -> {
delayIndex = 0.0;
// Common target point all pixels eventually converge on.
var point = FXGLMath.randomPoint(new Rectangle2D(0, 0, getAppWidth() - 200, getAppHeight() - 200));
pixels.stream()
.sorted(Comparator.comparingDouble(p -> p.getLayoutY()))
.forEach(p -> {
// Phase 1: quick scale-up, then chain into the drift phase.
animationBuilder()
.duration(Duration.seconds(0.2))
.interpolator(Interpolators.EXPONENTIAL.EASE_OUT())
.onFinished(() -> {
animationBuilder()
.delay(Duration.seconds(random(0.1, 0.6)))
.onFinished(() -> {
// Phase 3: fly each pixel to the shared target point.
animationBuilder()
.delay(Duration.seconds(random(delayIndex, delayIndex + 0.1)))
.duration(Duration.seconds(1.5))
.interpolator(Interpolators.ELASTIC.EASE_OUT())
.translate(p)
.from(new Point2D(p.getTranslateX(), p.getTranslateY()))
.to(point)
// .alongPath(new CubicCurve(p.getTranslateX(), p.getTranslateY(),
// random(200, 600), random(-200, 220),
// random(500, 700), random(300, 700),
// 650, 150))
.buildAndPlay();
delayIndex += 0.0001;
})
.duration(Duration.seconds(0.75))
.interpolator(Interpolators.RANDOM.EASE_OUT())
.animate(new AnimatedValue<>(0.0, 1.0))
// Phase 2: perlin-noise-driven velocity drift, smoothed 80/20
// with the pixel's previous velocity stored in its properties.
.onProgress(progress -> {
var x = p.getTranslateX();
var y = p.getTranslateY();
var noiseValue = FXGLMath.noise2D(x * 0.002 * progress, y * 0.002 * t);
var angle = FXGLMath.toDegrees((noiseValue + 1) * Math.PI * random(1.0, 6.0));
angle %= 360.0;
var v = Vec2.fromAngle(angle).normalizeLocal().mulLocal(FXGLMath.random(1.0, 25));
Vec2 velocity = (Vec2) p.getProperties().get("vel");
var vx = velocity.x * 0.8f + v.x * 0.2f;
var vy = velocity.y * 0.8f + v.y * 0.2f;
velocity.x = vx;
velocity.y = vy;
p.setTranslateX(x + velocity.x);
p.setTranslateY(y + velocity.y);
})
.buildAndPlay();
})
.scale(p)
.from(new Point2D(1, 1))
.to(new Point2D(3, 3))
.buildAndPlay();
});
});
}
@Override
protected void initGame() {
getGameScene().setBackgroundColor(Color.BLACK);
// Convert the sprite into per-pixel Rectangle nodes, skipping transparency.
var texture = texture("anim/Attack (1).png", 430 / 2.0, 519 / 2.0);
pixels = ImagesKt.toPixels(texture.getImage())
.stream()
.filter(p -> !p.getColor().equals(Color.TRANSPARENT))
.map(p -> {
var r = new Rectangle(1, 1, p.getColor());
r.setLayoutX(p.getX());
r.setLayoutY(p.getY());
return r;
})
.collect(Collectors.toList());
pixels.forEach(p -> {
// Per-pixel velocity state read and written by the drift phase above.
p.getProperties().put("vel", new Vec2());
addUINode(p, 250, 150);
});
}
// Time value oscillating between roughly 1 and 7; feeds the noise sampling.
double t = 0.0;
boolean up = true;
@Override
protected void onUpdate(double tpf) {
if (up) {
t += tpf;
} else {
t -= tpf;
}
if (t > 7) {
up = false;
}
if (t < 1) {
up = true;
}
}
public static void main(String[] args) {
launch(args);
}
}
|
fxgl-samples/src/main/java/sandbox/particles/CrystalApp2.java
|
/*
* FXGL - JavaFX Game Library. The MIT License (MIT).
* Copyright (c) AlmasB (almaslvl@gmail.com).
* See LICENSE for details.
*/
package sandbox.particles;
import com.almasb.fxgl.animation.AnimatedValue;
import com.almasb.fxgl.animation.Interpolators;
import com.almasb.fxgl.app.GameApplication;
import com.almasb.fxgl.app.GameSettings;
import com.almasb.fxgl.core.math.FXGLMath;
import com.almasb.fxgl.core.math.Vec2;
import com.almasb.fxgl.dsl.components.DraggableComponent;
import com.almasb.fxgl.texture.ImagesKt;
import javafx.geometry.Point2D;
import javafx.geometry.Rectangle2D;
import javafx.scene.Node;
import javafx.scene.input.KeyCode;
import javafx.scene.input.MouseEvent;
import javafx.scene.paint.Color;
import javafx.scene.shape.CubicCurve;
import javafx.scene.shape.Rectangle;
import javafx.util.Duration;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import static com.almasb.fxgl.dsl.FXGL.*;
/**
* @author Almas Baimagambetov (almaslvl@gmail.com)
*/
/**
 * Sandbox demo: explodes a sprite into its individual pixels and, on pressing
 * F, animates every pixel through a scale -> noise-driven drift -> cubic-curve
 * flight sequence of chained animations. Also contains an (unused here)
 * helper for spawning draggable crystal entities.
 */
public class CrystalApp2 extends GameApplication {
// Entity types for the entity builder; only CRYSTAL is used in spawnCrystal().
private enum Type {
PLAYER, CRYSTAL
}
@Override
protected void initSettings(GameSettings settings) {
settings.setTitle("Crystal Chase");
settings.setWidthFromRatio(16/9.0);
settings.setManualResizeEnabled(true);
}
// One 1x1 Rectangle node per non-transparent pixel of the source texture.
private List<Node> pixels;
// Staggers the final flight animation; incremented per pixel, reset on each press.
double delayIndex = 0;
@Override
protected void initInput() {
onKeyDown(KeyCode.F, () -> {
delayIndex = 0.0;
pixels.stream()
.sorted(Comparator.comparingDouble(p -> p.getLayoutY()))
.forEach(p -> {
//p.setBlendMode(BlendMode.ADD);
// Phase 1: quick scale-up, then chain into the drift phase.
animationBuilder()
.duration(Duration.seconds(0.2))
.interpolator(Interpolators.EXPONENTIAL.EASE_OUT())
.onFinished(() -> {
animationBuilder()
.delay(Duration.seconds(random(0.1, 0.6)))
.onFinished(() -> {
// Phase 3: fly each pixel along a randomized cubic curve.
animationBuilder()
.delay(Duration.seconds(random(delayIndex, delayIndex + 0.1)))
.duration(Duration.seconds(1.5))
.interpolator(Interpolators.EXPONENTIAL.EASE_IN())
.translate(p)
.alongPath(new CubicCurve(p.getTranslateX(), p.getTranslateY(),
random(200, 300), random(-200, 20),
random(600, 700), random(500, 700),
650, 150))
.buildAndPlay();
delayIndex += 0.0001;
})
.duration(Duration.seconds(0.75))
.interpolator(Interpolators.BOUNCE.EASE_OUT())
.animate(new AnimatedValue<>(0.0, 1.0))
// Phase 2: perlin-noise-driven velocity drift, smoothed 80/20
// with the pixel's previous velocity stored in its properties.
.onProgress(progress -> {
var x = p.getTranslateX();
var y = p.getTranslateY();
var noiseValue = FXGLMath.noise2D(x * 0.002 * progress, y * 0.002 * t);
var angle = FXGLMath.toDegrees((noiseValue + 1) * Math.PI * 1.5);
angle %= 360.0;
var v = Vec2.fromAngle(angle).normalizeLocal().mulLocal(FXGLMath.random(1.0, 25));
Vec2 velocity = (Vec2) p.getProperties().get("vel");
var vx = velocity.x * 0.8f + v.x * 0.2f;
var vy = velocity.y * 0.8f + v.y * 0.2f;
velocity.x = vx;
velocity.y = vy;
p.setTranslateX(x + velocity.x);
p.setTranslateY(y + velocity.y);
})
.buildAndPlay();
})
.scale(p)
.from(new Point2D(1, 1))
.to(new Point2D(3, 3))
.buildAndPlay();
});
// (commented-out parallelStream experiment removed)
});
}
@Override
protected void initGame() {
getGameScene().setBackgroundColor(Color.BLACK);
// Convert the sprite region into per-pixel Rectangle nodes, skipping transparency.
var texture = texture("robot_stand.png").subTexture(new Rectangle2D(50, 30, 200, 220));
pixels = ImagesKt.toPixels(texture.getImage())
.stream()
.filter(p -> !p.getColor().equals(Color.TRANSPARENT))
.map(p -> {
var r = new Rectangle(1, 1, p.getColor());
r.setLayoutX(p.getX());
r.setLayoutY(p.getY());
return r;
})
.collect(Collectors.toList());
pixels.forEach(p -> {
// Per-pixel velocity state read and written by the drift phase above.
p.getProperties().put("vel", new Vec2());
addUINode(p, 250, 150);
});
}
// Time value oscillating between roughly 1 and 7; feeds the noise sampling.
double t = 0.0;
boolean up = true;
@Override
protected void onUpdate(double tpf) {
if (up) {
t += tpf;
} else {
t -= tpf;
}
if (t > 7) {
up = false;
}
if (t < 1) {
up = true;
}
// (commented-out per-frame perlin-noise experiment removed; the same logic
// now lives inside the onProgress callback of initInput)
}
// Spawns a clickable, draggable crystal at a random point; clicking despawns it.
// NOTE(review): not called from anywhere visible in this file.
private void spawnCrystal() {
var e = entityBuilder().at(FXGLMath.randomPoint(new Rectangle2D(0, 0, getAppWidth() - 55, getAppHeight() - 55)))
.type(Type.CRYSTAL)
.view(texture("YellowCrystal.png").toAnimatedTexture(8, Duration.seconds(0.66)).loop())
.with(new DraggableComponent())
.build();
e.getViewComponent().addEventHandler(MouseEvent.MOUSE_CLICKED, ev -> {
despawnWithScale(e, Duration.seconds(1), Interpolators.ELASTIC.EASE_IN());
});
spawnWithScale(e, Duration.seconds(1), Interpolators.ELASTIC.EASE_OUT());
}
public static void main(String[] args) {
launch(args);
}
}
|
refactor
|
fxgl-samples/src/main/java/sandbox/particles/CrystalApp2.java
|
refactor
|
|
Java
|
lgpl-2.1
|
0e8d57d73f7fce904eea795bd12d121e9f96f55c
| 0
|
ekiwi/jade-mirror,ekiwi/jade-mirror,ekiwi/jade-mirror,ekiwi/jade-mirror
|
/*****************************************************************
JADE - Java Agent DEvelopment Framework is a framework to develop
multi-agent systems in compliance with the FIPA specifications.
Copyright (C) 2000 CSELT S.p.A.
The updating of this file to JADE 2.0 has been partially supported by the IST-1999-10211 LEAP Project
GNU Lesser General Public License
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation,
version 2.1 of the License.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
*****************************************************************/
package jade.domain;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.BufferedWriter;
import java.io.OutputStreamWriter;
import java.io.FileWriter;
import java.net.InetAddress;
import jade.util.leap.Iterator;
import jade.util.leap.List;
import jade.util.leap.ArrayList;
import jade.util.leap.Set;
import jade.util.leap.Map;
import jade.util.leap.HashMap;
import jade.core.*;
import jade.core.behaviours.*;
import jade.core.event.PlatformEvent;
import jade.core.event.MTPEvent;
import jade.domain.FIPAAgentManagement.*;
import jade.domain.JADEAgentManagement.*;
import jade.domain.introspection.*;
import jade.lang.acl.ACLMessage;
import jade.lang.acl.MessageTemplate;
import jade.lang.Codec;
import jade.lang.sl.SL0Codec;
import jade.onto.Ontology;
import jade.onto.OntologyException;
import jade.onto.Frame;
import jade.onto.basic.Action;
import jade.onto.basic.BasicOntology;
import jade.onto.basic.ResultPredicate;
import jade.onto.basic.DonePredicate;
import jade.onto.basic.TrueProposition;
import jade.mtp.MTPException;
import jade.proto.FipaRequestResponderBehaviour;
//__JADE_ONLY__BEGIN
import jade.security.AgentPrincipal;
import jade.security.UserPrincipal;
//__JADE_ONLY__END
import jade.security.JADESecurityException;
/**
Standard <em>Agent Management System</em> agent. This class
implements <em><b>FIPA</b></em> <em>AMS</em> agent. <b>JADE</b>
applications cannot use this class directly, but interact with it
through <em>ACL</em> message passing.
@author Giovanni Rimassa - Universita` di Parma
@version $Date$ $Revision$
*/
public class ams extends Agent implements AgentManager.Listener {
// Base class for all AMS request handlers. Serves both as a behaviour (handles
// one REQUEST message) and as a factory that spawns a fresh handler per request.
// Subclasses implement processAction() with the action-specific work.
private abstract class AMSBehaviour
extends FipaRequestResponderBehaviour.ActionHandler
implements FipaRequestResponderBehaviour.Factory {
protected AMSBehaviour(ACLMessage req) {
super(ams.this,req);
}
/**
* Create the content for the AGREE message.
* @param a is the action that has been agreed to perform
* @return a String with the content ready to be set into the message
**/
protected String createAgreeContent(Action a) {
ACLMessage temp = new ACLMessage(ACLMessage.AGREE);
temp.setLanguage(getRequest().getLanguage());
temp.setOntology(getRequest().getOntology());
List l = new ArrayList(2);
if (a == null) {
// No action available: substitute a placeholder so encoding still works.
a = new Action();
a.set_0(getAID());
a.set_1("UnknownAction");
}
l.add(a);
l.add(new TrueProposition());
try {
fillMsgContent(temp,l);
} catch (Exception ee) { // in any case try to return some good content
return "( true )";
}
return temp.getContent();
}
/**
* Create the content for a so-called "exceptional" message, i.e.
* one of NOT_UNDERSTOOD, FAILURE, REFUSE message.
* @param a is the Action that generated the exception
* @param e is the generated Exception
* @return a String containing the content to be sent back in the reply
* message; in case an exception is thrown somewhere, the method
* tries to return a valid content anyway, with a best-effort strategy
**/
protected String createExceptionalMsgContent(Action a, String ontoName, FIPAException e) {
ACLMessage temp = new ACLMessage(ACLMessage.NOT_UNDERSTOOD);
temp.setLanguage(SL0Codec.NAME);
temp.setOntology(ontoName);
List l = new ArrayList(2);
if (a == null) {
// No action available: substitute a placeholder so encoding still works.
a = new Action();
a.set_0(getAID());
a.set_1("UnknownAction");
}
l.add(a);
l.add(e);
try {
fillMsgContent(temp,l);
} catch (Exception ee) { // in any case try to return some good content
return e.getMessage();
}
return temp.getContent();
}
// Each concrete subclass will implement this deferred method to
// do action-specific work
protected abstract void processAction(Action a) throws FIPAException, JADESecurityException;
// Template method: decode the requested Action, delegate to the subclass,
// and translate failures into FAILURE/REFUSE replies.
public void action() {
Action a = null;
try {
ACLMessage msg = getRequest();
List l = myAgent.extractMsgContent(msg);
a = (Action)l.get(0);
// Do real action, deferred to subclasses
processAction(a);
}
catch (FIPAException fe) {
String ontoName = getRequest().getOntology();
sendReply((fe instanceof FailureException?ACLMessage.FAILURE:ACLMessage.REFUSE),createExceptionalMsgContent(a, ontoName, fe));
}
catch (JADESecurityException se) {
sendReply(ACLMessage.REFUSE, "(" + se.getMessage() + ")");
}
}
/**
Writes the <code>Done</code> predicate for the specific action
into the result <code>String</code> object, encoded in SL0.
*/
protected String doneAction(Action a) throws FIPAException {
try {
Ontology o = lookupOntology(getRequest().getOntology());
DonePredicate dp = new DonePredicate();
dp.set_0(a);
Frame f = o.createFrame(dp, BasicOntology.DONE);
List l = new ArrayList(1);
l.add(f);
Codec c = lookupLanguage(SL0Codec.NAME);
String result = c.encode(l, o);
return result;
}
catch(OntologyException oe) {
oe.printStackTrace();
throw new FIPAException("Internal error in building Done predicate.");
}
}
// Single-shot behaviour: one handled request, then done.
public boolean done() {
return true;
}
public void reset() {
}
} // End of AMSBehaviour class
// These four concrete classes serve both as a Factory and as an
// Action: when seen as Factory they can spawn a new
// Behaviour to process a given request, and when seen as
// Action they process their request and terminate.
// Handles the FIPA "register" action: records the agent description in the
// AMS table and releases any buffered creation-confirmation message.
private class RegBehaviour extends AMSBehaviour {
public RegBehaviour(ACLMessage msg) {
super(msg);
}
public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
return new RegBehaviour(msg);
}
protected void processAction(Action a) throws FIPAException, JADESecurityException {
Register r = (Register)a.getAction();
AMSAgentDescription amsd = (AMSAgentDescription)r.get_0();
// This agent was created by some other, which is still
// waiting for an 'inform' message. Recover the buffered
// message from the Map and send it back.
ACLMessage informCreator = (ACLMessage)pendingInforms.remove(amsd.getName());
// The message in pendingInforms can be registered with only the localName
// without the platformID, so retry the lookup after stripping "@platform".
if(informCreator == null) {
String name = amsd.getName().getName();
int atPos = name.lastIndexOf('@');
if(atPos > 0) {
name = name.substring(0, atPos);
informCreator = (ACLMessage)pendingInforms.remove(name);
}
}
try {
// Write new agent data in AMS Agent Table
AMSRegister(amsd);
sendReply(ACLMessage.AGREE,createAgreeContent(a));
sendReply(ACLMessage.INFORM, doneAction(a));
// Inform agent creator that registration was successful.
if(informCreator != null) {
send(informCreator);
}
}
catch(AlreadyRegistered are) {
sendReply(ACLMessage.AGREE, createAgreeContent(a));
String ontoName = getRequest().getOntology();
sendReply(ACLMessage.FAILURE,createExceptionalMsgContent(a, ontoName, are));
// Inform agent creator that registration failed.
if(informCreator != null) {
informCreator.setPerformative(ACLMessage.FAILURE);
informCreator.setContent(createExceptionalMsgContent(a, ontoName, are));
send(informCreator);
}
}
}
} // End of RegBehaviour class
private class DeregBehaviour extends AMSBehaviour {
    public DeregBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new DeregBehaviour(msg);
    }
    // Remove the agent description from the AMS knowledge base, then
    // confirm with AGREE followed by an INFORM carrying the Done predicate.
    protected void processAction(Action a) throws FIPAException {
        Deregister deregAct = (Deregister)a.getAction();
        AMSDeregister((AMSAgentDescription)deregAct.get_0());
        sendReply(ACLMessage.AGREE, createAgreeContent(a));
        sendReply(ACLMessage.INFORM, doneAction(a));
    }
} // End of DeregBehaviour class
private class ModBehaviour extends AMSBehaviour {
    public ModBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new ModBehaviour(msg);
    }
    // Replace the stored agent description, then confirm with AGREE
    // followed by an INFORM carrying the Done predicate.
    protected void processAction(Action a) throws FIPAException, JADESecurityException {
        Modify modifyAct = (Modify)a.getAction();
        AMSModify((AMSAgentDescription)modifyAct.get_0());
        sendReply(ACLMessage.AGREE, createAgreeContent(a));
        sendReply(ACLMessage.INFORM, doneAction(a));
    }
} // End of ModBehaviour class
private class SrchBehaviour extends AMSBehaviour {
    public SrchBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new SrchBehaviour(msg);
    }
    // Query the AMS knowledge base and return the matching descriptions
    // wrapped in a Result predicate inside an INFORM reply.
    protected void processAction(Action a) throws FIPAException {
        Search searchAct = (Search)a.getAction();
        AMSAgentDescription template = (AMSAgentDescription)searchAct.get_0();
        SearchConstraints constraints = searchAct.get_1();
        List matches = AMSSearch(template, constraints, getReply());
        sendReply(ACLMessage.AGREE, createAgreeContent(a));
        ACLMessage inform = getRequest().createReply();
        inform.setPerformative(ACLMessage.INFORM);
        ResultPredicate result = new ResultPredicate();
        result.set_0(a);
        for (int k = 0; k < matches.size(); k++) {
            result.add_1(matches.get(k));
        }
        // Reuse the (now consumed) match list as the content list.
        matches.clear();
        matches.add(result);
        fillMsgContent(inform, matches);
        send(inform);
    }
} // End of SrchBehaviour class
private class GetDescriptionBehaviour extends AMSBehaviour {
    public GetDescriptionBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new GetDescriptionBehaviour(msg);
    }
    /**
     * Handles 'get-description': replies with AGREE and then an INFORM
     * whose content is a Result predicate carrying the platform profile.
     * (Removed an unused local list that was allocated and never read.)
     */
    protected void processAction(Action a) throws FIPAException {
        sendReply(ACLMessage.AGREE, createAgreeContent(a));
        ACLMessage reply = getReply();
        reply.setPerformative(ACLMessage.INFORM);
        ResultPredicate rp = new ResultPredicate();
        rp.set_0(a);
        rp.add_1(theProfile);
        ArrayList list = new ArrayList(1);
        list.add(rp);
        fillMsgContent(reply, list);
        send(reply);
    }
} // End of GetDescriptionBehaviour class
// These Behaviours handle interactions with platform tools.
private class RegisterToolBehaviour extends CyclicBehaviour {
    // Matches SL0 SUBSCRIBE messages on the 'tool-subscription'
    // conversation, with the JADE introspection ontology.
    private MessageTemplate subscriptionTemplate;
    RegisterToolBehaviour() {
        MessageTemplate mt1 = MessageTemplate.MatchLanguage(SL0Codec.NAME);
        MessageTemplate mt2 = MessageTemplate.MatchOntology(JADEIntrospectionOntology.NAME);
        MessageTemplate mt12 = MessageTemplate.and(mt1, mt2);
        mt1 = MessageTemplate.MatchReplyWith("tool-subscription");
        mt2 = MessageTemplate.MatchPerformative(ACLMessage.SUBSCRIBE);
        subscriptionTemplate = MessageTemplate.and(mt1, mt2);
        subscriptionTemplate = MessageTemplate.and(subscriptionTemplate, mt12);
    }
    /**
     * Accepts a tool subscription and sends the subscriber a snapshot of
     * the current platform state (containers, agents, MTP addresses and
     * the platform description), each item wrapped in an Occurred event.
     */
    public void action() {
        // Receive 'subscribe' ACL messages.
        ACLMessage current = receive(subscriptionTemplate);
        if(current != null) {
            // FIXME: Should parse 'iota ?x ...'
            // Get new tool name from subscription message
            AID newTool = current.getSender();
            try {
                // Send back the whole container list.
                ContainerID[] ids = myPlatform.containerIDs();
                for(int i = 0; i < ids.length; i++) {
                    ContainerID cid = ids[i];
                    AddedContainer ac = new AddedContainer();
                    ac.setContainer(cid);
                    EventRecord er = new EventRecord(ac, here());
                    Occurred o = new Occurred();
                    o.set_0(er);
                    List l = new ArrayList(1);
                    l.add(o);
                    toolNotification.clearAllReceiver();
                    toolNotification.addReceiver(newTool);
                    fillMsgContent(toolNotification, l);
                    send(toolNotification);
                }
                // Send all agent names, along with their container name.
                AID[] agents = myPlatform.agentNames();
                for(int i = 0; i < agents.length; i++) {
                    AID agentName = agents[i];
                    ContainerID cid = myPlatform.getContainerID(agentName);
                    BornAgent ba = new BornAgent();
                    ba.setAgent(agentName);
                    ba.setWhere(cid);
                    EventRecord er = new EventRecord(ba, here());
                    Occurred o = new Occurred();
                    o.set_0(er);
                    List l = new ArrayList(1);
                    l.add(o);
                    toolNotification.clearAllReceiver();
                    toolNotification.addReceiver(newTool);
                    fillMsgContent(toolNotification, l);
                    send(toolNotification);
                }
                // Send the list of the installed MTPs
                String[] addresses = myPlatform.platformAddresses();
                for(int i = 0; i < addresses.length; i++) {
                    AddedMTP amtp = new AddedMTP();
                    amtp.setAddress(addresses[i]);
                    amtp.setWhere(new ContainerID(AgentManager.MAIN_CONTAINER_NAME, null)); // FIXME: should use AgentManager to know the container
                    EventRecord er = new EventRecord(amtp, here());
                    Occurred o = new Occurred();
                    o.set_0(er);
                    List l = new ArrayList(1);
                    l.add(o);
                    toolNotification.clearAllReceiver();
                    toolNotification.addReceiver(newTool);
                    fillMsgContent(toolNotification, l);
                    send(toolNotification);
                }
                // Notification to the RMA of the APDescription
                PlatformDescription ap = new PlatformDescription();
                ap.setPlatform(theProfile);
                EventRecord er = new EventRecord(ap,here());
                Occurred o = new Occurred();
                o.set_0(er);
                List l = new ArrayList(1);
                l.add(o);
                toolNotification.clearAllReceiver();
                toolNotification.addReceiver(newTool);
                fillMsgContent(toolNotification, l);
                send(toolNotification);
                // Add the new tool to tools list so it receives future events.
                tools.add(newTool);
            }
            catch(NotFoundException nfe) {
                nfe.printStackTrace();
            }
            catch(FIPAException fe) {
                fe.printStackTrace();
            }
        }
        else
            block();
    }
} // End of RegisterToolBehaviour class
private class DeregisterToolBehaviour extends CyclicBehaviour {
    // Matches SL0 CANCEL messages on the 'tool-cancellation'
    // conversation, with the JADE introspection ontology.
    private MessageTemplate cancellationTemplate;
    DeregisterToolBehaviour() {
        MessageTemplate langAndOnto = MessageTemplate.and(
            MessageTemplate.MatchLanguage(SL0Codec.NAME),
            MessageTemplate.MatchOntology(JADEIntrospectionOntology.NAME));
        MessageTemplate cancelShape = MessageTemplate.and(
            MessageTemplate.MatchReplyWith("tool-cancellation"),
            MessageTemplate.MatchPerformative(ACLMessage.CANCEL));
        cancellationTemplate = MessageTemplate.and(cancelShape, langAndOnto);
    }
    public void action() {
        // Receive 'cancel' ACL messages.
        ACLMessage current = receive(cancellationTemplate);
        if (current == null) {
            block();
            return;
        }
        // FIXME: Should parse the content
        // Remove the sender from the registered tools group.
        tools.remove(current.getSender());
    }
} // End of DeregisterToolBehaviour class
private class NotifyToolsBehaviour extends CyclicBehaviour {
    /**
     * Drains the eventQueue and forwards each buffered platform event,
     * wrapped in an Occurred predicate, to every registered tool.
     */
    public void action() {
        synchronized(ams.this) { // Mutual exclusion with handleXXX() methods
            // Look into the event buffer
            Iterator it = eventQueue.iterator();
            Occurred o = new Occurred();
            while(it.hasNext()) {
                // Write the event into the notification message
                EventRecord er = (EventRecord)it.next();
                o.set_0(er);
                List l = new ArrayList(1);
                l.add(o);
                try {
                    fillMsgContent(toolNotification, l);
                }
                catch(FIPAException fe) {
                    // Encoding failure: the message is still sent with its
                    // previous content. NOTE(review): consider skipping the
                    // send in this case -- confirm intended behaviour.
                    fe.printStackTrace();
                }
                // Put all tools in the receiver list
                toolNotification.clearAllReceiver();
                Iterator toolIt = tools.iterator();
                while(toolIt.hasNext()) {
                    AID tool = (AID)toolIt.next();
                    toolNotification.addReceiver(tool);
                }
                send(toolNotification);
                // Consume the event once it has been broadcast.
                it.remove();
            }
        }
        block();
    }
} // End of NotifyToolsBehaviour class
private class KillContainerBehaviour extends AMSBehaviour {
    public KillContainerBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new KillContainerBehaviour(msg);
    }
    // Ask the platform to shut the target container down, then confirm
    // with AGREE followed by an INFORM carrying the Done predicate.
    protected void processAction(Action a) throws FIPAException {
        KillContainer killAct = (KillContainer)a.get_1();
        myPlatform.killContainer(killAct.getContainer());
        sendReply(ACLMessage.AGREE, createAgreeContent(a));
        sendReply(ACLMessage.INFORM, doneAction(a));
    }
} // End of KillContainerBehaviour class
private class CreateBehaviour extends AMSBehaviour {
    public CreateBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new CreateBehaviour(msg);
    }
    /**
     * Handles 'create-agent': starts the new agent on the requested
     * container. The final 'inform Done' is NOT sent here: it is buffered
     * in pendingInforms and delivered by RegBehaviour when the new agent
     * registers itself with the AMS.
     */
    protected void processAction(Action a) throws FIPAException, JADESecurityException {
        CreateAgent ca = (CreateAgent)a.get_1();
        String agentName = ca.getAgentName();
        String className = ca.getClassName();
        ContainerID container = ca.getContainer();
        // Collect the start-up arguments as strings.
        ArrayList listArg = new ArrayList();
        Iterator arg = ca.getAllArguments(); // iterator over all arguments
        while(arg.hasNext())
            listArg.add(arg.next().toString());
        // Use toArray() instead of the previous hand-rolled copy loop.
        String[] arguments = (String[])listArg.toArray(new String[listArg.size()]);
        sendReply(ACLMessage.AGREE, createAgreeContent(a));
        try {
            myPlatform.create(agentName, className, arguments, container, null, null); //!!!
            // Buffer the 'inform Done' reply; it is released to the
            // requester when the newly created agent registers with the
            // AMS. The new agent's name is the key in the map.
            ACLMessage reply = getReply();
            reply = (ACLMessage)reply.clone();
            reply.setPerformative(ACLMessage.INFORM);
            reply.setContent(doneAction(a));
            pendingInforms.put(agentName, reply);
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError(ue.getMessage());
        }
    }
} // End of CreateBehaviour class
private class KillBehaviour extends AMSBehaviour {
    public KillBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new KillBehaviour(msg);
    }
    // Terminate the target agent, then confirm with AGREE + INFORM.
    protected void processAction(Action a) throws FIPAException {
        KillAgent killAct = (KillAgent)a.get_1();
        try {
            myPlatform.kill(killAct.getAgent(), killAct.getPassword());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(NotFoundException nfe) {
            // The agent is not known to the platform.
            throw new NotRegistered();
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
        }
    }
} // End of KillBehaviour class
private class SniffAgentOnBehaviour extends AMSBehaviour {
    public SniffAgentOnBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new SniffAgentOnBehaviour(msg);
    }
    // Turn message sniffing on for the requested agents.
    protected void processAction(Action a) throws FIPAException {
        SniffOn sniffAct = (SniffOn)a.get_1();
        try {
            myPlatform.sniffOn(sniffAct.getSniffer(), sniffAct.getCloneOfSniffedAgents());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(NotFoundException nfe) {
            throw new NotRegistered();
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
        }
    }
} // End of SniffAgentOnBehaviour class
private class SniffAgentOffBehaviour extends AMSBehaviour {
    public SniffAgentOffBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new SniffAgentOffBehaviour(msg);
    }
    // Turn message sniffing off for the requested agents.
    protected void processAction(Action a) throws FIPAException {
        SniffOff sniffAct = (SniffOff)a.get_1();
        try {
            myPlatform.sniffOff(sniffAct.getSniffer(), sniffAct.getCloneOfSniffedAgents());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(NotFoundException nfe) {
            throw new NotRegistered();
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
        }
    }
} // End of SniffAgentOffBehaviour class
private class DebugAgentOnBehaviour extends AMSBehaviour {
    public DebugAgentOnBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new DebugAgentOnBehaviour(msg);
    }
    // Attach the debugger to the requested agents.
    protected void processAction(Action a) throws FIPAException {
        DebugOn debugAct = (DebugOn)a.get_1();
        try {
            myPlatform.debugOn(debugAct.getDebugger(), debugAct.getCloneOfDebuggedAgents());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(NotFoundException nfe) {
            throw new NotRegistered();
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
        }
    }
} // End of DebugAgentOnBehaviour class
private class DebugAgentOffBehaviour extends AMSBehaviour {
    public DebugAgentOffBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new DebugAgentOffBehaviour(msg);
    }
    // Detach the debugger from the requested agents.
    protected void processAction(Action a) throws FIPAException {
        DebugOff debugAct = (DebugOff)a.get_1();
        try {
            myPlatform.debugOff(debugAct.getDebugger(), debugAct.getCloneOfDebuggedAgents());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(NotFoundException nfe) {
            throw new NotRegistered();
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
        }
    }
} // End of DebugAgentOffBehaviour class
private class InstallMTPBehaviour extends AMSBehaviour {
    public InstallMTPBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new InstallMTPBehaviour(msg);
    }
    // Install a new Message Transport Protocol on the given container.
    protected void processAction(Action a) throws FIPAException {
        InstallMTP installAct = (InstallMTP)a.get_1();
        try {
            myPlatform.installMTP(installAct.getAddress(), installAct.getContainer(), installAct.getClassName());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(NotFoundException nfe) {
            throw new jade.domain.FIPAAgentManagement.UnrecognisedParameterValue("MTP", nfe.getMessage());
        }
        catch(MTPException mtpe) {
            throw new jade.domain.FIPAAgentManagement.UnrecognisedParameterValue("MTP", mtpe.getMessage());
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError(ue.getMessage());
        }
    }
} // End of InstallMTPBehaviour class
private class UninstallMTPBehaviour extends AMSBehaviour {
    public UninstallMTPBehaviour(ACLMessage msg) {
        super(msg);
    }
    // Factory method: spawn a fresh handler for the given request.
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new UninstallMTPBehaviour(msg);
    }
    // Remove a Message Transport Protocol from the given container.
    protected void processAction(Action a) throws FIPAException {
        UninstallMTP uninstallAct = (UninstallMTP)a.get_1();
        try {
            myPlatform.uninstallMTP(uninstallAct.getAddress(), uninstallAct.getContainer());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(NotFoundException nfe) {
            throw new jade.domain.FIPAAgentManagement.UnrecognisedParameterValue("MTP", nfe.getMessage());
        }
        catch(MTPException mtpe) {
            throw new jade.domain.FIPAAgentManagement.UnrecognisedParameterValue("MTP", mtpe.getMessage());
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError(ue.getMessage());
        }
    }
} // End of UninstallMTPBehaviour class
// The AgentPlatform where information about agents is stored
/**
@serial
*/
private AgentManager myPlatform;
// Maintains an association between action names and behaviours to
// handle 'fipa-agent-management' actions
/**
@serial
*/
private FipaRequestResponderBehaviour dispatcher;
// Maintains an association between action names and behaviours to
// handle 'jade-agent-management' actions
/**
@serial
*/
private FipaRequestResponderBehaviour extensionsDispatcher;
// Contains a main Behaviour and some utilities to handle JADE mobility
/**
@serial
*/
private MobilityManager mobilityMgr;
// Behaviour to listen to incoming 'subscribe' messages from tools.
/**
@serial
*/
private RegisterToolBehaviour registerTool;
// Behaviour to broadcast AgentPlatform notifications to each
// registered tool.
/**
@serial
*/
private NotifyToolsBehaviour notifyTools;
// Behaviour to listen to incoming 'cancel' messages from tools.
/**
@serial
*/
private DeregisterToolBehaviour deregisterTool;
// Group of tools (AIDs) currently registered with this AMS.
/**
@serial
*/
private List tools;
// ACL Message reused for every tool notification; its receiver list is
// rewritten before each send.
/**
@serial
*/
private ACLMessage toolNotification = new ACLMessage(ACLMessage.INFORM);
// Buffer for AgentPlatform notifications, drained by NotifyToolsBehaviour.
/**
@serial
*/
private List eventQueue = new ArrayList(10);
// Maps a newly created agent's name to the buffered 'inform Done' reply
// for its creator, released by RegBehaviour (see CreateBehaviour).
/**
@serial
*/
private Map pendingInforms = new HashMap();
// FIPA Agent Platform Description exposed via 'get-description'.
/**
@serial
*/
private APDescription theProfile = new APDescription();
/**
 This constructor creates a new <em>AMS</em> agent. Since a direct
 reference to an Agent Platform implementation must be passed to
 it, this constructor cannot be called from application
 code. Therefore, no other <em>AMS</em> agent can be created
 beyond the default one.
 */
public ams(AgentManager ap) {
    // Fill Agent Platform Profile with data. Use the canonical Boolean
    // constants instead of 'new Boolean(...)', which allocates needlessly.
    theProfile.setDynamic(Boolean.FALSE);
    theProfile.setMobility(Boolean.FALSE);
    APTransportDescription mtps = new APTransportDescription();
    theProfile.setTransportProfile(mtps);
    // Listen to platform events (containers, agents, MTPs, ...).
    myPlatform = ap;
    myPlatform.addListener(this);
    // Dispatcher for 'fipa-agent-management' requests.
    MessageTemplate mtFIPA =
        MessageTemplate.and(MessageTemplate.MatchLanguage(SL0Codec.NAME),
                            MessageTemplate.MatchOntology(FIPAAgentManagementOntology.NAME));
    dispatcher = new FipaRequestResponderBehaviour(this, mtFIPA);
    // Dispatcher for JADE-specific 'jade-agent-management' requests.
    MessageTemplate mtJADE =
        MessageTemplate.and(MessageTemplate.MatchLanguage(SL0Codec.NAME),
                            MessageTemplate.MatchOntology(JADEAgentManagementOntology.NAME));
    extensionsDispatcher = new FipaRequestResponderBehaviour(this, mtJADE);
    mobilityMgr = new MobilityManager(this);
    registerTool = new RegisterToolBehaviour();
    deregisterTool = new DeregisterToolBehaviour();
    notifyTools = new NotifyToolsBehaviour();
    tools = new ArrayList();
    // Prototype notification message reused for every tool event.
    toolNotification.setSender(new AID());
    toolNotification.setLanguage(SL0Codec.NAME);
    toolNotification.setOntology(JADEIntrospectionOntology.NAME);
    toolNotification.setInReplyTo("tool-subscription");
    // Associate each AMS action name with the behaviour to execute
    // when the action is requested in a 'request' ACL message
    dispatcher.registerFactory(FIPAAgentManagementOntology.REGISTER, new RegBehaviour(null));
    dispatcher.registerFactory(FIPAAgentManagementOntology.DEREGISTER, new DeregBehaviour(null));
    dispatcher.registerFactory(FIPAAgentManagementOntology.MODIFY, new ModBehaviour(null));
    dispatcher.registerFactory(FIPAAgentManagementOntology.SEARCH, new SrchBehaviour(null));
    dispatcher.registerFactory(FIPAAgentManagementOntology.GETDESCRIPTION, new GetDescriptionBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.CREATEAGENT, new CreateBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.KILLAGENT, new KillBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.KILLCONTAINER, new KillContainerBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.SNIFFON, new SniffAgentOnBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.DEBUGOFF, new DebugAgentOffBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.DEBUGON, new DebugAgentOnBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.SNIFFOFF, new SniffAgentOffBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.INSTALLMTP, new InstallMTPBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.UNINSTALLMTP, new UninstallMTPBehaviour(null));
}
/**
 This method starts the <em>AMS</em> behaviours to allow the agent
 to carry on its duties within <em><b>JADE</b></em> agent platform.
 */
protected void setup() {
    // Fill the ':name' slot of the Agent Platform Profile with the Platform ID.
    theProfile.setName("\"" + getHap() + "\"");
    writeAPDescription();
    // Register the supported ontologies
    registerOntology(FIPAAgentManagementOntology.NAME, FIPAAgentManagementOntology.instance());
    registerOntology(JADEAgentManagementOntology.NAME, JADEAgentManagementOntology.instance());
    registerOntology(JADEIntrospectionOntology.NAME, JADEIntrospectionOntology.instance());
    registerOntology(MobilityOntology.NAME, MobilityOntology.instance());
    // register the supported languages
    registerLanguage(SL0Codec.NAME, new SL0Codec());
    // Add a dispatcher Behaviour for all ams actions following from a
    // 'fipa-request' interaction with 'fipa-agent-management' ontology.
    addBehaviour(dispatcher);
    // Add a dispatcher Behaviour for all ams actions following from a
    // 'fipa-request' interaction with 'jade-agent-management' ontology.
    addBehaviour(extensionsDispatcher);
    // Add a main behaviour to manage mobility related messages
    addBehaviour(mobilityMgr.getMain());
    // Add a Behaviour to accept incoming tool registrations and a
    // Behaviour to broadcast events to registered tools.
    addBehaviour(registerTool);
    addBehaviour(deregisterTool);
    addBehaviour(notifyTools);
}
/**
 * Checks that all the mandatory slots for a register/modify/deregister action
 * are present.
 * Fixed: the original wrapped its own deliberate <code>throw new
 * MissingParameter</code> statements inside <code>catch (Exception)</code>
 * blocks, so every missing slot also printed a spurious stack trace and the
 * exception was caught and re-created. Validation now happens outside the
 * defensive try blocks.
 * @param actionName is the name of the action (one of
 * <code>FIPAAgentManagementOntology.REGISTER</code>,
 * <code>FIPAAgentManagementOntology.MODIFY</code>,
 * <code>FIPAAgentManagementOntology.DEREGISTER</code>)
 * @param amsd is the AMSAgentDescription to be checked for
 * @throws MissingParameter if one of the mandatory slots is missing
 **/
private void checkMandatorySlots(String actionName, AMSAgentDescription amsd) throws MissingParameter {
    AID name;
    try {
        name = amsd.getName();
    } catch (Exception e) {
        // Defensive: any failure reading the slot counts as missing.
        e.printStackTrace();
        throw new MissingParameter(FIPAAgentManagementOntology.AMSAGENTDESCRIPTION, "name");
    }
    if (name == null || name.getName() == null || name.getName().length() == 0)
        throw new MissingParameter(FIPAAgentManagementOntology.AMSAGENTDESCRIPTION, "name");
    // The 'state' slot is mandatory for register and modify only.
    if (!actionName.equalsIgnoreCase(FIPAAgentManagementOntology.DEREGISTER)) {
        String state;
        try {
            state = amsd.getState();
        } catch (Exception e) {
            e.printStackTrace();
            throw new MissingParameter(FIPAAgentManagementOntology.AMSAGENTDESCRIPTION, "state");
        }
        if (state == null || state.length() == 0)
            throw new MissingParameter(FIPAAgentManagementOntology.AMSAGENTDESCRIPTION, "state");
    }
}
/**
@serial
*/
private KB agentDescriptions = new KBAbstractImpl() {
    /**
     * Template matching for AMS agent descriptions: every non-null slot
     * of the template must match the corresponding slot of the fact;
     * null template slots act as wildcards.
     */
    protected boolean match(Object template, Object fact) {
        try {
            AMSAgentDescription t = (AMSAgentDescription)template;
            AMSAgentDescription f = (AMSAgentDescription)fact;
            String wantedOwnership = t.getOwnership();
            if(wantedOwnership != null) {
                String actualOwnership = f.getOwnership();
                if(actualOwnership == null || !wantedOwnership.equalsIgnoreCase(actualOwnership))
                    return false;
            }
            String wantedState = t.getState();
            if(wantedState != null) {
                String actualState = f.getState();
                if(actualState == null || !wantedState.equalsIgnoreCase(actualState))
                    return false;
            }
            AID wantedName = t.getName();
            if(wantedName != null) {
                AID actualName = f.getName();
                if(actualName == null || !matchAID(wantedName, actualName))
                    return false;
            }
            return true;
        }
        catch(ClassCastException cce) {
            // Non-AMSAgentDescription objects never match.
            return false;
        }
    }
};
/** it is called also by Agent.java **/
public void AMSRegister(AMSAgentDescription amsd) throws FIPAException, JADESecurityException {
    checkMandatorySlots(FIPAAgentManagementOntology.REGISTER, amsd);
    // Complete the agent's AID with all platform MTP addresses.
    String[] addresses = myPlatform.platformAddresses();
    AID id = amsd.getName();
    for(int i = 0; i < addresses.length; i++)
        id.addAddresses(addresses[i]);
    try {
        // Ownership is an optional slot: guard against null instead of
        // failing with a NullPointerException (bug fix). Expected format
        // is "username[:password]".
        String ownership = amsd.getOwnership();
        String username = ownership;
        String password = "";
        if (ownership != null) {
            int colon = ownership.indexOf(':');
            if (colon != -1) {
                username = ownership.substring(0, colon);
                password = ownership.substring(colon + 1);
            }
        }
        Object old = agentDescriptions.register(amsd.getName(), amsd);
        if(old != null)
            throw new AlreadyRegistered();
        // Principal change is intentionally disabled (security support is
        // off); kept under 'if (false)' so the catches below stay valid.
        if (false)
            myPlatform.changeAgentPrincipal(amsd.getName(), new UserPrincipal(username), password.getBytes());
    }
    catch (NotFoundException nfe) {
        nfe.printStackTrace();
    }
    catch (UnreachableException ue) {
        ue.printStackTrace();
    }
}
/** it is called also by Agent.java **/
public void AMSDeregister(AMSAgentDescription amsd) throws FIPAException {
    checkMandatorySlots(FIPAAgentManagementOntology.DEREGISTER, amsd);
    // deregister() returns the previously stored description, or null
    // when the agent was never registered.
    if (agentDescriptions.deregister(amsd.getName()) == null)
        throw new NotRegistered();
}
/**
 * Replaces the stored description for an already-registered agent and
 * applies the life-cycle / ownership changes it implies.
 */
private void AMSModify(AMSAgentDescription amsd) throws FIPAException, JADESecurityException {
    checkMandatorySlots(FIPAAgentManagementOntology.MODIFY, amsd);
    // Deregister first so a non-registered agent can be detected, then
    // store the new description.
    AMSAgentDescription old = (AMSAgentDescription)agentDescriptions.deregister(amsd.getName());
    if (old == null)
        throw new NotRegistered();
    agentDescriptions.register(amsd.getName(), amsd);
    try {
        // NOTE(review): getOwnership() may return null, in which case the
        // indexOf call below throws NullPointerException -- confirm that
        // callers always supply the ownership slot ("username[:password]").
        String ownership = amsd.getOwnership();
        int dot2 = ownership.indexOf(':');
        String username = dot2 != -1 ? ownership.substring(0, dot2) : ownership;
        String password = dot2 != -1 ? ownership.substring(dot2 + 1, ownership.length()) : "";
        // Apply the life-cycle transition implied by the state change.
        if (!old.getState().equals(amsd.SUSPENDED) && amsd.getState().equals(amsd.SUSPENDED))
            myPlatform.suspend(amsd.getName(), "");
        if (old.getState().equals(amsd.SUSPENDED) && !amsd.getState().equals(amsd.SUSPENDED))
            myPlatform.activate(amsd.getName(), "");
        //__JADE_ONLY__BEGIN
        if (!old.getOwnership().equalsIgnoreCase(amsd.getOwnership()))
            myPlatform.changeAgentPrincipal(amsd.getName(), new UserPrincipal(username), password.getBytes());
        //__JADE_ONLY__END
    }
    catch (NotFoundException nfe) {
        nfe.printStackTrace();
    }
    catch (UnreachableException ue) {
        ue.printStackTrace();
    }
}
// Performs a 'search' action against the AMS knowledge base.
// Search has no mandatory slots.
// NOTE(review): the 'constraints' and 'reply' parameters are currently
// ignored (no max-results handling) -- confirm whether that is intended.
private List AMSSearch(AMSAgentDescription amsd, SearchConstraints constraints, ACLMessage reply) throws FIPAException {
    return agentDescriptions.search(amsd);
}
// This one is called in response to a 'move-agent' action
void AMSMoveAgent(AID agentID, Location where) throws FIPAException {
    try {
        myPlatform.move(agentID, where, "");
    }
    catch(NotFoundException nfe) {
        // The agent is not known to the platform.
        throw new NotRegistered();
    }
    catch(UnreachableException ue) {
        throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
    }
}
// This one is called in response to a 'clone-agent' action
void AMSCloneAgent(AID agentID, Location where, String newName) throws FIPAException {
    try {
        myPlatform.copy(agentID, where, newName, "");
    }
    catch(NotFoundException nfe) {
        // The agent is not known to the platform.
        throw new NotRegistered();
    }
    catch(UnreachableException ue) {
        throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
    }
}
// This one is called in response to a 'where-is-agent' action
Location AMSWhereIsAgent(AID agentID) throws FIPAException {
    try {
        // Map the agent's container to a mobility Location.
        ContainerID cid = myPlatform.getContainerID(agentID);
        return mobilityMgr.getLocation(cid.getName());
    }
    catch(NotFoundException nfe) {
        nfe.printStackTrace();
        throw new NotRegistered();
    }
}
// This one is called in response to a 'query-platform-locations' action.
// Returns an Iterator over all container Locations known to the platform.
Iterator AMSGetPlatformLocations() {
    return mobilityMgr.getLocations();
}
// Methods to be called from AgentPlatform to notify AMS of special events
/**
 Post an event to the AMS agent. This method must not be used by
 application agents.
 */
public synchronized void addedContainer(PlatformEvent ev) {
    ContainerID cid = ev.getContainer();
    // Keep the mobility location table in sync with the new container.
    mobilityMgr.addLocation(cid.getName(), cid);
    // Queue an 'added-container' event for registered tools.
    AddedContainer content = new AddedContainer();
    content.setContainer(cid);
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
/**
 Post an event to the AMS agent. This method must not be used by
 application agents.
 */
public synchronized void removedContainer(PlatformEvent ev) {
    ContainerID cid = ev.getContainer();
    // Drop the container from the mobility location table.
    mobilityMgr.removeLocation(cid.getName());
    // Queue a 'removed-container' event for registered tools.
    RemovedContainer content = new RemovedContainer();
    content.setContainer(cid);
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
/**
 Post an event to the AMS agent. This method must not be used by
 application agents.
 */
public synchronized void bornAgent(PlatformEvent ev) {
    // Queue a 'born-agent' event for registered tools.
    BornAgent content = new BornAgent();
    content.setAgent(ev.getAgent());
    content.setWhere(ev.getContainer());
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
/**
 Post an event to the AMS agent. This method must not be used by
 application agents.
 */
public synchronized void deadAgent(PlatformEvent ev) {
    ContainerID cid = ev.getContainer();
    AID agentID = ev.getAgent();
    // Deregister the agent, if it's still there.
    try {
        AMSAgentDescription amsd = new AMSAgentDescription();
        amsd.setName(agentID);
        AMSDeregister(amsd);
    }
    catch(NotRegistered nr){
        // The agent already deregistered itself during its doDelete() method.
    }
    catch(FIPAException fe) {
        fe.printStackTrace();
    }
    // Queue a 'dead-agent' event for registered tools.
    DeadAgent da = new DeadAgent();
    da.setAgent(agentID);
    da.setWhere(cid);
    EventRecord er = new EventRecord(da, here());
    er.setWhen(ev.getTime());
    eventQueue.add(er);
    doWake();
}
/**
 Post an event to the AMS agent. This method must not be used by
 application agents.
 */
public synchronized void suspendedAgent(PlatformEvent ev) {
    // Registry needs an update here!
    // Queue a 'suspended-agent' event for registered tools.
    SuspendedAgent content = new SuspendedAgent();
    content.setAgent(ev.getAgent());
    content.setWhere(ev.getContainer());
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
/**
 Post an event to the AMS agent. This method must not be used by
 application agents.
 */
public synchronized void resumedAgent(PlatformEvent ev) {
    // Registry needs an update here!
    // Queue a 'resumed-agent' event for registered tools.
    ResumedAgent content = new ResumedAgent();
    content.setAgent(ev.getAgent());
    content.setWhere(ev.getContainer());
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
//__JADE_ONLY__BEGIN
/**
 Post an event to the AMS agent. This method must not be used by
 application agents.
 */
public synchronized void changedAgentPrincipal(PlatformEvent ev) {
    ContainerID cid = ev.getContainer();
    AID agentID = ev.getAgent();
    // Registry needs an update here!
    // Queue a 'changed-agent-principal' event carrying both the old and
    // the new principal, for registered tools.
    ChangedAgentPrincipal cap = new ChangedAgentPrincipal();
    cap.setAgent(agentID);
    cap.setWhere(cid);
    cap.setOldPrincipal(ev.getOldPrincipal());
    cap.setNewPrincipal(ev.getNewPrincipal());
    EventRecord er = new EventRecord(cap, here());
    er.setWhen(ev.getTime());
    eventQueue.add(er);
    doWake();
}
//__JADE_ONLY__END
/**
 Post an event to the AMS agent. This method must not be used by
 application agents.
 */
public synchronized void movedAgent(PlatformEvent ev) {
    // Queue a 'moved-agent' event for registered tools.
    MovedAgent content = new MovedAgent();
    content.setAgent(ev.getAgent());
    content.setFrom(ev.getContainer());
    content.setTo(ev.getNewContainer());
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
/**
   Post an event to the AMS agent notifying that a new MTP was
   installed. Updates the platform profile and every registered
   agent's AID with the new address, then queues AddedMTP and
   PlatformDescription events for the tools.
   This method must not be used by application agents.
*/
public synchronized void addedMTP(MTPEvent ev) {
    Channel ch = ev.getChannel();
    ContainerID cid = ev.getPlace();
    String proto = ch.getProtocol();
    String address = ch.getAddress();
    // Add the new address to the platform profile
    APTransportDescription mtps = theProfile.getTransportProfile();
    MTPDescription desc = findMTPDescription(mtps, proto);
    desc.addAddresses(address);
    // Update the APDescription file (skipped during startup, before setup()
    // has written the initial description).
    if(getState() != AP_INITIATED)
        writeAPDescription();
    // Retrieve all agent descriptors (an empty template matches everything)
    AMSAgentDescription amsd = new AMSAgentDescription();
    List l = agentDescriptions.search(amsd);
    // Add the new address to all the agent descriptors
    Iterator it = l.iterator();
    while(it.hasNext()) {
        AMSAgentDescription ad = (AMSAgentDescription)it.next();
        AID name = ad.getName();
        name.addAddresses(address);
    }
    // Generate a suitable AMS event
    AddedMTP amtp = new AddedMTP();
    amtp.setAddress(address);
    amtp.setWhere(cid);
    EventRecord er = new EventRecord(amtp, here());
    er.setWhen(ev.getTime());
    eventQueue.add(er);
    // Notify the tools that the APDescription was updated as well.
    PlatformDescription ap = new PlatformDescription();
    ap.setPlatform(theProfile);
    er = new EventRecord(ap, here());
    er.setWhen(ev.getTime());
    eventQueue.add(er);
    doWake();
}
/** Post an event to the AMS agent notifying that an MTP was removed.
    Removes the dead address from the platform profile and from every
    registered agent's AID, then queues RemovedMTP and
    PlatformDescription events for the tools.
    This method must not be used by application agents.
*/
public synchronized void removedMTP(MTPEvent ev) {
    Channel ch = ev.getChannel();
    ContainerID cid = ev.getPlace();
    String proto = ch.getProtocol();
    String address = ch.getAddress();
    // Remove the dead address from the platform profile
    APTransportDescription mtps = theProfile.getTransportProfile();
    MTPDescription desc = findMTPDescription(mtps, proto);
    Iterator addresses = desc.getAllAddresses();
    while(addresses.hasNext()) {
        // Remove all MTPs that have the 'address' String in their
        // address list.
        String nextAddr = (String)addresses.next();
        if(nextAddr.equalsIgnoreCase(address))
            addresses.remove();
    }
    // Check if there are other addresses left for this MTP: if not,
    // remove the MTP from the 'ap-platform-description' object
    addresses = desc.getAllAddresses();
    if(!addresses.hasNext())
        mtps.removeAvailableMtps(desc);
    // Update the APDescription file
    writeAPDescription();
    // Remove the dead address from all the registered agents
    AID[] agents = myPlatform.agentNames();
    AMSAgentDescription amsd = new AMSAgentDescription();
    for(int i = 0; i < agents.length; i++) {
        amsd.setName(agents[i]);
        List l = agentDescriptions.search(amsd);
        if(!l.isEmpty()) {
            AMSAgentDescription amsDesc = (AMSAgentDescription)l.get(0);
            AID name = amsDesc.getName();
            name.removeAddresses(address);
        }
    }
    // Generate a suitable AMS event
    RemovedMTP rmtp = new RemovedMTP();
    rmtp.setAddress(address);
    rmtp.setWhere(cid);
    EventRecord er = new EventRecord(rmtp, here());
    er.setWhen(ev.getTime());
    eventQueue.add(er);
    // Notify the tools that the APDescription was updated as well.
    PlatformDescription ap = new PlatformDescription();
    ap.setPlatform(theProfile);
    er = new EventRecord(ap, here());
    er.setWhen(ev.getTime());
    eventQueue.add(er);
    doWake();
}
// MTPEvent callbacks for message traffic: only traced to stdout, no
// AMS event is generated for registered tools.
public void messageIn(MTPEvent ev) { System.out.println("Message In."); }
public void messageOut(MTPEvent ev) { System.out.println("Message Out."); }
/**
 * Writes the current platform profile ('ap-platform-description'
 * object) to the "APDescription.txt" file in the current working
 * directory. I/O errors are reported on standard error but not
 * propagated, since a failure to persist the description must not
 * stop the AMS.
 */
private void writeAPDescription()
{
    FileWriter f = null;
    try {
        f = new FileWriter("APDescription.txt");
        f.write(theProfile.toString());
        f.write('\n');
        f.flush();
    }
    catch(java.io.IOException ioe) {
        ioe.printStackTrace();
    }
    finally {
        // Always release the file handle; the previous version leaked
        // it when write() or flush() threw an IOException.
        if(f != null) {
            try {
                f.close();
            }
            catch(java.io.IOException ioe) {
                ioe.printStackTrace();
            }
        }
    }
}
/**
 * Looks up the MTPDescription matching the given protocol name
 * (case-insensitively) inside an 'ap-transport-description' object.
 * If no matching MTP is found, a fresh description is created, added
 * to the transport profile and returned.
 */
private MTPDescription findMTPDescription(APTransportDescription mtps, String proto) {
    for(Iterator it = mtps.getAllAvailableMtps(); it.hasNext(); ) {
        MTPDescription current = (MTPDescription)it.next();
        if(proto.equalsIgnoreCase(current.getMtpName()))
            return current;
    }
    // No MTP was found: register a new description under this protocol name.
    MTPDescription fresh = new MTPDescription();
    fresh.setMtpName(proto);
    mtps.addAvailableMtps(fresh);
    return fresh;
}
} // End of class ams
|
src/jade/domain/ams.java
|
/*****************************************************************
JADE - Java Agent DEvelopment Framework is a framework to develop
multi-agent systems in compliance with the FIPA specifications.
Copyright (C) 2000 CSELT S.p.A.
The updating of this file to JADE 2.0 has been partially supported by the IST-1999-10211 LEAP Project
GNU Lesser General Public License
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation,
version 2.1 of the License.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
*****************************************************************/
package jade.domain;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.BufferedWriter;
import java.io.OutputStreamWriter;
import java.io.FileWriter;
import java.net.InetAddress;
import jade.util.leap.Iterator;
import jade.util.leap.List;
import jade.util.leap.ArrayList;
import jade.util.leap.Set;
import jade.util.leap.Map;
import jade.util.leap.HashMap;
import jade.core.*;
import jade.core.behaviours.*;
import jade.core.event.PlatformEvent;
import jade.core.event.MTPEvent;
import jade.domain.FIPAAgentManagement.*;
import jade.domain.JADEAgentManagement.*;
import jade.domain.introspection.*;
import jade.lang.acl.ACLMessage;
import jade.lang.acl.MessageTemplate;
import jade.lang.Codec;
import jade.lang.sl.SL0Codec;
import jade.onto.Ontology;
import jade.onto.OntologyException;
import jade.onto.Frame;
import jade.onto.basic.Action;
import jade.onto.basic.BasicOntology;
import jade.onto.basic.ResultPredicate;
import jade.onto.basic.DonePredicate;
import jade.onto.basic.TrueProposition;
import jade.mtp.MTPException;
import jade.proto.FipaRequestResponderBehaviour;
//__JADE_ONLY__BEGIN
import jade.security.AgentPrincipal;
import jade.security.UserPrincipal;
//__JADE_ONLY__END
import jade.security.JADESecurityException;
/**
Standard <em>Agent Management System</em> agent. This class
implements <em><b>FIPA</b></em> <em>AMS</em> agent. <b>JADE</b>
applications cannot use this class directly, but interact with it
through <em>ACL</em> message passing.
@author Giovanni Rimassa - Universita` di Parma
@version $Date$ $Revision$
*/
public class ams extends Agent implements AgentManager.Listener {
// Common superclass of all the Behaviours serving 'fipa-request'
// actions addressed to the AMS. Each concrete subclass acts both as a
// Factory (spawning a new handler for an incoming request) and as an
// ActionHandler (processing the single request it was created for).
private abstract class AMSBehaviour
    extends FipaRequestResponderBehaviour.ActionHandler
    implements FipaRequestResponderBehaviour.Factory {
    protected AMSBehaviour(ACLMessage req) {
        super(ams.this, req);
    }
    /**
     * Create the content for the AGREE message.
     * @param a is the action that has been agreed to perform; if
     * <code>null</code>, a placeholder "UnknownAction" is used
     * @return a String with the content ready to be set into the message;
     * falls back to "( true )" if encoding fails
     **/
    protected String createAgreeContent(Action a) {
        // A throw-away message is used only to run the content encoder
        // with the request's language and ontology.
        ACLMessage temp = new ACLMessage(ACLMessage.AGREE);
        temp.setLanguage(getRequest().getLanguage());
        temp.setOntology(getRequest().getOntology());
        List l = new ArrayList(2);
        if (a == null) {
            a = new Action();
            a.set_0(getAID());
            a.set_1("UnknownAction");
        }
        l.add(a);
        l.add(new TrueProposition());
        try {
            fillMsgContent(temp,l);
        } catch (Exception ee) { // in any case try to return some good content
            return "( true )";
        }
        return temp.getContent();
    }
    /**
     * Create the content for a so-called "exceptional" message, i.e.
     * one of NOT_UNDERSTOOD, FAILURE, REFUSE message
     * @param a is the Action that generated the exception; if
     * <code>null</code>, a placeholder "UnknownAction" is used
     * @param ontoName is the name of the ontology to encode with
     * @param e is the generated Exception
     * @return a String containing the content to be sent back in the reply
     * message; in case an exception is thrown somewhere, the method
     * try to return anyway a valid content with a best-effort strategy
     **/
    protected String createExceptionalMsgContent(Action a, String ontoName, FIPAException e) {
        // The performative of this temporary message is irrelevant: it
        // only carries the language/ontology for the encoder.
        ACLMessage temp = new ACLMessage(ACLMessage.NOT_UNDERSTOOD);
        temp.setLanguage(SL0Codec.NAME);
        temp.setOntology(ontoName);
        List l = new ArrayList(2);
        if (a == null) {
            a = new Action();
            a.set_0(getAID());
            a.set_1("UnknownAction");
        }
        l.add(a);
        l.add(e);
        try {
            fillMsgContent(temp,l);
        } catch (Exception ee) { // in any case try to return some good content
            return e.getMessage();
        }
        return temp.getContent();
    }
    // Each concrete subclass will implement this deferred method to
    // do action-specific work
    protected abstract void processAction(Action a) throws FIPAException, JADESecurityException;
    /**
     * Decodes the request content, extracts the Action and delegates to
     * processAction(). FIPAExceptions are turned into FAILURE (for
     * FailureException) or REFUSE replies; JADESecurityExceptions into
     * REFUSE replies.
     */
    public void action() {
        Action a = null;
        try {
            ACLMessage msg = getRequest();
            List l = myAgent.extractMsgContent(msg);
            a = (Action)l.get(0);
            // Do real action, deferred to subclasses
            processAction(a);
        }
        catch (FIPAException fe) {
            String ontoName = getRequest().getOntology();
            sendReply((fe instanceof FailureException?ACLMessage.FAILURE:ACLMessage.REFUSE),createExceptionalMsgContent(a, ontoName, fe));
        }
        catch (JADESecurityException se) {
            sendReply(ACLMessage.REFUSE, "(" + se.getMessage() + ")");
        }
    }
    /**
       Writes the <code>Done</code> predicate for the specific action
       into the result <code>String</code> object, encoded in SL0.
    */
    protected String doneAction(Action a) throws FIPAException {
        try {
            Ontology o = lookupOntology(getRequest().getOntology());
            DonePredicate dp = new DonePredicate();
            dp.set_0(a);
            Frame f = o.createFrame(dp, BasicOntology.DONE);
            List l = new ArrayList(1);
            l.add(f);
            Codec c = lookupLanguage(SL0Codec.NAME);
            String result = c.encode(l, o);
            return result;
        }
        catch(OntologyException oe) {
            oe.printStackTrace();
            throw new FIPAException("Internal error in building Done predicate.");
        }
    }
    // One-shot behaviour: each handler processes exactly one request.
    public boolean done() {
        return true;
    }
    public void reset() {
    }
} // End of AMSBehaviour class
// The following concrete classes serve both as a Factory and as an
// Action: when seen as Factory they can spawn a new
// Behaviour to process a given request, and when seen as
// Action they process their request and terminate.
// Handles the FIPA 'register' action: adds an AMSAgentDescription to
// the AMS agent table and, if the registering agent was created via a
// 'create-agent' request, completes that pending interaction too.
private class RegBehaviour extends AMSBehaviour {
    public RegBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new RegBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException, JADESecurityException {
        Register r = (Register)a.getAction();
        AMSAgentDescription amsd = (AMSAgentDescription)r.get_0();
        // This agent was created by some other, which is still
        // waiting for an 'inform' message. Recover the buffered
        // message from the Map and send it back.
        ACLMessage informCreator = (ACLMessage)pendingInforms.remove(amsd.getName());
        // The message in pendingInforms can be registered with only the localName
        // without the platformID, so retry the lookup with the part of the
        // name before the '@' separator.
        if(informCreator == null) {
            String name = amsd.getName().getName();
            int atPos = name.lastIndexOf('@');
            if(atPos > 0) {
                name = name.substring(0, atPos);
                informCreator = (ACLMessage)pendingInforms.remove(name);
            }
        }
        try {
            // Write new agent data in AMS Agent Table
            AMSRegister(amsd);
            sendReply(ACLMessage.AGREE,createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
            // Inform agent creator that registration was successful.
            if(informCreator != null) {
                send(informCreator);
            }
        }
        catch(AlreadyRegistered are) {
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            String ontoName = getRequest().getOntology();
            sendReply(ACLMessage.FAILURE,createExceptionalMsgContent(a, ontoName, are));
            // Inform agent creator that registration failed.
            if(informCreator != null) {
                informCreator.setPerformative(ACLMessage.FAILURE);
                informCreator.setContent(createExceptionalMsgContent(a, ontoName, are));
                send(informCreator);
            }
        }
    }
} // End of RegBehaviour class
// Handles the FIPA 'deregister' action: removes an
// AMSAgentDescription from the AMS agent table.
private class DeregBehaviour extends AMSBehaviour {
    public DeregBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new DeregBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException {
        // Remove the description from the agent table, then confirm
        // with AGREE followed by an INFORM Done.
        Deregister dereg = (Deregister)a.getAction();
        AMSDeregister((AMSAgentDescription)dereg.get_0());
        sendReply(ACLMessage.AGREE, createAgreeContent(a));
        sendReply(ACLMessage.INFORM, doneAction(a));
    }
} // End of DeregBehaviour class
// Handles the FIPA 'modify' action: replaces an existing
// AMSAgentDescription in the AMS agent table.
private class ModBehaviour extends AMSBehaviour {
    public ModBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new ModBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException, JADESecurityException {
        // Update the description in the agent table, then confirm
        // with AGREE followed by an INFORM Done.
        Modify mod = (Modify)a.getAction();
        AMSModify((AMSAgentDescription)mod.get_0());
        sendReply(ACLMessage.AGREE, createAgreeContent(a));
        sendReply(ACLMessage.INFORM, doneAction(a));
    }
} // End of ModBehaviour class
// Handles the FIPA 'search' action: matches an AMSAgentDescription
// template against the AMS agent table and sends back the results
// wrapped in a 'result' predicate.
private class SrchBehaviour extends AMSBehaviour {
    public SrchBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new SrchBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException {
        Search s = (Search)a.getAction();
        AMSAgentDescription amsd = (AMSAgentDescription)s.get_0();
        SearchConstraints constraints = s.get_1();
        List l = AMSSearch(amsd, constraints, getReply());
        sendReply(ACLMessage.AGREE,createAgreeContent(a));
        ACLMessage msg = getRequest().createReply();
        msg.setPerformative(ACLMessage.INFORM);
        // Wrap all matching descriptions in a single 'result' predicate.
        ResultPredicate r = new ResultPredicate();
        r.set_0(a);
        for (int i=0; i<l.size(); i++)
            r.add_1(l.get(i));
        // Reuse the search-result list as the single-element content list.
        l.clear();
        l.add(r);
        fillMsgContent(msg,l);
        send(msg);
    }
} // End of SrchBehaviour class
// Handles the FIPA 'get-description' action: sends back the platform
// profile ('ap-platform-description' object) wrapped in a 'result'
// predicate.
private class GetDescriptionBehaviour extends AMSBehaviour {
    public GetDescriptionBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new GetDescriptionBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException {
        sendReply(ACLMessage.AGREE, createAgreeContent(a));
        ACLMessage reply = getReply();
        reply.setPerformative(ACLMessage.INFORM);
        // Wrap the platform profile in a 'result' predicate.
        // (An unused 'List l' local present in the original was removed.)
        ResultPredicate rp = new ResultPredicate();
        rp.set_0(a);
        rp.add_1(theProfile);
        ArrayList list = new ArrayList(1);
        list.add(rp);
        fillMsgContent(reply,list);
        send(reply);
    }
} // End of GetDescriptionBehaviour class
// These Behaviours handle interactions with platform tools.
// Accepts 'subscribe' messages from platform tools (RMA, Sniffer...).
// On subscription the tool receives a snapshot of the current platform
// state (containers, agents, MTP addresses, AP description) as a
// series of Occurred events, and is then added to the notification
// list served by NotifyToolsBehaviour.
private class RegisterToolBehaviour extends CyclicBehaviour {
    private MessageTemplate subscriptionTemplate;
    RegisterToolBehaviour() {
        // Match: SUBSCRIBE + SL0 + jade-introspection ontology +
        // reply-with "tool-subscription".
        MessageTemplate mt1 = MessageTemplate.MatchLanguage(SL0Codec.NAME);
        MessageTemplate mt2 = MessageTemplate.MatchOntology(JADEIntrospectionOntology.NAME);
        MessageTemplate mt12 = MessageTemplate.and(mt1, mt2);
        mt1 = MessageTemplate.MatchReplyWith("tool-subscription");
        mt2 = MessageTemplate.MatchPerformative(ACLMessage.SUBSCRIBE);
        subscriptionTemplate = MessageTemplate.and(mt1, mt2);
        subscriptionTemplate = MessageTemplate.and(subscriptionTemplate, mt12);
    }
    public void action() {
        // Receive 'subscribe' ACL messages.
        ACLMessage current = receive(subscriptionTemplate);
        if(current != null) {
            // FIXME: Should parse 'iota ?x ...'
            // Get new tool name from subscription message
            AID newTool = current.getSender();
            try {
                // Send back the whole container list, one AddedContainer
                // event per container.
                ContainerID[] ids = myPlatform.containerIDs();
                for(int i = 0; i < ids.length; i++) {
                    ContainerID cid = ids[i];
                    AddedContainer ac = new AddedContainer();
                    ac.setContainer(cid);
                    EventRecord er = new EventRecord(ac, here());
                    Occurred o = new Occurred();
                    o.set_0(er);
                    List l = new ArrayList(1);
                    l.add(o);
                    toolNotification.clearAllReceiver();
                    toolNotification.addReceiver(newTool);
                    fillMsgContent(toolNotification, l);
                    send(toolNotification);
                }
                // Send all agent names, along with their container name,
                // one BornAgent event per agent.
                AID[] agents = myPlatform.agentNames();
                for(int i = 0; i < agents.length; i++) {
                    AID agentName = agents[i];
                    ContainerID cid = myPlatform.getContainerID(agentName);
                    BornAgent ba = new BornAgent();
                    ba.setAgent(agentName);
                    ba.setWhere(cid);
                    EventRecord er = new EventRecord(ba, here());
                    Occurred o = new Occurred();
                    o.set_0(er);
                    List l = new ArrayList(1);
                    l.add(o);
                    toolNotification.clearAllReceiver();
                    toolNotification.addReceiver(newTool);
                    fillMsgContent(toolNotification, l);
                    send(toolNotification);
                }
                // Send the list of the installed MTPs, one AddedMTP event
                // per address.
                String[] addresses = myPlatform.platformAddresses();
                for(int i = 0; i < addresses.length; i++) {
                    AddedMTP amtp = new AddedMTP();
                    amtp.setAddress(addresses[i]);
                    amtp.setWhere(new ContainerID(AgentManager.MAIN_CONTAINER_NAME, null)); // FIXME: should use AgentManager to know the container
                    EventRecord er = new EventRecord(amtp, here());
                    Occurred o = new Occurred();
                    o.set_0(er);
                    List l = new ArrayList(1);
                    l.add(o);
                    toolNotification.clearAllReceiver();
                    toolNotification.addReceiver(newTool);
                    fillMsgContent(toolNotification, l);
                    send(toolNotification);
                }
                // Notification to the RMA of the APDescription
                PlatformDescription ap = new PlatformDescription();
                ap.setPlatform(theProfile);
                EventRecord er = new EventRecord(ap,here());
                Occurred o = new Occurred();
                o.set_0(er);
                List l = new ArrayList(1);
                l.add(o);
                toolNotification.clearAllReceiver();
                toolNotification.addReceiver(newTool);
                fillMsgContent(toolNotification, l);
                send(toolNotification);
                // Add the new tool to tools list.
                tools.add(newTool);
            }
            catch(NotFoundException nfe) {
                nfe.printStackTrace();
            }
            catch(FIPAException fe) {
                fe.printStackTrace();
            }
        }
        else
            block();
    }
} // End of RegisterToolBehaviour class
// Accepts 'cancel' messages from platform tools and removes the
// sender from the notification list served by NotifyToolsBehaviour.
private class DeregisterToolBehaviour extends CyclicBehaviour {
    private MessageTemplate cancellationTemplate;
    DeregisterToolBehaviour() {
        // Match: CANCEL + SL0 + jade-introspection ontology +
        // reply-with "tool-cancellation".
        MessageTemplate mt1 = MessageTemplate.MatchLanguage(SL0Codec.NAME);
        MessageTemplate mt2 = MessageTemplate.MatchOntology(JADEIntrospectionOntology.NAME);
        MessageTemplate mt12 = MessageTemplate.and(mt1, mt2);
        mt1 = MessageTemplate.MatchReplyWith("tool-cancellation");
        mt2 = MessageTemplate.MatchPerformative(ACLMessage.CANCEL);
        cancellationTemplate = MessageTemplate.and(mt1, mt2);
        cancellationTemplate = MessageTemplate.and(cancellationTemplate, mt12);
    }
    public void action() {
        // Receive 'cancel' ACL messages.
        ACLMessage current = receive(cancellationTemplate);
        if(current != null) {
            // FIXME: Should parse the content
            // Remove this tool from the tools agent group.
            tools.remove(current.getSender());
        }
        else
            block();
    }
} // End of DeregisterToolBehaviour class
// Drains the event buffer filled by the synchronized platform-event
// callbacks (bornAgent(), deadAgent(), addedMTP()...) and broadcasts
// each pending EventRecord, wrapped in an Occurred predicate, to all
// registered tools.
private class NotifyToolsBehaviour extends CyclicBehaviour {
    public void action() {
        synchronized(ams.this) { // Mutual exclusion with handleXXX() methods
            // Look into the event buffer
            Iterator it = eventQueue.iterator();
            Occurred o = new Occurred();
            while(it.hasNext()) {
                // Write the event into the notification message
                EventRecord er = (EventRecord)it.next();
                o.set_0(er);
                List l = new ArrayList(1);
                l.add(o);
                try {
                    fillMsgContent(toolNotification, l);
                }
                catch(FIPAException fe) {
                    fe.printStackTrace();
                }
                // Put all tools in the receiver list
                toolNotification.clearAllReceiver();
                Iterator toolIt = tools.iterator();
                while(toolIt.hasNext()) {
                    AID tool = (AID)toolIt.next();
                    toolNotification.addReceiver(tool);
                }
                send(toolNotification);
                // Consume the event once it has been broadcast.
                it.remove();
            }
        }
        block();
    }
} // End of NotifyToolsBehaviour class
// Handles the JADE 'kill-container' action: shuts down the given
// container through the AgentManager.
private class KillContainerBehaviour extends AMSBehaviour {
    public KillContainerBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new KillContainerBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException {
        KillContainer kc = (KillContainer)a.get_1();
        ContainerID cid = kc.getContainer();
        myPlatform.killContainer(cid);
        sendReply(ACLMessage.AGREE, createAgreeContent(a));
        sendReply(ACLMessage.INFORM,doneAction(a));
    }
} // End of KillContainerBehaviour class
// Handles the JADE 'create-agent' action: starts a new agent on the
// requested container. The final 'inform Done' reply is buffered in
// pendingInforms and sent by RegBehaviour only when the new agent
// registers itself with the AMS.
private class CreateBehaviour extends AMSBehaviour {
    public CreateBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new CreateBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException, JADESecurityException {
        CreateAgent ca = (CreateAgent)a.get_1();
        String agentName = ca.getAgentName();
        String className = ca.getClassName();
        ContainerID container = ca.getContainer();
        Iterator arg = ca.getAllArguments(); //return an iterator of all arguments
        // Flatten the arguments into an array of their String forms.
        ArrayList listArg = new ArrayList();
        while(arg.hasNext())
            listArg.add(arg.next().toString());
        String[] arguments = new String[listArg.size()];
        for(int n = 0; n< listArg.size(); n++)
            arguments[n] = (String)listArg.get(n);
        sendReply(ACLMessage.AGREE, createAgreeContent(a));
        try {
            myPlatform.create(agentName, className, arguments, container, null, null); //!!!
            // An 'inform Done' message will be sent to the requester only
            // when the newly created agent will register itself with the
            // AMS. The new agent's name will be used as the key in the map.
            ACLMessage reply = getReply();
            reply = (ACLMessage)reply.clone();
            reply.setPerformative(ACLMessage.INFORM);
            reply.setContent(doneAction(a));
            pendingInforms.put(agentName, reply);
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError(ue.getMessage());
        }
    }
} // End of CreateBehaviour class
// Handles the JADE 'kill-agent' action: terminates the given agent
// through the AgentManager.
private class KillBehaviour extends AMSBehaviour {
    public KillBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new KillBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException {
        // Kill an agent
        KillAgent ka = (KillAgent)a.get_1();
        AID agentID = ka.getAgent();
        String password = ka.getPassword();
        try {
            myPlatform.kill(agentID, password);
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
        }
        catch(NotFoundException nfe) {
            // The target agent does not exist: report it as not registered.
            throw new NotRegistered();
        }
    }
} // End of KillBehaviour class
// Handles the JADE 'sniff-on' action: starts routing the messages of
// the given agents through the requesting sniffer tool.
private class SniffAgentOnBehaviour extends AMSBehaviour {
    public SniffAgentOnBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new SniffAgentOnBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException {
        SniffOn so = (SniffOn)a.get_1();
        try {
            myPlatform.sniffOn(so.getSniffer(), so.getCloneOfSniffedAgents());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
        }
        catch(NotFoundException nfe) {
            // A sniffed agent does not exist: report it as not registered.
            throw new NotRegistered();
        }
    }
} // End of SniffAgentOnBehaviour class
// Handles the JADE 'sniff-off' action: stops routing the messages of
// the given agents through the requesting sniffer tool.
private class SniffAgentOffBehaviour extends AMSBehaviour {
    public SniffAgentOffBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new SniffAgentOffBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException {
        SniffOff so = (SniffOff)a.get_1();
        try {
            myPlatform.sniffOff(so.getSniffer(), so.getCloneOfSniffedAgents());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM,doneAction(a));
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
        }
        catch(NotFoundException nfe) {
            // A sniffed agent does not exist: report it as not registered.
            throw new NotRegistered();
        }
    }
} // End of SniffAgentOffBehaviour class
// Handles the JADE 'debug-on' action: attaches the requesting
// debugger tool to the given agents.
private class DebugAgentOnBehaviour extends AMSBehaviour {
    public DebugAgentOnBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new DebugAgentOnBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException {
        DebugOn dbgOn = (DebugOn)a.get_1();
        try {
            myPlatform.debugOn(dbgOn.getDebugger(), dbgOn.getCloneOfDebuggedAgents());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
        }
        catch(NotFoundException nfe) {
            // A debugged agent does not exist: report it as not registered.
            throw new NotRegistered();
        }
    }
} // End of DebugAgentOnBehaviour class
// Handles the JADE 'debug-off' action: detaches the requesting
// debugger tool from the given agents.
private class DebugAgentOffBehaviour extends AMSBehaviour {
    public DebugAgentOffBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new DebugAgentOffBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException {
        DebugOff dbgOff = (DebugOff)a.get_1();
        try {
            myPlatform.debugOff(dbgOff.getDebugger(), dbgOff.getCloneOfDebuggedAgents());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
        }
        catch(NotFoundException nfe) {
            // A debugged agent does not exist: report it as not registered.
            throw new NotRegistered();
        }
    }
} // End of DebugAgentOffBehaviour class
// Handles the JADE 'install-mtp' action: installs a new Message
// Transport Protocol on the given container.
private class InstallMTPBehaviour extends AMSBehaviour {
    public InstallMTPBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new InstallMTPBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException {
        InstallMTP imtp = (InstallMTP)a.get_1();
        try {
            myPlatform.installMTP(imtp.getAddress(), imtp.getContainer(), imtp.getClassName());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(NotFoundException nfe) {
            throw new jade.domain.FIPAAgentManagement.UnrecognisedParameterValue("MTP", nfe.getMessage());
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError(ue.getMessage());
        }
        catch(MTPException mtpe) {
            throw new jade.domain.FIPAAgentManagement.UnrecognisedParameterValue("MTP", mtpe.getMessage());
        }
    }
} // End of InstallMTPBehaviour class
// Handles the JADE 'uninstall-mtp' action: removes a Message
// Transport Protocol from the given container.
private class UninstallMTPBehaviour extends AMSBehaviour {
    public UninstallMTPBehaviour(ACLMessage msg) {
        super(msg);
    }
    public FipaRequestResponderBehaviour.ActionHandler create(ACLMessage msg) {
        return new UninstallMTPBehaviour(msg);
    }
    protected void processAction(Action a) throws FIPAException {
        UninstallMTP umtp = (UninstallMTP)a.get_1();
        try {
            myPlatform.uninstallMTP(umtp.getAddress(), umtp.getContainer());
            sendReply(ACLMessage.AGREE, createAgreeContent(a));
            sendReply(ACLMessage.INFORM, doneAction(a));
        }
        catch(NotFoundException nfe) {
            throw new jade.domain.FIPAAgentManagement.UnrecognisedParameterValue("MTP", nfe.getMessage());
        }
        catch(UnreachableException ue) {
            throw new jade.domain.FIPAAgentManagement.InternalError(ue.getMessage());
        }
        catch(MTPException mtpe) {
            throw new jade.domain.FIPAAgentManagement.UnrecognisedParameterValue("MTP", mtpe.getMessage());
        }
    }
} // End of UninstallMTPBehaviour class
// The AgentPlatform where information about agents is stored
/**
@serial
*/
private AgentManager myPlatform;
// Maintains an association between action names and behaviours to
// handle 'fipa-agent-management' actions
/**
@serial
*/
private FipaRequestResponderBehaviour dispatcher;
// Maintains an association between action names and behaviours to
// handle 'jade-agent-management' actions
/**
@serial
*/
private FipaRequestResponderBehaviour extensionsDispatcher;
// Contains a main Behaviour and some utilities to handle JADE mobility
/**
@serial
*/
private MobilityManager mobilityMgr;
// Behaviour to listen to incoming 'subscribe' messages from tools.
/**
@serial
*/
private RegisterToolBehaviour registerTool;
// Behaviour to broadcast AgentPlatform notifications to each
// registered tool.
/**
@serial
*/
private NotifyToolsBehaviour notifyTools;
// Behaviour to listen to incoming 'cancel' messages from tools.
/**
@serial
*/
private DeregisterToolBehaviour deregisterTool;
// Group of tools registered with this AMS
/**
@serial
*/
private List tools;
// ACL Message reused for every tool notification; receivers and
// content are rewritten before each send.
/**
@serial
*/
private ACLMessage toolNotification = new ACLMessage(ACLMessage.INFORM);
// Buffer for AgentPlatform notifications, filled by the synchronized
// event callbacks and drained by NotifyToolsBehaviour.
/**
@serial
*/
private List eventQueue = new ArrayList(10);
// Buffered 'inform Done' replies for 'create-agent' requests, keyed by
// the new agent's name and sent when that agent registers.
/**
@serial
*/
private Map pendingInforms = new HashMap();
// The platform profile ('ap-platform-description' object).
/**
@serial
*/
private APDescription theProfile = new APDescription();
/**
   This constructor creates a new <em>AMS</em> agent. Since a direct
   reference to an Agent Platform implementation must be passed to
   it, this constructor cannot be called from application
   code. Therefore, no other <em>AMS</em> agent can be created
   beyond the default one.
*/
public ams(AgentManager ap) {
    // Fill Agent Platform Profile with data.
    theProfile.setDynamic(new Boolean(false));
    theProfile.setMobility(new Boolean(false));
    APTransportDescription mtps = new APTransportDescription();
    theProfile.setTransportProfile(mtps);
    // Register with the platform to receive agent/container/MTP events.
    myPlatform = ap;
    myPlatform.addListener(this);
    // Dispatcher for 'fipa-request' messages using the standard
    // fipa-agent-management ontology.
    MessageTemplate mtFIPA =
        MessageTemplate.and(MessageTemplate.MatchLanguage(SL0Codec.NAME),
                            MessageTemplate.MatchOntology(FIPAAgentManagementOntology.NAME));
    dispatcher = new FipaRequestResponderBehaviour(this, mtFIPA);
    // Dispatcher for 'fipa-request' messages using the JADE-specific
    // jade-agent-management ontology.
    MessageTemplate mtJADE =
        MessageTemplate.and(MessageTemplate.MatchLanguage(SL0Codec.NAME),
                            MessageTemplate.MatchOntology(JADEAgentManagementOntology.NAME));
    extensionsDispatcher = new FipaRequestResponderBehaviour(this, mtJADE);
    mobilityMgr = new MobilityManager(this);
    registerTool = new RegisterToolBehaviour();
    deregisterTool = new DeregisterToolBehaviour();
    notifyTools = new NotifyToolsBehaviour();
    tools = new ArrayList();
    // Pre-configure the reusable tool-notification message.
    toolNotification.setSender(new AID());
    toolNotification.setLanguage(SL0Codec.NAME);
    toolNotification.setOntology(JADEIntrospectionOntology.NAME);
    toolNotification.setInReplyTo("tool-subscription");
    // Associate each AMS action name with the behaviour to execute
    // when the action is requested in a 'request' ACL message
    dispatcher.registerFactory(FIPAAgentManagementOntology.REGISTER, new RegBehaviour(null));
    dispatcher.registerFactory(FIPAAgentManagementOntology.DEREGISTER, new DeregBehaviour(null));
    dispatcher.registerFactory(FIPAAgentManagementOntology.MODIFY, new ModBehaviour(null));
    dispatcher.registerFactory(FIPAAgentManagementOntology.SEARCH, new SrchBehaviour(null));
    dispatcher.registerFactory(FIPAAgentManagementOntology.GETDESCRIPTION, new GetDescriptionBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.CREATEAGENT, new CreateBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.KILLAGENT, new KillBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.KILLCONTAINER, new KillContainerBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.SNIFFON, new SniffAgentOnBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.DEBUGOFF, new DebugAgentOffBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.DEBUGON, new DebugAgentOnBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.SNIFFOFF, new SniffAgentOffBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.INSTALLMTP, new InstallMTPBehaviour(null));
    extensionsDispatcher.registerFactory(JADEAgentManagementOntology.UNINSTALLMTP, new UninstallMTPBehaviour(null));
}
/**
   This method starts the <em>AMS</em> behaviours to allow the agent
   to carry on its duties within <em><b>JADE</b></em> agent platform.
*/
protected void setup() {
    // Fill the ':name' slot of the Agent Platform Profile with the Platform ID.
    theProfile.setName("\"" + getHap() + "\"");
    writeAPDescription();
    // Register the supported ontologies
    registerOntology(FIPAAgentManagementOntology.NAME, FIPAAgentManagementOntology.instance());
    registerOntology(JADEAgentManagementOntology.NAME, JADEAgentManagementOntology.instance());
    registerOntology(JADEIntrospectionOntology.NAME, JADEIntrospectionOntology.instance());
    registerOntology(MobilityOntology.NAME, MobilityOntology.instance());
    // Register the supported languages
    registerLanguage(SL0Codec.NAME, new SL0Codec());
    // Add a dispatcher Behaviour for all ams actions following from a
    // 'fipa-request' interaction with 'fipa-agent-management' ontology.
    addBehaviour(dispatcher);
    // Add a dispatcher Behaviour for all ams actions following from a
    // 'fipa-request' interaction with 'jade-agent-management' ontology.
    addBehaviour(extensionsDispatcher);
    // Add a main behaviour to manage mobility related messages
    addBehaviour(mobilityMgr.getMain());
    // Add a Behaviour to accept incoming tool registrations and a
    // Behaviour to broadcast events to registered tools.
    addBehaviour(registerTool);
    addBehaviour(deregisterTool);
    addBehaviour(notifyTools);
}
/**
* checks that all the mandatory slots for a register/modify/deregister action
* are present.
* @param actionName is the name of the action (one of
* <code>FIPAAgentManagementOntology.REGISTER</code>,
* <code>FIPAAgentManagementOntology.MODIFY</code>,
* <code>FIPAAgentManagementOntology.DEREGISTER</code>)
* @param amsd is the AMSAgentDescription to be checked for
* @throws MissingParameter if one of the mandatory slots is missing
**/
private void checkMandatorySlots(String actionName, AMSAgentDescription amsd) throws MissingParameter {
// Validate the ':name' slot. Any exception raised while inspecting it
// (e.g. an NPE from a malformed AID) is treated as a missing name; note
// that a MissingParameter thrown inside the try is itself re-caught,
// printed, and re-thrown as an equivalent MissingParameter.
try {
AID name = amsd.getName();
if ((name == null)||(name.getName().length() == 0))
throw new MissingParameter(FIPAAgentManagementOntology.AMSAGENTDESCRIPTION, "name");
} catch (Exception e) {
e.printStackTrace();
throw new MissingParameter(FIPAAgentManagementOntology.AMSAGENTDESCRIPTION, "name");
}
// The ':state' slot is mandatory for REGISTER and MODIFY, but not for
// DEREGISTER, which only needs the agent's name.
if (!actionName.equalsIgnoreCase(FIPAAgentManagementOntology.DEREGISTER))
try {
String state = amsd.getState();
if((state == null)||(state.length() == 0))
throw new MissingParameter(FIPAAgentManagementOntology.AMSAGENTDESCRIPTION, "state");
} catch (Exception e) {
e.printStackTrace();
throw new MissingParameter(FIPAAgentManagementOntology.AMSAGENTDESCRIPTION, "state");
}
}
/**
@serial
*/
private KB agentDescriptions = new KBAbstractImpl() {
// Template matching for AMS searches: a fact matches when every slot
// that is set (non-null) in the template equals the corresponding fact
// slot. A null template slot acts as a wildcard. String comparisons are
// case-insensitive; AID comparison is delegated to matchAID().
protected boolean match(Object template, Object fact) {
try {
AMSAgentDescription templateDesc = (AMSAgentDescription)template;
AMSAgentDescription factDesc = (AMSAgentDescription)fact;
String o1 = templateDesc.getOwnership();
if(o1 != null) {
String o2 = factDesc.getOwnership();
if((o2 == null) || (!o1.equalsIgnoreCase(o2)))
return false;
}
String s1 = templateDesc.getState();
if(s1 != null) {
String s2 = factDesc.getState();
if((s2 == null) || (!s1.equalsIgnoreCase(s2)))
return false;
}
AID id1 = templateDesc.getName();
if(id1 != null) {
AID id2 = factDesc.getName();
if((id2 == null) || (!matchAID(id1, id2)))
return false;
}
return true;
}
catch(ClassCastException cce) {
// Non-AMSAgentDescription objects can never match.
return false;
}
}
};
/** it is called also by Agent.java **/
// Registers an agent description with the AMS knowledge base, first
// stamping the agent's AID with all platform transport addresses.
// Also invoked directly by Agent.java.
public void AMSRegister(AMSAgentDescription amsd) throws FIPAException, JADESecurityException {
checkMandatorySlots(FIPAAgentManagementOntology.REGISTER, amsd);
String[] addresses = myPlatform.platformAddresses();
AID id = amsd.getName();
for(int i = 0; i < addresses.length; i++)
id.addAddresses(addresses[i]);
try {
// Ownership is split on the first ':' — presumably "user:password";
// TODO confirm the expected format against the callers.
String ownership = amsd.getOwnership();
int dot2 = ownership.indexOf(':');
String username = dot2 != -1 ? ownership.substring(0, dot2) : ownership;
String password = dot2 != -1 ? ownership.substring(dot2 + 1, ownership.length()) : "";
// NOTE(review): register() stores the new description BEFORE the
// duplicate check, so on AlreadyRegistered the old entry has already
// been replaced — verify whether it should be restored instead.
Object old = agentDescriptions.register(amsd.getName(), amsd);
if(old != null)
throw new AlreadyRegistered();
//else
// Principal change is deliberately disabled (dead branch) — the
// catch clauses below exist only for this call. TODO confirm intent.
if (false)
myPlatform.changeAgentPrincipal(amsd.getName(), new UserPrincipal(username), password.getBytes());
}
catch (NotFoundException nfe) {
nfe.printStackTrace();
}
catch (UnreachableException ue) {
ue.printStackTrace();
}
}
/** it is called also by Agent.java **/
/**
 * Removes an agent description from the AMS knowledge base.
 * Also invoked directly by Agent.java.
 *
 * @param amsd the description whose ':name' slot identifies the agent
 * @throws NotRegistered if no description is stored for that name
 * @throws FIPAException if the mandatory ':name' slot is missing
 */
public void AMSDeregister(AMSAgentDescription amsd) throws FIPAException {
    checkMandatorySlots(FIPAAgentManagementOntology.DEREGISTER, amsd);
    Object previous = agentDescriptions.deregister(amsd.getName());
    if (previous == null) {
        // Nothing was stored under this AID.
        throw new NotRegistered();
    }
}
// Modifies an existing registration: the old description is removed and
// replaced by the new one, then the platform is told to reconcile any
// state (suspend/activate) or ownership changes. The deregister/register
// ordering must be preserved: it both detects a missing entry and swaps
// the descriptions atomically with respect to this agent's thread.
private void AMSModify(AMSAgentDescription amsd) throws FIPAException, JADESecurityException {
checkMandatorySlots(FIPAAgentManagementOntology.MODIFY, amsd);
AMSAgentDescription old = (AMSAgentDescription)agentDescriptions.deregister(amsd.getName());
if (old == null)
throw new NotRegistered();
agentDescriptions.register(amsd.getName(), amsd);
try {
// Ownership is split on the first ':' — presumably "user:password";
// TODO confirm the expected format.
String ownership = amsd.getOwnership();
int dot2 = ownership.indexOf(':');
String username = dot2 != -1 ? ownership.substring(0, dot2) : ownership;
String password = dot2 != -1 ? ownership.substring(dot2 + 1, ownership.length()) : "";
// Suspend/resume the agent when its registered state transitions
// into/out of SUSPENDED.
if (!old.getState().equals(amsd.SUSPENDED) && amsd.getState().equals(amsd.SUSPENDED))
myPlatform.suspend(amsd.getName(), "");
if (old.getState().equals(amsd.SUSPENDED) && !amsd.getState().equals(amsd.SUSPENDED))
myPlatform.activate(amsd.getName(), "");
//__JADE_ONLY__BEGIN
if (!old.getOwnership().equalsIgnoreCase(amsd.getOwnership()))
myPlatform.changeAgentPrincipal(amsd.getName(), new UserPrincipal(username), password.getBytes());
//__JADE_ONLY__END
}
catch (NotFoundException nfe) {
nfe.printStackTrace();
}
catch (UnreachableException ue) {
ue.printStackTrace();
}
}
// Searches the AMS knowledge base with the given description as template.
// NOTE(review): 'constraints' and 'reply' are currently unused — the whole
// KB is matched against the template; confirm whether the search
// constraints (e.g. max-results) should be applied here.
private List AMSSearch(AMSAgentDescription amsd, SearchConstraints constraints, ACLMessage reply) throws FIPAException {
// Search has no mandatory slots
return agentDescriptions.search(amsd);
}
// This one is called in response to a 'move-agent' action
/**
 * Handles a 'move-agent' request: migrates the given agent to the
 * target location.
 *
 * @param agentID the agent to move
 * @param where   the destination location
 * @throws NotRegistered if the agent is unknown to the platform
 * @throws jade.domain.FIPAAgentManagement.InternalError if the
 *         destination container cannot be contacted
 */
void AMSMoveAgent(AID agentID, Location where) throws FIPAException {
    try {
        myPlatform.move(agentID, where, "");
    } catch (NotFoundException nfe) {
        // Unknown agent: report it as not registered with this AMS.
        throw new NotRegistered();
    } catch (UnreachableException ue) {
        throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
    }
}
// This one is called in response to a 'clone-agent' action
/**
 * Handles a 'clone-agent' request: copies the given agent to the target
 * location under a new name.
 *
 * @param agentID the agent to clone
 * @param where   the destination location
 * @param newName the name for the clone
 * @throws NotRegistered if the agent is unknown to the platform
 * @throws jade.domain.FIPAAgentManagement.InternalError if the
 *         destination container cannot be contacted
 */
void AMSCloneAgent(AID agentID, Location where, String newName) throws FIPAException {
    try {
        myPlatform.copy(agentID, where, newName, "");
    } catch (NotFoundException nfe) {
        // Unknown agent: report it as not registered with this AMS.
        throw new NotRegistered();
    } catch (UnreachableException ue) {
        throw new jade.domain.FIPAAgentManagement.InternalError("The container is not reachable");
    }
}
// This one is called in response to a 'where-is-agent' action
/**
 * Handles a 'where-is-agent' request: resolves the container hosting the
 * agent and maps it to a mobility Location.
 *
 * @param agentID the agent to locate
 * @return the Location of the container the agent lives in
 * @throws NotRegistered if the agent is unknown to the platform
 */
Location AMSWhereIsAgent(AID agentID) throws FIPAException {
    try {
        ContainerID host = myPlatform.getContainerID(agentID);
        return mobilityMgr.getLocation(host.getName());
    } catch (NotFoundException nfe) {
        nfe.printStackTrace();
        throw new NotRegistered();
    }
}
// This one is called in response to a 'query-platform-locations' action
// Handles a 'query-platform-locations' action by delegating to the
// mobility manager, which tracks one Location per container.
Iterator AMSGetPlatformLocations() {
return mobilityMgr.getLocations();
}
// Methods to be called from AgentPlatform to notify AMS of special events
/**
Post an event to the AMS agent. This method must not be used by
application agents.
*/
/**
 * Posts an 'added-container' event to the AMS agent. This method must
 * not be used by application agents.
 *
 * @param ev the platform event describing the new container
 */
public synchronized void addedContainer(PlatformEvent ev) {
    ContainerID container = ev.getContainer();
    // Make the new container known to the mobility manager as a location.
    mobilityMgr.addLocation(container.getName(), container);
    // Build and queue an 'added-container' introspection event for tools.
    AddedContainer content = new AddedContainer();
    content.setContainer(container);
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
/**
Post an event to the AMS agent. This method must not be used by
application agents.
*/
/**
 * Posts a 'removed-container' event to the AMS agent. This method must
 * not be used by application agents.
 *
 * @param ev the platform event describing the dead container
 */
public synchronized void removedContainer(PlatformEvent ev) {
    ContainerID container = ev.getContainer();
    // Drop the container's location from the mobility manager.
    mobilityMgr.removeLocation(container.getName());
    // Build and queue a 'removed-container' introspection event for tools.
    RemovedContainer content = new RemovedContainer();
    content.setContainer(container);
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
/**
Post an event to the AMS agent. This method must not be used by
application agents.
*/
/**
 * Posts a 'born-agent' event to the AMS agent. This method must not be
 * used by application agents.
 *
 * @param ev the platform event describing the newly born agent
 */
public synchronized void bornAgent(PlatformEvent ev) {
    BornAgent content = new BornAgent();
    content.setAgent(ev.getAgent());
    content.setWhere(ev.getContainer());
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
/**
Post an event to the AMS agent. This method must not be used by
application agents.
*/
/**
 * Posts a 'dead-agent' event to the AMS agent, dropping the agent's AMS
 * registration first. This method must not be used by application agents.
 *
 * @param ev the platform event describing the dead agent
 */
public synchronized void deadAgent(PlatformEvent ev) {
    AID agentID = ev.getAgent();
    ContainerID container = ev.getContainer();
    // Best-effort: deregister the dead agent if it is still registered.
    try {
        AMSAgentDescription description = new AMSAgentDescription();
        description.setName(agentID);
        AMSDeregister(description);
    } catch (NotRegistered nr) {
        // The agent already deregistered itself (typically in doDelete()).
    } catch (FIPAException fe) {
        fe.printStackTrace();
    }
    // Build and queue a 'dead-agent' introspection event for tools.
    DeadAgent content = new DeadAgent();
    content.setAgent(agentID);
    content.setWhere(container);
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
/**
Post an event to the AMS agent. This method must not be used by
application agents.
*/
/**
 * Posts a 'suspended-agent' event to the AMS agent. This method must not
 * be used by application agents.
 *
 * @param ev the platform event describing the suspended agent
 */
public synchronized void suspendedAgent(PlatformEvent ev) {
    // NOTE(review): the registry state is not updated here — the original
    // code carried a "Registry needs an update here!" reminder.
    SuspendedAgent content = new SuspendedAgent();
    content.setAgent(ev.getAgent());
    content.setWhere(ev.getContainer());
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
/**
Post an event to the AMS agent. This method must not be used by
application agents.
*/
/**
 * Posts a 'resumed-agent' event to the AMS agent. This method must not
 * be used by application agents.
 *
 * @param ev the platform event describing the resumed agent
 */
public synchronized void resumedAgent(PlatformEvent ev) {
    // NOTE(review): the registry state is not updated here — the original
    // code carried a "Registry needs an update here!" reminder.
    ResumedAgent content = new ResumedAgent();
    content.setAgent(ev.getAgent());
    content.setWhere(ev.getContainer());
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
//__JADE_ONLY__BEGIN
/**
Post an event to the AMS agent. This method must not be used by
application agents.
*/
/**
 * Posts a 'changed-agent-principal' event to the AMS agent. This method
 * must not be used by application agents.
 *
 * @param ev the platform event carrying the old and new principals
 */
public synchronized void changedAgentPrincipal(PlatformEvent ev) {
    ChangedAgentPrincipal content = new ChangedAgentPrincipal();
    content.setAgent(ev.getAgent());
    content.setWhere(ev.getContainer());
    content.setOldPrincipal(ev.getOldPrincipal());
    content.setNewPrincipal(ev.getNewPrincipal());
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
//__JADE_ONLY__END
/**
Post an event to the AMS agent. This method must not be used by
application agents.
*/
/**
 * Posts a 'moved-agent' event to the AMS agent. This method must not be
 * used by application agents.
 *
 * @param ev the platform event carrying the source and target containers
 */
public synchronized void movedAgent(PlatformEvent ev) {
    MovedAgent content = new MovedAgent();
    content.setAgent(ev.getAgent());
    content.setFrom(ev.getContainer());
    content.setTo(ev.getNewContainer());
    EventRecord record = new EventRecord(content, here());
    record.setWhen(ev.getTime());
    eventQueue.add(record);
    doWake();
}
/**
Post an event to the AMS agent. This method must not be used by
application agents.
*/
// Posts an 'added-MTP' event to the AMS agent. This method must not be
// used by application agents. The updates below must stay in this order:
// first the shared platform profile is amended (and persisted if the
// agent is past initialization), then every registered AID is stamped
// with the new address, and finally two events are queued: the MTP
// addition itself and the updated platform description.
public synchronized void addedMTP(MTPEvent ev) {
Channel ch = ev.getChannel();
ContainerID cid = ev.getPlace();
String proto = ch.getProtocol();
String address = ch.getAddress();
// Add the new address to the platform profile
APTransportDescription mtps = theProfile.getTransportProfile();
MTPDescription desc = findMTPDescription(mtps, proto);
desc.addAddresses(address);
// Update the APDescription file.
if(getState() != AP_INITIATED)
writeAPDescription();
// Retrieve all agent descriptors
AMSAgentDescription amsd = new AMSAgentDescription();
List l = agentDescriptions.search(amsd);
// Add the new address to all the agent descriptors
Iterator it = l.iterator();
while(it.hasNext()) {
AMSAgentDescription ad = (AMSAgentDescription)it.next();
AID name = ad.getName();
name.addAddresses(address);
}
// Generate a suitable AMS event
AddedMTP amtp = new AddedMTP();
amtp.setAddress(address);
amtp.setWhere(cid);
EventRecord er = new EventRecord(amtp, here());
er.setWhen(ev.getTime());
eventQueue.add(er);
//Notify the update of the APDescription...
PlatformDescription ap = new PlatformDescription();
ap.setPlatform(theProfile);
er = new EventRecord(ap, here());
er.setWhen(ev.getTime());
eventQueue.add(er);
doWake();
}
/** Post an event to the AMS agent. This method must not be used by
application agents.
*/
/**
 * Posts a 'removed-MTP' event to the AMS agent. This method must not be
 * used by application agents. Removes the dead address from the platform
 * profile (dropping the whole MTP description when no addresses remain),
 * persists the updated profile, strips the address from every registered
 * agent's AID, and finally queues a RemovedMTP event plus the updated
 * platform description for tool notification.
 *
 * @param ev the MTP event carrying the channel and the container it lived on
 */
public synchronized void removedMTP(MTPEvent ev) {
    Channel ch = ev.getChannel();
    ContainerID cid = ev.getPlace();
    String proto = ch.getProtocol();
    String address = ch.getAddress();
    // Remove the dead address from the platform profile.
    APTransportDescription mtps = theProfile.getTransportProfile();
    MTPDescription desc = findMTPDescription(mtps, proto);
    Iterator addresses = desc.getAllAddresses();
    while (addresses.hasNext()) {
        // Remove all entries matching the dead address.
        String nextAddr = (String) addresses.next();
        if (nextAddr.equalsIgnoreCase(address))
            addresses.remove();
    }
    // If no addresses remain for this MTP, drop it from the
    // 'ap-platform-description' object.
    addresses = desc.getAllAddresses();
    if (!addresses.hasNext())
        mtps.removeAvailableMtps(desc);
    // Persist the updated platform description.
    writeAPDescription();
    // Remove the dead address from all the registered agents.
    AID[] agents = myPlatform.agentNames();
    AMSAgentDescription amsd = new AMSAgentDescription();
    for (int i = 0; i < agents.length; i++) {
        amsd.setName(agents[i]);
        List l = agentDescriptions.search(amsd);
        // Bug fix: an agent known to the platform may not (or no longer)
        // be registered with the AMS, so the search can return an empty
        // list; guard before dereferencing the first element.
        if (!l.isEmpty()) {
            AMSAgentDescription amsDesc = (AMSAgentDescription) l.get(0);
            AID name = amsDesc.getName();
            name.removeAddresses(address);
        }
    }
    // Generate a suitable AMS event.
    RemovedMTP rmtp = new RemovedMTP();
    rmtp.setAddress(address);
    rmtp.setWhere(cid);
    EventRecord er = new EventRecord(rmtp, here());
    er.setWhen(ev.getTime());
    eventQueue.add(er);
    // Notify the update of the APDescription.
    PlatformDescription ap = new PlatformDescription();
    ap.setPlatform(theProfile);
    er = new EventRecord(ap, here());
    er.setWhen(ev.getTime());
    eventQueue.add(er);
    doWake();
}
// MTP traffic callbacks: currently only trace message flow to stdout.
public void messageIn(MTPEvent ev) { System.out.println("Message In."); }
public void messageOut(MTPEvent ev) { System.out.println("Message Out."); }
/**
 * Writes the current Agent Platform description to the APDescription.txt
 * file in the working directory. I/O errors are reported on stderr and
 * otherwise ignored (best-effort persistence, as before).
 * <p>
 * Fix: the writer is now closed in a {@code finally} block, so the file
 * handle is no longer leaked when one of the write calls throws.
 */
private void writeAPDescription() {
    FileWriter f = null;
    try {
        f = new FileWriter("APDescription.txt");
        f.write(theProfile.toString());
        f.write('\n');
        f.flush();
    } catch (java.io.IOException ioe) {
        ioe.printStackTrace();
    } finally {
        if (f != null) {
            try {
                f.close();
            } catch (java.io.IOException ioe) {
                // close() failures were previously surfaced the same way.
                ioe.printStackTrace();
            }
        }
    }
}
/**
 * Looks up the MTP description matching the given protocol name
 * (case-insensitive) in the transport profile; if none exists, a fresh
 * description is created, registered, and returned.
 *
 * @param mtps  the platform's transport description
 * @param proto the protocol name to look up
 * @return the existing or newly created MTP description (never null)
 */
private MTPDescription findMTPDescription(APTransportDescription mtps, String proto) {
    for (Iterator it = mtps.getAllAvailableMtps(); it.hasNext();) {
        MTPDescription candidate = (MTPDescription) it.next();
        if (proto.equalsIgnoreCase(candidate.getMtpName())) {
            return candidate;
        }
    }
    // No MTP was found: create a new one and add it to the
    // 'ap-transport-description' object.
    MTPDescription created = new MTPDescription();
    created.setMtpName(proto);
    mtps.addAvailableMtps(created);
    return created;
}
} // End of class ams
|
Fixed bug #71 in the Jitterbug system. Now a check for non-emptiness
is made on the agent descriptions list before using it in removedMTP()
method.
|
src/jade/domain/ams.java
|
Fixed bug #71 in the Jitterbug system. Now a check for non-emptiness is made on the agent descriptions list before using it in removedMTP() method.
|
|
Java
|
lgpl-2.1
|
6a92cc8bda821efb94f6734cf5f0a54cabc0e85e
| 0
|
certusoft/swingx,certusoft/swingx
|
src/java/org/jdesktop/swingx/treetable/TreeModelSupport.java
|
/*
* $Id$
*
* Copyright 2004 Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* California 95054, U.S.A. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jdesktop.swingx.treetable;
import javax.swing.event.EventListenerList;
import javax.swing.event.TreeModelEvent;
import javax.swing.event.TreeModelListener;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import org.jdesktop.swingx.util.Contract;
/**
* Support for change notification, usable by {@code TreeModel}s.
*
* The changed/inserted/removed is expressed in terms of a {@code TreePath},
* it's up to the client model to build it as appropriate.
*
* This is inspired by {@code AbstractTreeModel} from Christian Kaufhold,
* www.chka.de.
*
* TODO - implement and test precondition failure of added/removed notification
*
* @author JW
*/
@Deprecated
public final class TreeModelSupport {

    /** Holds the registered {@code TreeModelListener}s. */
    protected EventListenerList listeners;

    /** The model on whose behalf events are fired; never null. */
    private final TreeModel treeModel;

    /**
     * Creates the support class for the given {@code TreeModel}.
     *
     * @param model the model to support
     * @throws NullPointerException if {@code model} is {@code null}
     */
    public TreeModelSupport(TreeModel model) {
        if (model == null)
            throw new NullPointerException("model must not be null");
        listeners = new EventListenerList();
        this.treeModel = model;
    }

    //---------------------- structural changes on subtree

    /**
     * Notifies registered TreeModelListeners that the tree's root has
     * been replaced. Can cope with a null root.
     */
    public void fireNewRoot() {
        Object root = treeModel.getRoot();
        /*
         * Undocumented. I think it is the only reasonable/possible solution to
         * use null as path if there is no root. TreeModels without root
         * aren't important anyway, since JTree doesn't support them (yet).
         */
        TreePath path = (root != null) ? new TreePath(root) : null;
        fireTreeStructureChanged(path);
    }

    /**
     * Call when a node has changed its leaf state.<p>
     *
     * PENDING: rename? Do we need it?
     * @param path the path to the node with changed leaf state.
     */
    public void firePathLeafStateChanged(TreePath path) {
        fireTreeStructureChanged(path);
    }

    /**
     * Notifies registered TreeModelListeners that the structure
     * below the node identified by the given path has been
     * completely changed.
     * <p>
     * NOTE: the subtree path may be null if the root is null.
     * If not null, it must contain at least one element (the root).
     *
     * @param subTreePath the path to the root of the subtree
     *   whose structure was changed.
     * @throws NullPointerException if the path is not null but empty
     *   or contains null elements.
     */
    public void fireTreeStructureChanged(TreePath subTreePath) {
        if (subTreePath != null) {
            Contract.asNotNull(subTreePath.getPath(),
                    "path must not contain null elements");
        }
        Object[] pairs = listeners.getListenerList();
        TreeModelEvent e = null;
        // Walk the listener list back-to-front; the event is created lazily
        // so no allocation happens when nobody is listening.
        for (int i = pairs.length - 2; i >= 0; i -= 2) {
            if (pairs[i] == TreeModelListener.class) {
                if (e == null)
                    e = createStructureChangedEvent(subTreePath);
                ((TreeModelListener) pairs[i + 1]).treeStructureChanged(e);
            }
        }
    }

    //----------------------- node modifications, no mutations

    /**
     * Notifies registered TreeModelListeners that the node identified by
     * the given path has been modified.
     *
     * @param path the path to the node that has been modified,
     *   must not be null and must not contain null path elements.
     */
    public void firePathChanged(TreePath path) {
        Object node = path.getLastPathComponent();
        TreePath parentPath = path.getParentPath();
        if (parentPath == null)
            // Root change: by contract, indices and children are null.
            fireChildrenChanged(path, null, null);
        else {
            Object parent = parentPath.getLastPathComponent();
            fireChildChanged(parentPath, treeModel
                    .getIndexOfChild(parent, node), node);
        }
    }

    /**
     * Notifies registered TreeModelListeners that the given child of
     * the node identified by the given parent path has been modified.
     * The parent path must not be null, nor empty, nor contain null
     * elements.
     *
     * @param parentPath the path to the parent of the modified child.
     * @param index the position of the child
     * @param child child node that has been modified, must not be null
     */
    public void fireChildChanged(TreePath parentPath, int index, Object child) {
        fireChildrenChanged(parentPath, new int[] { index },
                new Object[] { child });
    }

    /**
     * Notifies registered TreeModelListeners that the given children of
     * the node identified by the given parent path have been modified.
     * The parent path must not be null, nor empty, nor contain null
     * elements. The index array must contain the position of the
     * corresponding child in the children array, in ascending order.<p>
     *
     * The exception to these rules is if the root itself has been
     * modified (which has no parent by definition). In this case
     * the path must be the path to the root and both indices and children
     * arrays must be null.
     *
     * @param parentPath the path to the parent of the modified children.
     * @param indices the positions of the modified children
     * @param children the modified children
     * @throws NullPointerException if the parent path is null or contains
     *   null elements
     */
    public void fireChildrenChanged(TreePath parentPath, int[] indices,
            Object[] children) {
        Contract.asNotNull(parentPath.getPath(),
                "path must not be null and must not contain null elements");
        Object[] pairs = listeners.getListenerList();
        TreeModelEvent e = null;
        for (int i = pairs.length - 2; i >= 0; i -= 2) {
            if (pairs[i] == TreeModelListener.class) {
                if (e == null)
                    e = createTreeModelEvent(parentPath, indices, children);
                ((TreeModelListener) pairs[i + 1]).treeNodesChanged(e);
            }
        }
    }

    //------------------------ mutations (insert/remove nodes)

    /**
     * Notifies registered TreeModelListeners that the child has been added to
     * the node identified by the given parent path at the given position.
     * The parent path must not be null, nor empty, nor contain null elements.
     *
     * @param parentPath the path to the parent of the added child.
     * @param index the position of the added child
     * @param child the added child
     */
    public void fireChildAdded(TreePath parentPath, int index, Object child) {
        fireChildrenAdded(parentPath, new int[] { index },
                new Object[] { child });
    }

    /**
     * Notifies registered TreeModelListeners that the child has been removed
     * from the node identified by the given parent path from the given
     * position. The parent path must not be null, nor empty, nor contain
     * null elements.
     *
     * @param parentPath the path to the parent of the removed child.
     * @param index the position of the removed child before the removal
     * @param child the removed child
     */
    public void fireChildRemoved(TreePath parentPath, int index, Object child) {
        fireChildrenRemoved(parentPath, new int[] { index },
                new Object[] { child });
    }

    /**
     * Notifies registered TreeModelListeners that the given children have
     * been added to the node identified by the given parent path at the
     * given locations. The parent path and the child array must not be null,
     * nor empty, nor contain null elements. The index array must contain the
     * position of the corresponding child in the children array, in
     * ascending order.
     *
     * @param parentPath the path to the parent of the added children.
     * @param indices the positions of the added children.
     * @param children the added children.
     * @throws NullPointerException if the parent path is null or contains
     *   null elements
     */
    public void fireChildrenAdded(TreePath parentPath, int[] indices,
            Object[] children) {
        // Fail fast on an invalid parent path, mirroring
        // fireChildrenChanged (resolves the class's documented TODO about
        // precondition failure on added/removed notification).
        Contract.asNotNull(parentPath.getPath(),
                "path must not be null and must not contain null elements");
        Object[] pairs = listeners.getListenerList();
        TreeModelEvent e = null;
        for (int i = pairs.length - 2; i >= 0; i -= 2) {
            if (pairs[i] == TreeModelListener.class) {
                if (e == null)
                    e = createTreeModelEvent(parentPath, indices, children);
                ((TreeModelListener) pairs[i + 1]).treeNodesInserted(e);
            }
        }
    }

    /**
     * Notifies registered TreeModelListeners that the given children have
     * been removed from the node identified by the given parent path from
     * the given locations. The parent path and the child array must not be
     * null, nor empty, nor contain null elements. The index array must
     * contain the position of the corresponding child in the children array
     * before the removal, in ascending order.
     *
     * @param parentPath the path to the parent of the removed children.
     * @param indices the positions of the removed children before the removal
     * @param children the removed children
     * @throws NullPointerException if the parent path is null or contains
     *   null elements
     */
    public void fireChildrenRemoved(TreePath parentPath, int[] indices,
            Object[] children) {
        // Fail fast on an invalid parent path, mirroring
        // fireChildrenChanged (resolves the class's documented TODO about
        // precondition failure on added/removed notification).
        Contract.asNotNull(parentPath.getPath(),
                "path must not be null and must not contain null elements");
        Object[] pairs = listeners.getListenerList();
        TreeModelEvent e = null;
        for (int i = pairs.length - 2; i >= 0; i -= 2) {
            if (pairs[i] == TreeModelListener.class) {
                if (e == null)
                    e = createTreeModelEvent(parentPath, indices, children);
                ((TreeModelListener) pairs[i + 1]).treeNodesRemoved(e);
            }
        }
    }

    //------------------- factory methods of TreeModelEvents

    /**
     * Creates and returns a TreeModelEvent for structureChanged
     * event notification. The given path may be null to indicate
     * setting a null root. In all other cases, the first path element
     * must contain the root and the last path element the root node of the
     * structural change. Specifically, a TreePath with a single element
     * (which is the root) denotes a structural change of the complete tree.
     *
     * @param parentPath the path to the root of the changed structure,
     *   may be null to indicate setting a null root.
     * @return a TreeModelEvent for structureChanged notification.
     *
     * @see javax.swing.event.TreeModelEvent
     * @see javax.swing.event.TreeModelListener
     */
    private TreeModelEvent createStructureChangedEvent(TreePath parentPath) {
        return createTreeModelEvent(parentPath, null, null);
    }

    /**
     * Creates and returns a TreeModelEvent for changed/inserted/removed
     * event notification.
     *
     * @param parentPath path to parent of modified node
     * @param indices the indices of the modified children (before the change)
     * @param children the array of modified children
     * @return a TreeModelEvent for changed/inserted/removed notification
     *
     * @see javax.swing.event.TreeModelEvent
     * @see javax.swing.event.TreeModelListener
     */
    private TreeModelEvent createTreeModelEvent(TreePath parentPath,
            int[] indices, Object[] children) {
        return new TreeModelEvent(treeModel, parentPath, indices, children);
    }

    //------------------------ handling listeners

    /** Registers a listener for tree model events. */
    public void addTreeModelListener(TreeModelListener l) {
        listeners.add(TreeModelListener.class, l);
    }

    /** Returns all currently registered tree model listeners. */
    public TreeModelListener[] getTreeModelListeners() {
        return listeners.getListeners(TreeModelListener.class);
    }

    /** Removes a previously registered listener. */
    public void removeTreeModelListener(TreeModelListener l) {
        listeners.remove(TreeModelListener.class, l);
    }
}
|
Issue number: 836
We almost forgot to remove this deprecated class.
|
src/java/org/jdesktop/swingx/treetable/TreeModelSupport.java
|
Issue number: 836 We almost forgot to remove this deprecated class.
|
||
Java
|
lgpl-2.1
|
0e6d2fad4f5c59887a89679e2c6e6618f5a119ad
| 0
|
dizzzz/exist,adamretter/exist,ambs/exist,dizzzz/exist,lcahlander/exist,ambs/exist,adamretter/exist,eXist-db/exist,ambs/exist,eXist-db/exist,lcahlander/exist,windauer/exist,wolfgangmm/exist,dizzzz/exist,wolfgangmm/exist,windauer/exist,wolfgangmm/exist,lcahlander/exist,wolfgangmm/exist,windauer/exist,dizzzz/exist,adamretter/exist,adamretter/exist,wolfgangmm/exist,eXist-db/exist,ambs/exist,dizzzz/exist,wolfgangmm/exist,adamretter/exist,windauer/exist,eXist-db/exist,ambs/exist,lcahlander/exist,dizzzz/exist,lcahlander/exist,windauer/exist,windauer/exist,eXist-db/exist,ambs/exist,eXist-db/exist,lcahlander/exist,adamretter/exist
|
/*
* eXist Open Source Native XML Database
* Copyright (C) 2001-2018 The eXist Project
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.exist.xquery;
import java.io.IOException;
import java.io.Reader;
import java.lang.reflect.Constructor;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nullable;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.Duration;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.stream.XMLStreamException;
import antlr.RecognitionException;
import antlr.TokenStreamException;
import antlr.collections.AST;
import com.evolvedbinary.j8fu.Either;
import com.evolvedbinary.j8fu.function.TriFunctionE;
import com.evolvedbinary.j8fu.function.QuadFunctionE;
import com.evolvedbinary.j8fu.tuple.Tuple2;
import com.ibm.icu.text.Collator;
import net.jcip.annotations.Immutable;
import net.jcip.annotations.ThreadSafe;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.exist.Database;
import org.exist.EXistException;
import org.exist.Namespaces;
import org.exist.collections.Collection;
import org.exist.debuggee.Debuggee;
import org.exist.debuggee.DebuggeeJoint;
import org.exist.dom.persistent.*;
import org.exist.dom.QName;
import org.exist.http.servlets.*;
import org.exist.interpreter.Context;
import org.exist.dom.memtree.InMemoryXMLStreamReader;
import org.exist.dom.memtree.MemTreeBuilder;
import org.exist.dom.memtree.NodeImpl;
import org.exist.numbering.NodeId;
import org.exist.repo.ExistRepository;
import org.exist.security.AuthenticationException;
import org.exist.security.Permission;
import org.exist.security.PermissionDeniedException;
import org.exist.security.Subject;
import org.exist.source.*;
import org.exist.stax.ExtendedXMLStreamReader;
import org.exist.storage.DBBroker;
import org.exist.storage.UpdateListener;
import org.exist.storage.lock.Lock.LockMode;
import org.exist.storage.lock.LockedDocumentMap;
import org.exist.storage.txn.Txn;
import org.exist.util.Collations;
import org.exist.util.Configuration;
import org.exist.util.LockException;
import org.exist.util.hashtable.NamePool;
import org.exist.xmldb.XmldbURI;
import org.exist.xquery.parser.*;
import org.exist.xquery.pragmas.*;
import org.exist.xquery.update.Modification;
import org.exist.xquery.util.SerializerUtils;
import org.exist.xquery.value.*;
import org.w3c.dom.Node;
import static com.evolvedbinary.j8fu.tuple.Tuple.Tuple;
import static javax.xml.XMLConstants.XMLNS_ATTRIBUTE;
import static javax.xml.XMLConstants.XML_NS_PREFIX;
import static org.exist.Namespaces.XML_NS;
/**
* The current XQuery execution context. Contains the static as well as the dynamic
* XQuery context components.
*
* @author <a href="mailto:wolfgang@exist-db.org">Wolfgang Meier</a>
*/
public class XQueryContext implements BinaryValueManager, Context {
private static final Logger LOG = LogManager.getLogger(XQueryContext.class);
public static final String ENABLE_QUERY_REWRITING_ATTRIBUTE = "enable-query-rewriting";
public static final String XQUERY_BACKWARD_COMPATIBLE_ATTRIBUTE = "backwardCompatible";
public static final String XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL_ATTRIBUTE = "raise-error-on-failed-retrieval";
public static final String ENFORCE_INDEX_USE_ATTRIBUTE = "enforce-index-use";
//TODO : move elsewhere ?
public static final String BUILT_IN_MODULE_URI_ATTRIBUTE = "uri";
public static final String BUILT_IN_MODULE_CLASS_ATTRIBUTE = "class";
public static final String BUILT_IN_MODULE_SOURCE_ATTRIBUTE = "src";
public static final String PROPERTY_XQUERY_BACKWARD_COMPATIBLE = "xquery.backwardCompatible";
public static final String PROPERTY_ENABLE_QUERY_REWRITING = "xquery.enable-query-rewriting";
public static final String PROPERTY_XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL = "xquery.raise-error-on-failed-retrieval";
public static final boolean XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL_DEFAULT = false;
public static final String PROPERTY_ENFORCE_INDEX_USE = "xquery.enforce-index-use";
//TODO : move elsewhere ?
public static final String PROPERTY_BUILT_IN_MODULES = "xquery.modules";
public static final String PROPERTY_STATIC_MODULE_MAP = "xquery.modules.static";
public static final String PROPERTY_MODULE_PARAMETERS = "xquery.modules.parameters";
public static final String JAVA_URI_START = "java:";
//private static final String XMLDB_URI_START = "xmldb:exist://";
private static final String TEMP_STORE_ERROR = "Error occurred while storing temporary data";
public static final String XQUERY_CONTEXTVAR_XQUERY_UPDATE_ERROR = "_eXist_xquery_update_error";
public static final String HTTP_SESSIONVAR_XMLDB_USER = "_eXist_xmldb_user";
public static final String HTTP_REQ_ATTR_USER = "xquery.user";
public static final String HTTP_REQ_ATTR_PASS = "xquery.password";
// Static namespace/prefix mappings
protected Map<String, String> staticNamespaces = new HashMap<>();
// Static prefix/namespace mappings
protected Map<String, String> staticPrefixes = new HashMap<>();
// Local in-scope namespace/prefix mappings in the current context
Map<String, String> inScopeNamespaces = new HashMap<>();
// Local prefix/namespace mappings in the current context
private Map<String, String> inScopePrefixes = new HashMap<>();
// Inherited in-scope namespace/prefix mappings in the current context
private Map<String, String> inheritedInScopeNamespaces = new HashMap<>();
// Inherited prefix/namespace mappings in the current context
private Map<String, String> inheritedInScopePrefixes = new HashMap<>();
private Map<String, XmldbURI> mappedModules = new HashMap<>();
private boolean preserveNamespaces = true;
private boolean inheritNamespaces = true;
// Local namespace stack
private Deque<Map<String, String>> namespaceStack = new ArrayDeque<>();
// Known user defined functions in the local module
private TreeMap<FunctionId, UserDefinedFunction> declaredFunctions = new TreeMap<>();
// Globally declared variables
protected Map<QName, Variable> globalVariables = new TreeMap<>();
// The last element in the linked list of local in-scope variables
private LocalVariable lastVar = null;
private Deque<LocalVariable> contextStack = new ArrayDeque<>();
private Deque<FunctionSignature> callStack = new ArrayDeque<>();
// The current size of the variable stack
private int variableStackSize = 0;
// Unresolved references to user defined functions
private Deque<FunctionCall> forwardReferences = new ArrayDeque<>();
// Inline functions using closures need to be cleared after execution
private Deque<UserDefinedFunction> closures = new ArrayDeque<>();
// List of options declared for this query at compile time - i.e. declare option
private List<Option> staticOptions = null;
// List of options declared for this query at run time - i.e. util:declare-option()
private List<Option> dynamicOptions = null;
//The Calendar for this context : may be changed by some options
private XMLGregorianCalendar calendar = null;
private TimeZone implicitTimeZone = null;
/**
* the watchdog object assigned to this query.
*/
protected XQueryWatchDog watchdog;
/**
* Loaded modules.
*/
protected Map<String, Module> modules = new HashMap<>();
/**
* Loaded modules, including ones bubbled up from imported modules.
*/
private Map<String, Module> allModules = new HashMap<>();
/**
* Used to save current state when modules are imported dynamically
*/
private SavedState savedState = new SavedState();
/**
* Whether some modules were rebound to new instances since the last time this context's query was analyzed. (This assumes that each context is
* attached to at most one query.)
*/
@SuppressWarnings("unused")
private boolean modulesChanged = true;
/**
* The set of statically known documents specified as an array of paths to documents and collections.
*/
private XmldbURI[] staticDocumentPaths = null;
/**
* The actual set of statically known documents. This will be generated on demand from staticDocumentPaths.
*/
private DocumentSet staticDocuments = null;
/**
* The available documents of the dynamic context.
*
* {@see https://www.w3.org/TR/xpath-31/#dt-available-docs}.
*/
private Map<String, TriFunctionE<DBBroker, Txn, String, Either<org.exist.dom.memtree.DocumentImpl, DocumentImpl>, XPathException>> dynamicDocuments = null;
/**
* The available test resources of the dynamic context.
* <p>
* {@see https://www.w3.org/TR/xpath-31/#dt-available-text-resources}.
*/
private Map<Tuple2<String, Charset>, QuadFunctionE<DBBroker, Txn, String, Charset, Reader, XPathException>> dynamicTextResources = null;
/**
* The available collections of the dynamic context.
*
* {@see https://www.w3.org/TR/xpath-31/#dt-available-collections}.
*/
private Map<String, TriFunctionE<DBBroker, Txn, String, Sequence, XPathException>> dynamicCollections = null;
/**
* A set of documents which were modified during the query, usually through an XQuery update extension. The documents will be checked after the
* query completed to see if a defragmentation run is needed.
*/
protected MutableDocumentSet modifiedDocuments = null;
/**
* A general-purpose map to set attributes in the current query context.
*/
protected Map<String, Object> attributes = new HashMap<>();
protected AnyURIValue baseURI = AnyURIValue.EMPTY_URI;
private boolean baseURISetInProlog = false;
protected String moduleLoadPath = ".";
private String defaultFunctionNamespace = Function.BUILTIN_FUNCTION_NS;
private AnyURIValue defaultElementNamespace = AnyURIValue.EMPTY_URI;
private AnyURIValue defaultElementNamespaceSchema = AnyURIValue.EMPTY_URI;
/**
* The default collation URI.
*/
private String defaultCollation = Collations.UNICODE_CODEPOINT_COLLATION_URI;
/**
* Default Collator. Will be null for the default unicode codepoint collation.
*/
private Collator defaultCollator = null;
/**
* Set to true to enable XPath 1.0 backwards compatibility.
*/
private boolean backwardsCompatible = false;
/**
* Should whitespace inside node constructors be stripped?
*/
private boolean stripWhitespace = true;
/**
* Should empty order greatest or least?
*/
private boolean orderEmptyGreatest = true;
/**
* XQuery 3.0 - declare context item :=
*/
private ContextItemDeclaration contextItemDeclaration = null;
/**
* The context item set in the query prolog or externally
*/
private Sequence contextItem = Sequence.EMPTY_SEQUENCE;
/**
* The position of the currently processed item in the context sequence. This field has to be set on demand, for example, before calling the
* fn:position() function.
*/
private int contextPosition = 0;
private Sequence contextSequence = null;
/**
* Shared name pool used by all in-memory documents constructed in this query context.
*/
private NamePool sharedNamePool = null;
/**
* Stack for temporary document fragments.
*/
private Deque<MemTreeBuilder> fragmentStack = new ArrayDeque<>();
/**
* The root of the expression tree.
*/
private Expression rootExpression;
/**
* An incremental counter to count the expressions in the current XQuery. Used during compilation to assign a unique ID to every expression.
*/
private int expressionCounter = 0;
// /**
// * Should all documents loaded by the query be locked? If set to true, it is the responsibility of the calling client code to unlock documents
// * after the query has completed.
// */
// private boolean lockDocumentsOnLoad = false;
// /**
// * Documents locked during the query.
// */
// private LockedDocumentMap lockedDocuments = null;
private LockedDocumentMap protectedDocuments = null;
/**
* The profiler instance used by this context.
*/
protected Profiler profiler;
//For holding the environment variables
private Map<String, String> envs;
private ContextUpdateListener updateListener = null;
private boolean enableOptimizer = true;
private boolean raiseErrorOnFailedRetrieval = XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL_DEFAULT;
private boolean isShared = false;
private Source source = null;
private DebuggeeJoint debuggeeJoint = null;
private int xqueryVersion = 31;
protected Database db;
private boolean analyzed = false;
/**
* The Subject of the User that requested the execution of the XQuery
* attached by this Context. This is not the same as the Effective User
* as we may be executed setUid or setGid. The Effective User can be retrieved
* through broker.getCurrentSubject()
*/
private Subject realUser;
/**
* Indicates whether a user from a http session
* was pushed onto the current broker from {@link XQueryContext#prepareForExecution()},
* if so then we must pop the user in {@link XQueryContext#reset(boolean)}
*/
private boolean pushedUserFromHttpSession = false;
/**
* The HTTP context within which the XQuery
* is executing, or null if there is no
* HTTP context.
*/
@Nullable
private HttpContext httpContext = null;
private final Map<QName, DecimalFormat> staticDecimalFormats = new HashMap<>();
private static final QName UNNAMED_DECIMAL_FORMAT = new QName("__UNNAMED__", Function.BUILTIN_FUNCTION_NS);
/**
 * Creates a bare context with a no-op profiler and the default (unnamed)
 * decimal format. Used as the common initialisation step of the other constructors.
 */
public XQueryContext() {
profiler = new Profiler(null);
staticDecimalFormats.put(UNNAMED_DECIMAL_FORMAT, DecimalFormat.UNNAMED);
}
/**
 * Creates a context bound to the given database, with a profiler attached to it.
 *
 * @param db the database instance this context executes against
 */
public XQueryContext(final Database db) {
this(db, new Profiler(db));
}
/**
 * Creates a context bound to the given database and profiler.
 * Loads the XQuery defaults (namespaces, modules, options) from the
 * database configuration.
 *
 * @param db       the database instance this context executes against
 * @param profiler the profiler to use; replaces the no-op profiler set by {@code this()}
 */
public XQueryContext(final Database db, Profiler profiler) {
this();
this.db = db;
loadDefaults(db.getConfiguration());
this.profiler = profiler;
}
public XQueryContext(final XQueryContext copyFrom) {
this();
this.db = copyFrom.db;
loadDefaultNS();
for (final String prefix : copyFrom.staticNamespaces.keySet()) {
if (XML_NS_PREFIX.equals(prefix) || XMLNS_ATTRIBUTE.equals(prefix)) {
continue;
}
try {
declareNamespace(prefix, copyFrom.staticNamespaces.get(prefix));
} catch (final XPathException ex) {
ex.printStackTrace();
}
}
this.profiler = copyFrom.profiler;
}
/**
 * Returns the HTTP context this query is executing within.
 *
 * @return the HTTP context, or null when the query is not executed
 *         in response to an HTTP request.
 */
@Nullable
public HttpContext getHttpContext() {
    return this.httpContext;
}
/**
 * Set the HTTP context of the XQuery.
 *
 * @param httpContext the HTTP context within which the XQuery
 * is being executed.
 */
public void setHttpContext(final HttpContext httpContext) {
this.httpContext = httpContext;
}
/**
 * Returns the EXPath package repository of the broker pool, if one is configured.
 *
 * @return the repository, or {@code Optional.empty()} when no repository is available
 */
public Optional<ExistRepository> getRepository() {
return getBroker().getBrokerPool().getExpathRepo();
}
/**
 * Resolve a Module from the EXPath Repository.
 *
 * @param namespace namespace URI
 * @param prefix    namespace prefix
 *
 * @return the module, or null if no repository is configured or the
 *         namespace cannot be resolved
 *
 * @throws XPathException if the namespace URI is invalid (XQST0046),
 *                        if the module could not be loaded (XQST0059) or compiled (XPST0003)
 */
private Module resolveInEXPathRepository(final String namespace, final String prefix)
        throws XPathException {
    // the repo and its eXist handler
    final Optional<ExistRepository> repo = getRepository();
    if (!repo.isPresent()) {
        // bug fix: previously fell through with a null path and raised an NPE
        // in new FileSource(null, false) when no repository was configured
        return null;
    }
    // try an internal (Java) module first
    final Module jMod = repo.get().resolveJavaModule(namespace, this);
    if (jMod != null) {
        return jMod;
    }
    // then try an eXist-specific XQuery module
    final Path resolved = repo.get().resolveXQueryModule(namespace);
    if (resolved == null) {
        return null;
    }
    // build a module object from the resolved file
    final Source src = new FileSource(resolved, false);
    return compileOrBorrowModule(prefix, namespace, "", src);
}
/**
 * Prepares the XQuery Context for use.
 *
 * Should be called before compilation to prepare the query context,
 * or before re-execution if the query was cached.
 *
 * @throws XPathException in case of static error
 */
public void prepareForReuse() throws XPathException {
    // re-initialise the variables of all internal modules (they were reset earlier)
    final List<InternalModule> internalModules = allModules.values()
            .stream()
            .filter(InternalModule.class::isInstance)
            .map(InternalModule.class::cast)
            .collect(Collectors.toList());
    for (final InternalModule internalModule : internalModules) {
        internalModule.prepare(this);
    }
}
/**
 * This is a root context, so it never has a parent.
 *
 * @return always {@code false}
 */
@Override
public boolean hasParent() {
return false;
}
/**
 * A root context is its own root.
 *
 * @return this context
 */
@Override
public XQueryContext getRootContext() {
return this;
}
/**
 * Creates an independent copy of this context via the copy constructor
 * plus {@link #copyFields(XQueryContext)}.
 *
 * @return the copied context
 */
@Override
public XQueryContext copyContext() {
final XQueryContext ctx = new XQueryContext(this);
copyFields(ctx);
return ctx;
}
/**
 * Updates the mutable runtime state of this context from another context.
 * Used when a cached/compiled query is re-bound to the current execution.
 * All fields are shared by reference, not copied.
 *
 * @param from the context to take the runtime state from
 */
@Override
public void updateContext(final XQueryContext from) {
    this.watchdog = from.watchdog;
    this.lastVar = from.lastVar;
    // fix: variableStackSize was previously assigned twice (once via the getter,
    // once directly); a single assignment is sufficient
    this.variableStackSize = from.getCurrentStackSize();
    this.contextStack = from.contextStack;
    this.inScopeNamespaces = from.inScopeNamespaces;
    this.inScopePrefixes = from.inScopePrefixes;
    this.inheritedInScopeNamespaces = from.inheritedInScopeNamespaces;
    this.inheritedInScopePrefixes = from.inheritedInScopePrefixes;
    this.attributes = from.attributes;
    this.updateListener = from.updateListener;
    this.modules = from.modules;
    this.allModules = from.allModules;
    this.mappedModules = from.mappedModules;
    this.dynamicOptions = from.dynamicOptions;
    this.staticOptions = from.staticOptions;
    this.db = from.db;
    this.httpContext = from.httpContext;
}
/**
 * Copies the static-context state of this context into {@code ctx}.
 * Collections the two contexts must not share (functions, variables,
 * attributes, modules, namespace maps, options) get independent shallow
 * copies; shared infrastructure (watchdog, profiler, context stack) is
 * passed by reference.
 *
 * @param ctx the freshly created context to populate
 */
protected void copyFields(final XQueryContext ctx) {
ctx.calendar = this.calendar;
ctx.implicitTimeZone = this.implicitTimeZone;
ctx.baseURI = this.baseURI;
ctx.baseURISetInProlog = this.baseURISetInProlog;
ctx.staticDocumentPaths = this.staticDocumentPaths;
ctx.staticDocuments = this.staticDocuments;
ctx.dynamicDocuments = this.dynamicDocuments;
ctx.dynamicTextResources = this.dynamicTextResources;
ctx.dynamicCollections = this.dynamicCollections;
ctx.moduleLoadPath = this.moduleLoadPath;
ctx.defaultFunctionNamespace = this.defaultFunctionNamespace;
ctx.defaultElementNamespace = this.defaultElementNamespace;
ctx.defaultCollation = this.defaultCollation;
ctx.defaultCollator = this.defaultCollator;
ctx.backwardsCompatible = this.backwardsCompatible;
ctx.enableOptimizer = this.enableOptimizer;
ctx.stripWhitespace = this.stripWhitespace;
ctx.preserveNamespaces = this.preserveNamespaces;
ctx.inheritNamespaces = this.inheritNamespaces;
ctx.orderEmptyGreatest = this.orderEmptyGreatest;
// independent shallow copies, so later declarations do not leak between contexts
ctx.declaredFunctions = new TreeMap<>(this.declaredFunctions);
ctx.globalVariables = new TreeMap<>(this.globalVariables);
ctx.attributes = new HashMap<>(this.attributes);
// make imported modules available in the new context
ctx.modules = new HashMap<>();
for (final Module module : this.modules.values()) {
try {
ctx.modules.put(module.getNamespaceURI(), module);
final String prefix = this.staticPrefixes.get(module.getNamespaceURI());
ctx.declareNamespace(prefix, module.getNamespaceURI());
} catch (final XPathException e) {
// ignore
}
}
ctx.allModules = new HashMap<>();
for (final Module module : this.allModules.values()) {
if (module != null) { //UNDERSTAND: why is it possible? -shabanovd
ctx.allModules.put(module.getNamespaceURI(), module);
}
}
// shared infrastructure: referenced, not copied
ctx.watchdog = this.watchdog;
ctx.profiler = getProfiler();
ctx.lastVar = this.lastVar;
ctx.variableStackSize = getCurrentStackSize();
ctx.contextStack = this.contextStack;
ctx.mappedModules = new HashMap<>(this.mappedModules);
ctx.staticNamespaces = new HashMap<>(this.staticNamespaces);
ctx.staticPrefixes = new HashMap<>(this.staticPrefixes);
if (this.dynamicOptions != null) {
ctx.dynamicOptions = new ArrayList<>(this.dynamicOptions);
}
if (this.staticOptions != null) {
ctx.staticOptions = new ArrayList<>(this.staticOptions);
}
ctx.source = this.source;
ctx.httpContext = this.httpContext;
}
/**
 * Prepares this context for execution: pushes the HTTP-session user (if any)
 * onto the broker, records the real requesting user, and resets the context
 * position. The pushed user is popped again in {@code reset(boolean)}.
 */
@Override
public void prepareForExecution() {
//if there is an existing user in the current http session
//then set the DBBroker user
final Subject user = getUserFromHttpSession();
if (user != null) {
getBroker().pushSubject(user); //this will be popped in {@link XQueryContext#reset(boolean)}
this.pushedUserFromHttpSession = true;
}
setRealUser(getBroker().getCurrentSubject()); //this will be unset in {@link XQueryContext#reset(boolean)}
//Reset current context position
setContextSequencePosition(0, null);
//Note that, for some reasons, an XQueryContext might be used without calling this method
}
/**
 * Sets the external context item for this query.
 *
 * @param contextItem the context item sequence
 */
public void setContextItem(final Sequence contextItem) {
this.contextItem = contextItem;
}
/**
 * Sets the XQuery 3.0 {@code declare context item} declaration of the prolog.
 *
 * @param contextItemDeclaration the declaration
 */
public void setContextItemDeclaration(final ContextItemDeclaration contextItemDeclaration) {
this.contextItemDeclaration = contextItemDeclaration;
}
/**
 * Returns the prolog's context item declaration, or null if none was declared.
 * NOTE: the method name is misspelled ("Declartion") but is kept for
 * backwards compatibility with existing callers.
 *
 * @return the declaration, or null
 */
public ContextItemDeclaration getContextItemDeclartion() {
return contextItemDeclaration;
}
/**
 * Returns the context item set in the prolog or externally.
 *
 * @return the context item sequence
 */
public Sequence getContextItem() {
return contextItem;
}
/**
 * @return true when the profiler attached to this context is enabled
 */
@Override
public boolean isProfilingEnabled() {
return profiler.isEnabled();
}
/**
 * @param verbosity the minimum verbosity level required
 * @return true when profiling is enabled at the given verbosity or above
 */
@Override
public boolean isProfilingEnabled(final int verbosity) {
return profiler.isEnabled() && profiler.verbosity() >= verbosity;
}
/**
 * @return the profiler instance attached to this context
 */
@Override
public Profiler getProfiler() {
return profiler;
}
/**
 * Sets the root of the compiled expression tree.
 *
 * @param expr the root expression
 */
@Override
public void setRootExpression(final Expression expr) {
this.rootExpression = expr;
}
/**
 * @return the root of the compiled expression tree, or null before compilation
 */
@Override
public Expression getRootExpression() {
return rootExpression;
}
/**
 * Returns the next unique expression id. Every expression in the XQuery is
 * identified by a unique id which is assigned during compilation by calling
 * this method.
 *
 * @return The next unique expression id.
 */
int nextExpressionId() {
    final int id = expressionCounter;
    expressionCounter = id + 1;
    return id;
}
/**
 * @return the number of expression ids handed out so far during compilation
 */
@Override
public int getExpressionCount() {
return expressionCounter;
}
/**
 * Declares a prefix/URI binding in the static context.
 *
 * Null arguments are treated as the empty string. An empty URI unbinds an
 * existing prefix. A handful of well-known prefixes (xs, xsi, xdt, fn, math,
 * local) may be rebound away from their default namespaces; any other
 * already-bound prefix may only be re-declared with the same URI.
 *
 * @param prefix the namespace prefix (may be null, treated as "")
 * @param uri    the namespace URI (may be null, treated as ""; "" unbinds)
 * @throws XPathException XQST0070 if the 'xml'/'xmlns' prefix or the XML
 *         namespace URI is involved; XQST0033 if the prefix is already bound
 *         to a different URI
 */
@Override
public void declareNamespace(String prefix, String uri) throws XPathException {
if (prefix == null) {
prefix = "";
}
if (uri == null) {
uri = "";
}
if (XML_NS_PREFIX.equals(prefix) || XMLNS_ATTRIBUTE.equals(prefix)) {
throw new XPathException(ErrorCodes.XQST0070, "Namespace predefined prefix '" + prefix + "' can not be bound");
}
if (uri.equals(XML_NS)) {
throw new XPathException(ErrorCodes.XQST0070, "Namespace URI '" + uri + "' must be bound to the 'xml' prefix");
}
final String prevURI = staticNamespaces.get(prefix);
//This prefix was not bound
if (prevURI == null) {
if (uri.isEmpty()) {
//Nothing to bind
//TODO : check the specs : unbinding an NS which is not already bound may be disallowed.
LOG.warn("Unbinding unbound prefix '" + prefix + "'");
} else {
//Bind it
staticNamespaces.put(prefix, uri);
staticPrefixes.put(uri, prefix);
}
} else {
//This prefix was bound
//Unbind it
if (uri.isEmpty()) {
// if an empty namespace is specified,
// remove any existing mapping for this namespace
//TODO : improve, since XML_NS can't be unbound
staticPrefixes.remove(uri);
staticNamespaces.remove(prefix);
return;
}
//those prefixes can be rebound to different URIs
if (("xs".equals(prefix) && Namespaces.SCHEMA_NS.equals(prevURI))
|| ("xsi".equals(prefix) && Namespaces.SCHEMA_INSTANCE_NS.equals(prevURI))
|| ("xdt".equals(prefix) && Namespaces.XPATH_DATATYPES_NS.equals(prevURI))
|| ("fn".equals(prefix) && Namespaces.XPATH_FUNCTIONS_NS.equals(prevURI))
|| ("math".equals(prefix)) && Namespaces.XPATH_FUNCTIONS_MATH_NS.equals(prevURI)
|| ("local".equals(prefix) && Namespaces.XQUERY_LOCAL_NS.equals(prevURI))) {
staticPrefixes.remove(prevURI);
staticNamespaces.remove(prefix);
staticNamespaces.put(prefix, uri);
staticPrefixes.put(uri, prefix);
} else {
//Forbids rebinding the *same* prefix in a *different* namespace in this *same* context
if (!uri.equals(prevURI)) {
throw new XPathException(ErrorCodes.XQST0033, "Cannot bind prefix '" + prefix + "' to '" + uri + "' it is already bound to '" + prevURI + "'");
}
}
}
}
/**
 * Declares a batch of prefix/URI bindings in the static context.
 * Unlike {@link #declareNamespace(String, String)}, no validation is
 * performed: existing bindings are silently overwritten.
 *
 * @param namespaceMap prefix-to-URI map; null keys or values are treated as ""
 */
@Override
public void declareNamespaces(final Map<String, String> namespaceMap) {
    for (final Map.Entry<String, String> binding : namespaceMap.entrySet()) {
        final String prefix = binding.getKey() == null ? "" : binding.getKey();
        final String uri = binding.getValue() == null ? "" : binding.getValue();
        staticNamespaces.put(prefix, uri);
        staticPrefixes.put(uri, prefix);
    }
}
/**
 * Removes all bindings for the given namespace URI from the static,
 * in-scope and inherited in-scope maps.
 *
 * NOTE(review): each loop returns as soon as it removes one matching
 * entry, so the in-scope maps are only scanned when no static mapping
 * matched, and at most one prefix per map is removed even if several
 * prefixes were bound to the URI — confirm this short-circuiting is intended.
 *
 * @param uri the namespace URI to unbind
 */
@Override
public void removeNamespace(final String uri) {
staticPrefixes.remove(uri);
for (final Iterator<String> i = staticNamespaces.values().iterator(); i.hasNext(); ) {
if (i.next().equals(uri)) {
i.remove();
return;
}
}
inScopePrefixes.remove(uri);
if (inScopeNamespaces != null) {
for (final Iterator<String> i = inScopeNamespaces.values().iterator(); i.hasNext(); ) {
if (i.next().equals(uri)) {
i.remove();
return;
}
}
}
inheritedInScopePrefixes.remove(uri);
if (inheritedInScopeNamespaces != null) {
for (final Iterator<String> i = inheritedInScopeNamespaces.values().iterator(); i.hasNext(); ) {
if (i.next().equals(uri)) {
i.remove();
return;
}
}
}
}
/**
 * Declares a prefix/URI binding in the local in-scope namespaces,
 * overriding any inherited binding for the same prefix.
 *
 * NOTE(review): when an inherited binding exists, the code removes
 * {@code uri} (the new URI) from inheritedInScopePrefixes rather than the
 * URI previously resolved for the prefix — confirm this is intended.
 *
 * @param prefix the namespace prefix, must not be null
 * @param uri    the namespace URI, must not be null
 * @throws IllegalArgumentException if either argument is null
 */
@Override
public void declareInScopeNamespace(final String prefix, final String uri) {
if (prefix == null || uri == null) {
throw new IllegalArgumentException("null argument passed to declareNamespace");
}
//Activate the namespace by removing it from the inherited namespaces
if (inheritedInScopePrefixes.containsKey(getURIForPrefix(prefix))) {
inheritedInScopePrefixes.remove(uri);
}
inheritedInScopeNamespaces.remove(prefix);
inScopePrefixes.put(uri, prefix);
inScopeNamespaces.put(prefix, uri);
}
/**
 * @param prefix the prefix to look up
 * @return the in-scope namespace URI bound to the prefix, or null
 */
@Override
public String getInScopeNamespace(final String prefix) {
return inScopeNamespaces == null ? null : inScopeNamespaces.get(prefix);
}
/**
 * @param uri the namespace URI to look up
 * @return the in-scope prefix bound to the URI, or null
 */
@Override
public String getInScopePrefix(final String uri) {
return inScopePrefixes == null ? null : inScopePrefixes.get(uri);
}
/**
 * @return the live URI-to-prefix map of local in-scope bindings (not a copy)
 */
public Map<String, String> getInScopePrefixes() {
return inScopePrefixes;
}
/**
 * @param prefix the prefix to look up
 * @return the inherited in-scope namespace URI bound to the prefix, or null
 */
@Override
public String getInheritedNamespace(final String prefix) {
return inheritedInScopeNamespaces == null ? null : inheritedInScopeNamespaces.get(prefix);
}
/**
 * @param uri the namespace URI to look up
 * @return the inherited in-scope prefix bound to the URI, or null
 */
@Override
public String getInheritedPrefix(final String uri) {
return inheritedInScopePrefixes == null ? null : inheritedInScopePrefixes.get(uri);
}
/**
 * Resolves a prefix to a namespace URI. Lookup order: local in-scope
 * declarations, then inherited in-scope declarations (when namespace
 * inheritance is enabled), then the statically declared namespaces.
 *
 * @param prefix the prefix to resolve
 * @return the bound namespace URI, or null if the prefix is unbound
 */
@Override
public String getURIForPrefix(final String prefix) {
    // in-scope declarations take precedence over everything else
    if (inScopeNamespaces != null) {
        final String inScope = inScopeNamespaces.get(prefix);
        if (inScope != null) {
            return inScope;
        }
    }
    // then namespaces inherited from the enclosing constructors, if enabled
    if (inheritNamespaces && inheritedInScopeNamespaces != null) {
        final String inherited = inheritedInScopeNamespaces.get(prefix);
        if (inherited != null) {
            return inherited;
        }
    }
    // finally fall back to the statically declared namespaces
    return staticNamespaces.get(prefix);
}
/**
 * Resolves a namespace URI to a prefix. Lookup order mirrors
 * {@link #getURIForPrefix(String)}: local in-scope bindings, then inherited
 * in-scope bindings (when inheritance is enabled), then static bindings.
 *
 * @param uri the namespace URI to resolve
 * @return the bound prefix, or null if the URI is unbound
 */
@Override
public String getPrefixForURI(final String uri) {
    if (inScopePrefixes != null) {
        final String inScope = inScopePrefixes.get(uri);
        if (inScope != null) {
            return inScope;
        }
    }
    if (inheritNamespaces && inheritedInScopePrefixes != null) {
        final String inherited = inheritedInScopePrefixes.get(uri);
        if (inherited != null) {
            return inherited;
        }
    }
    return staticPrefixes.get(uri);
}
/**
 * @return the default function namespace URI of the static context
 */
@Override
public String getDefaultFunctionNamespace() {
return defaultFunctionNamespace;
}
/**
 * Sets the default function namespace. May only be changed once from its
 * eXist-forced initial value of the built-in function namespace.
 *
 * @param uri the new default function namespace
 * @throws XPathException XQST0066 if the namespace was already set to a different URI
 */
@Override
public void setDefaultFunctionNamespace(final String uri) throws XPathException {
//Not sure for the 2nd clause : eXist-db forces the function NS as default.
if ((defaultFunctionNamespace != null) && !defaultFunctionNamespace.equals(Function.BUILTIN_FUNCTION_NS) && !defaultFunctionNamespace.equals(uri)) {
throw new XPathException(ErrorCodes.XQST0066, "Default function namespace is already set to: '" + defaultFunctionNamespace + "'");
}
defaultFunctionNamespace = uri;
}
/**
 * @return the default element namespace schema URI as a string
 * @throws XPathException if the URI value cannot be converted to a string
 */
@Override
public String getDefaultElementNamespaceSchema() throws XPathException {
return defaultElementNamespaceSchema.getStringValue();
}
/**
 * Sets the default element namespace schema. May only be set once
 * (from the forced empty-URI initial value).
 *
 * @param uri the schema location URI
 * @throws XPathException XQST0066 if already set
 */
@Override
public void setDefaultElementNamespaceSchema(final String uri) throws XPathException {
// eXist forces the empty element NS as default.
if (!defaultElementNamespaceSchema.equals(AnyURIValue.EMPTY_URI)) {
throw new XPathException(ErrorCodes.XQST0066, "Default function namespace schema is already set to: '" + defaultElementNamespaceSchema.getStringValue() + "'");
}
defaultElementNamespaceSchema = new AnyURIValue(uri);
}
/**
 * @return the default element namespace URI as a string
 * @throws XPathException if the URI value cannot be converted to a string
 */
@Override
public String getDefaultElementNamespace() throws XPathException {
return defaultElementNamespace.getStringValue();
}
/**
 * Sets the default element namespace and, optionally, its schema location.
 * May only be set once (from the forced empty-URI initial value).
 *
 * @param uri    the default element namespace URI
 * @param schema optional schema location URI, may be null
 * @throws XPathException XQST0066 if already set
 */
@Override
public void setDefaultElementNamespace(final String uri, @Nullable final String schema) throws XPathException {
// eXist forces the empty element NS as default.
if (!defaultElementNamespace.equals(AnyURIValue.EMPTY_URI)) {
throw new XPathException(ErrorCodes.XQST0066,
"Default element namespace is already set to: '" + defaultElementNamespace.getStringValue() + "'");
}
defaultElementNamespace = new AnyURIValue(uri);
if (schema != null) {
defaultElementNamespaceSchema = new AnyURIValue(schema);
}
}
/**
 * Sets the default collation of the static context.
 *
 * Both spellings of the Unicode codepoint collation select the default
 * (null) collator. Absolute URIs, eXist collation URIs and '?'-style
 * parameter collations are resolved directly; relative URIs are resolved
 * against the static base URI.
 *
 * @param uri the collation URI
 * @throws XPathException XQST0038 if the URI is malformed or names an unknown collation
 */
@Override
public void setDefaultCollation(final String uri) throws XPathException {
    if (uri.equals(Collations.UNICODE_CODEPOINT_COLLATION_URI) || uri.equals(Collations.CODEPOINT_SHORT)) {
        defaultCollation = Collations.UNICODE_CODEPOINT_COLLATION_URI;
        defaultCollator = null;
        // bug fix: previously fell through, re-resolving the codepoint collation
        // (and, for the short form, resolving it against the base URI, which fails)
        return;
    }
    final URI uriTest;
    try {
        uriTest = new URI(uri);
    } catch (final URISyntaxException e) {
        throw new XPathException(ErrorCodes.XQST0038, "Unknown collation : '" + uri + "'");
    }
    if (uri.startsWith(Collations.EXIST_COLLATION_URI) || uri.charAt(0) == '?' || uriTest.isAbsolute()) {
        defaultCollator = Collations.getCollationFromURI(uri);
        defaultCollation = uri;
    } else {
        // relative collation URI: resolve against the static base URI
        final String absUri = getBaseURI().getStringValue() + uri;
        defaultCollator = Collations.getCollationFromURI(absUri);
        defaultCollation = absUri;
    }
}
/**
 * @return the URI of the default collation of the static context
 */
@Override
public String getDefaultCollation() {
return defaultCollation;
}
/**
 * Resolves a collation URI to a Collator.
 *
 * @param uri the collation URI, or null for the default collation
 * @return the collator; null denotes the Unicode codepoint collation
 * @throws XPathException if the URI names an unknown collation
 */
@Override
public Collator getCollator(final String uri) throws XPathException {
    // a null URI selects the statically declared default collation
    return uri == null ? defaultCollator : Collations.getCollationFromURI(uri);
}
/**
 * @return the default collator; null denotes the Unicode codepoint collation
 */
@Override
public Collator getDefaultCollator() {
return defaultCollator;
}
/**
 * Restricts the statically known documents to the given collection/document
 * paths; the actual document set is built lazily on first access.
 *
 * @param docs the paths of documents and collections
 */
@Override
public void setStaticallyKnownDocuments(final XmldbURI[] docs) {
staticDocumentPaths = docs;
}
/**
 * Sets the statically known documents directly, bypassing the lazy build
 * from paths.
 *
 * @param set the document set
 */
@Override
public void setStaticallyKnownDocuments(final DocumentSet set) {
staticDocuments = set;
}
/**
 * Registers a document supplier in the "available documents" of the
 * dynamic context. The registry is created lazily on first use.
 *
 * @param uri      the URI under which the document is registered
 * @param supplier produces the document on demand
 */
public void addDynamicallyAvailableDocument(final String uri,
final TriFunctionE<DBBroker, Txn, String, Either<org.exist.dom.memtree.DocumentImpl, DocumentImpl>, XPathException> supplier) {
if (dynamicDocuments == null) {
dynamicDocuments = new HashMap<>();
}
dynamicDocuments.put(uri, supplier);
}
/**
 * Registers a text-resource supplier in the "available text resources" of
 * the dynamic context, keyed by (URI, charset). The registry is created
 * lazily on first use.
 *
 * @param uri      the URI under which the resource is registered
 * @param encoding the charset the resource is registered for
 * @param supplier produces a Reader over the resource on demand
 */
public void addDynamicallyAvailableTextResource(final String uri, final Charset encoding,
final QuadFunctionE<DBBroker, Txn, String, Charset, Reader, XPathException> supplier) {
if (dynamicTextResources == null) {
dynamicTextResources = new HashMap<>();
}
dynamicTextResources.put(Tuple(uri, encoding), supplier);
}
/**
 * Registers a collection supplier in the "available collections" of the
 * dynamic context. The registry is created lazily on first use.
 *
 * @param uri      the URI under which the collection is registered
 * @param supplier produces the collection's document sequence on demand
 */
public void addDynamicallyAvailableCollection(final String uri,
final TriFunctionE<DBBroker, Txn, String, Sequence, XPathException> supplier) {
if (dynamicCollections == null) {
dynamicCollections = new HashMap<>();
}
dynamicCollections.put(uri, supplier);
}
/**
 * Sets the current dateTime of the dynamic context.
 *
 * @param newCalendar the calendar; cloned so later caller mutations have no effect
 */
@Override
public void setCalendar(final XMLGregorianCalendar newCalendar) {
this.calendar = (XMLGregorianCalendar) newCalendar.clone();
}
/**
 * Sets the implicit timezone of the dynamic context.
 *
 * @param newTimeZone the timezone to use
 */
@Override
public void setTimeZone(final TimeZone newTimeZone) {
this.implicitTimeZone = newTimeZone;
}
/**
 * Returns the current dateTime of the dynamic context, initialising it to
 * "now" on first access so that repeated calls within one query are stable.
 *
 * @return the calendar; may be null if DatatypeFactory initialisation fails
 */
@Override
public XMLGregorianCalendar getCalendar() {
//TODO : we might prefer to return null
if (calendar == null) {
try {
//Initialize to current dateTime
calendar = DatatypeFactory.newInstance().newXMLGregorianCalendar(new GregorianCalendar());
} catch (final DatatypeConfigurationException e) {
LOG.error(e.getMessage(), e);
}
}
//That's how we ensure stability of that static context function
return calendar;
}
/**
 * Returns the implicit timezone of the dynamic context, lazily initialised
 * from the JVM default. When DST is in effect the raw offset is shifted by
 * the DST savings so the returned zone reflects the current wall-clock offset.
 *
 * @return the implicit timezone; stable for the lifetime of the context
 */
@Override
public TimeZone getImplicitTimeZone() {
if (implicitTimeZone == null) {
implicitTimeZone = TimeZone.getDefault();
if (implicitTimeZone.inDaylightTime(new Date())) {
implicitTimeZone.setRawOffset(implicitTimeZone.getRawOffset() + implicitTimeZone.getDSTSavings());
}
}
//That's how we ensure stability of that static context function
return this.implicitTimeZone;
}
/**
 * Returns the statically known documents, building the set lazily:
 * an already-built or protected set is reused; otherwise the set is
 * collected from the configured paths, or from the whole database when no
 * paths were configured. Unreadable resources are skipped with a warning.
 *
 * @return the document set (cached after the first call)
 * @throws XPathException if reading all resources is denied
 */
@Override
public DocumentSet getStaticallyKnownDocuments() throws XPathException {
if (staticDocuments != null) {
// the document set has already been built, return it
return staticDocuments;
}
if (protectedDocuments != null) {
staticDocuments = protectedDocuments.toDocumentSet();
return staticDocuments;
}
final MutableDocumentSet ndocs = new DefaultDocumentSet(40);
if (staticDocumentPaths == null) {
// no path defined: return all documents in the db
try {
getBroker().getAllXMLResources(ndocs);
} catch (final PermissionDeniedException | LockException e) {
LOG.warn(e);
throw new XPathException("Permission denied to read resource all resources: " + e.getMessage(), e);
}
} else {
for (final XmldbURI staticDocumentPath : staticDocumentPaths) {
try {
final Collection collection = getBroker().getCollection(staticDocumentPath);
if (collection != null) {
collection.allDocs(getBroker(), ndocs, true);
} else {
try (final LockedDocument lockedDocument = getBroker().getXMLResource(staticDocumentPath, LockMode.READ_LOCK)) {
final DocumentImpl doc = lockedDocument == null ? null : lockedDocument.getDocument();
if (doc != null) {
if (doc.getPermissions().validate(
getBroker().getCurrentSubject(), Permission.READ)) {
ndocs.add(doc);
}
}
}
}
} catch (final PermissionDeniedException | LockException e) {
LOG.warn("Permission denied to read resource " + staticDocumentPath + ". Skipping it.");
}
}
}
staticDocuments = ndocs;
return staticDocuments;
}
/**
 * @return the cached statically known document set, or null if not yet built
 */
public DocumentSet getStaticDocs() {
return staticDocuments;
}
/**
 * Get's a document from the "Available documents" of the
 * dynamic context.
 *
 * @param uri the URI by which the document was registered
 * @return sequence of available documents matching the URI, or null
 * @throws XPathException in case of dynamic error
 */
public @Nullable
Sequence getDynamicallyAvailableDocument(final String uri) throws XPathException {
    final TriFunctionE<DBBroker, Txn, String, Either<org.exist.dom.memtree.DocumentImpl, DocumentImpl>, XPathException> docSupplier =
            dynamicDocuments == null ? null : dynamicDocuments.get(uri);
    if (docSupplier == null) {
        return null;
    }
    // the supplier yields either an in-memory or a persistent document
    final Either<org.exist.dom.memtree.DocumentImpl, DocumentImpl> doc =
            docSupplier.apply(getBroker(), getBroker().getCurrentTransaction(), uri);
    return doc.fold(md -> md, pd -> (Sequence) pd);
}
/**
 * Get's a text resource from the "Available text resources" of the
 * dynamic context.
 *
 * @param uri     the URI by which the document was registered
 * @param charset the charset to use for retrieving the resource
 * @return a reader over the resource content, or null
 * @throws XPathException in case of a dynamic error
 */
public @Nullable
Reader getDynamicallyAvailableTextResource(final String uri, final Charset charset)
        throws XPathException {
    // resources are keyed by (uri, charset) pairs
    final QuadFunctionE<DBBroker, Txn, String, Charset, Reader, XPathException> textResourceSupplier =
            dynamicTextResources == null ? null : dynamicTextResources.get(Tuple(uri, charset));
    if (textResourceSupplier == null) {
        return null;
    }
    return textResourceSupplier.apply(getBroker(), getBroker().getCurrentTransaction(), uri, charset);
}
/**
 * Get's a collection from the "Available collections" of the
 * dynamic context.
 *
 * @param uri the URI of the collection to retrieve
 * @return a sequence of document nodes, or null
 * @throws XPathException in case of dynamic error
 */
public @Nullable
Sequence getDynamicallyAvailableCollection(final String uri) throws XPathException {
    final TriFunctionE<DBBroker, Txn, String, Sequence, XPathException> collectionSupplier =
            dynamicCollections == null ? null : dynamicCollections.get(uri);
    if (collectionSupplier == null) {
        return null;
    }
    return collectionSupplier.apply(getBroker(), getBroker().getCurrentTransaction(), uri);
}
/**
 * Creates a StAX reader over the given node value. In-memory nodes are read
 * through their owner document; persistent nodes are streamed by the broker
 * starting from the owning document's first child.
 *
 * @param nv the node value to stream
 * @return the stream reader positioned on the node's document
 * @throws XMLStreamException on StAX errors
 * @throws IOException        on I/O errors while reading persistent storage
 */
@Override
public ExtendedXMLStreamReader getXMLStreamReader(final NodeValue nv) throws XMLStreamException, IOException {
final ExtendedXMLStreamReader reader;
if (nv.getImplementationType() == NodeValue.IN_MEMORY_NODE) {
final NodeImpl node = (NodeImpl) nv;
final org.exist.dom.memtree.DocumentImpl ownerDoc = node.getNodeType() == Node.DOCUMENT_NODE ? (org.exist.dom.memtree.DocumentImpl) node : node.getOwnerDocument();
reader = new InMemoryXMLStreamReader(ownerDoc, ownerDoc);
} else {
final NodeProxy proxy = (NodeProxy) nv;
reader = getBroker().newXMLStreamReader(new NodeProxy(proxy.getOwnerDocument(), NodeId.DOCUMENT_NODE, proxy.getOwnerDocument().getFirstChildAddress()), false);
}
return reader;
}
// Sets the map of documents that are write-locked for the duration of the query.
@Override
public void setProtectedDocs(final LockedDocumentMap map) {
this.protectedDocuments = map;
}
@Override
public LockedDocumentMap getProtectedDocs() {
return this.protectedDocuments;
}
// Protected mode is active whenever a locked-document map has been installed.
@Override
public boolean inProtectedMode() {
return protectedDocuments != null;
}
// Document locking on load is permanently disabled in this implementation.
@Override
public boolean lockDocumentsOnLoad() {
return false;
}
// /**
// * If lock is true, all documents loaded during query execution
// * will be locked. This way, we avoid that query results become
// * invalid before the entire result has been processed by the client
// * code. All attempts to modify nodes which are part of the result
// * set will be blocked.
// *
// * However, it is the client's responsibility to proper unlock
// * all documents once processing is completed.
// *
// * @param lock
// */
// public void setLockDocumentsOnLoad(boolean lock) {
// lockDocumentsOnLoad = lock;
// if(lock)
// lockedDocuments = new LockedDocumentMap();
// }
// Intentionally a no-op: document locking on load is disabled
// (see lockDocumentsOnLoad(), which always returns false).
@Override
public void addLockedDocument(final DocumentImpl doc) {
// if (lockedDocuments != null)
// lockedDocuments.add(doc);
}
// /**
// * Release all locks on documents that have been locked
// * during query execution.
// *
// *@see #setLockDocumentsOnLoad(boolean)
// */
// public void releaseLockedDocuments() {
// if(lockedDocuments != null)
// lockedDocuments.unlock();
// lockDocumentsOnLoad = false;
// lockedDocuments = null;
// }
// /**
// * Release all locks on documents not being referenced by the sequence.
// * This is called after query execution has completed. Only locks on those
// * documents contained in the final result set will be preserved. All other
// * locks are released as they are no longer needed.
// *
// * @param seq
// * @throws XPathException
// */
// public LockedDocumentMap releaseUnusedDocuments(Sequence seq) throws XPathException {
// if(lockedDocuments == null)
// return null;
// // determine the set of documents referenced by nodes in the sequence
// DocumentSet usedDocs = new DocumentSet();
// for(SequenceIterator i = seq.iterate(); i.hasNext(); ) {
// Item next = i.nextItem();
// if(Type.subTypeOf(next.getType(), Type.NODE)) {
// NodeValue node = (NodeValue) next;
// if(node.getImplementationType() == NodeValue.PERSISTENT_NODE) {
// DocumentImpl doc = ((NodeProxy)node).getDocument();
// if(!usedDocs.contains(doc.getDocId()))
// usedDocs.add(doc, false);
// }
// }
// }
// LockedDocumentMap remaining = lockedDocuments.unlockSome(usedDocs);
// lockDocumentsOnLoad = false;
// lockedDocuments = null;
// return remaining;
// }
// Marks this context as shared between queries; a shared context skips
// parts of reset() (e.g. local variables and the watchdog).
@Override
public void setShared(final boolean shared) {
isShared = shared;
}
@Override
public boolean isShared() {
return isShared;
}
// Records a document modified by this query; the set is checked for
// fragmentation and cleared during reset().
@Override
public void addModifiedDoc(final DocumentImpl document) {
if (modifiedDocuments == null) {
modifiedDocuments = new DefaultDocumentSet();
}
modifiedDocuments.add(document);
}
// Full reset: also clears globals (statically known documents, global
// variables, module mappings, http context).
@Override
public void reset() {
reset(false);
}
/**
 * Resets the dynamic state of this context after query execution.
 * The statement order matters: the effective user is restored first,
 * modified documents are checked, then builders/sequences/variables
 * and finally the modules themselves are reset.
 *
 * @param keepGlobals if true, globally shared state (known documents,
 *                    global variables, module mappings, http context)
 *                    is preserved for re-use
 */
@Override
public void reset(final boolean keepGlobals) {
setRealUser(null);
// undo a subject pushed from the HTTP session, exactly once
if (this.pushedUserFromHttpSession) {
try {
getBroker().popSubject();
} finally {
this.pushedUserFromHttpSession = false;
}
}
if (modifiedDocuments != null) {
try {
Modification.checkFragmentation(this, modifiedDocuments);
} catch (final LockException | EXistException e) {
LOG.warn("Error while checking modified documents: " + e.getMessage(), e);
}
modifiedDocuments = null;
}
calendar = null;
implicitTimeZone = null;
resetDocumentBuilder();
contextSequence = null;
contextItem = Sequence.EMPTY_SEQUENCE;
if (!keepGlobals) {
// do not reset the statically known documents
staticDocumentPaths = null;
staticDocuments = null;
dynamicDocuments = null;
dynamicTextResources = null;
dynamicCollections = null;
}
// a shared context keeps its local variable chain alive
if (!isShared) {
lastVar = null;
}
// clear inline functions using closures
closures.forEach(func -> func.setClosureVariables(null));
closures.clear();
fragmentStack = new ArrayDeque<>();
callStack.clear();
protectedDocuments = null;
if (!keepGlobals) {
globalVariables.clear();
}
if (dynamicOptions != null) {
dynamicOptions.clear(); //clear any dynamic options
}
if (!isShared) {
watchdog.reset();
}
// give every imported module a chance to reset its own state
for (final Module module : allModules.values()) {
module.reset(this, keepGlobals);
}
if (!keepGlobals) {
mappedModules.clear();
}
savedState.restore();
attributes.clear();
clearUpdateListeners();
profiler.reset();
if (!keepGlobals) {
httpContext = null;
}
// force re-analysis on the next analyzeAndOptimizeIfModulesChanged() call
analyzed = false;
}
// Simple accessors for serialization / static-context flags.
@Override
public boolean stripWhitespace() {
return stripWhitespace;
}
@Override
public void setStripWhitespace(final boolean strip) {
this.stripWhitespace = strip;
}
// XQuery prolog: declare copy-namespaces preserve|no-preserve
@Override
public boolean preserveNamespaces() {
return preserveNamespaces;
}
@Override
public void setPreserveNamespaces(final boolean preserve) {
this.preserveNamespaces = preserve;
}
// XQuery prolog: declare copy-namespaces inherit|no-inherit
@Override
public boolean inheritNamespaces() {
return inheritNamespaces;
}
@Override
public void setInheritNamespaces(final boolean inherit) {
this.inheritNamespaces = inherit;
}
// XQuery prolog: declare default order empty greatest|least
@Override
public boolean orderEmptyGreatest() {
return orderEmptyGreatest;
}
@Override
public void setOrderEmptyGreatest(final boolean order) {
this.orderEmptyGreatest = order;
}
// Modules imported directly by this context.
@Override
public Iterator<Module> getModules() {
return modules.values().iterator();
}
// In this implementation root modules and "all" modules are the same set.
@Override
public Iterator<Module> getRootModules() {
return getAllModules();
}
// All modules known to the whole module tree (including transitive imports).
@Override
public Iterator<Module> getAllModules() {
return allModules.values().iterator();
}
@Override
@Nullable
public Module getModule(final String namespaceURI) {
return modules.get(namespaceURI);
}
@Override
public Module getRootModule(final String namespaceURI) {
return allModules.get(namespaceURI);
}
/**
 * Binds (or, when module is null, unbinds) a module under the given
 * namespace, both locally and in the root module registry.
 */
@Override
public void setModule(final String namespaceURI, final Module module) {
if (module == null) {
modules.remove(namespaceURI); // unbind the module
} else {
modules.put(namespaceURI, module);
}
setRootModule(namespaceURI, module);
}
protected void setRootModule(final String namespaceURI, final Module module) {
if (module == null) {
allModules.remove(namespaceURI); // unbind the module
return;
}
// identity comparison: only flag a change when a *different* instance is bound
if (allModules.get(namespaceURI) != module) {
setModulesChanged();
}
allModules.put(namespaceURI, module);
}
// Flags that the module set changed, so compiled expressions must be re-analyzed.
protected void setModulesChanged() {
this.modulesChanged = true;
}
/**
 * Checks whether all external modules are still up to date, i.e. their
 * sources have not changed since compilation. Internal (Java) modules
 * are always considered valid.
 *
 * @return false as soon as one external module needs reloading
 */
@Override
public boolean checkModulesValid() {
for (final Module module : allModules.values()) {
if (!module.isInternalModule()) {
if (!((ExternalModule) module).moduleIsValid(getBroker())) {
if (LOG.isDebugEnabled()) {
LOG.debug("Module with URI " + module.getNamespaceURI() + " has changed and needs to be reloaded");
}
return false;
}
}
}
return true;
}
/**
 * Analyzes (and optionally optimizes) the expression tree, but only once
 * per context: the 'analyzed' flag guards against repeated work and is
 * cleared by reset().
 */
@Override
public void analyzeAndOptimizeIfModulesChanged(final Expression expr) throws XPathException {
if (analyzed) {
return;
}
analyzed = true;
// recursively analyze all imported external modules first
for (final Module module : expr.getContext().modules.values()) {
if (!module.isInternalModule()) {
final Expression root = ((ExternalModule) module).getRootExpression();
((ExternalModule) module).getContext().analyzeAndOptimizeIfModulesChanged(root);
}
}
expr.analyze(new AnalyzeContextInfo());
if (optimizationsEnabled()) {
final Optimizer optimizer = new Optimizer(this);
expr.accept(optimizer);
// the optimizer rewrote the tree: reset state and re-analyze once more
if (optimizer.hasOptimized()) {
reset(true);
expr.resetState(true);
expr.analyze(new AnalyzeContextInfo());
}
}
modulesChanged = false;
}
/**
 * Loads a built-in (Java) module, returning an already-bound instance for
 * the namespace if one exists instead of instantiating it again.
 *
 * @param namespaceURI the module's namespace URI, may be null
 * @param moduleClass  fully qualified class name of the module implementation
 * @return the module, or null if it could not be loaded
 */
@Override
@Nullable
public Module loadBuiltInModule(final String namespaceURI, final String moduleClass) {
    final Module existing = (namespaceURI == null) ? null : getModule(namespaceURI);
    if (existing == null) {
        return initBuiltInModule(namespaceURI, moduleClass);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("module " + namespaceURI + " is already present");
    }
    return existing;
}
/**
 * Looks up the module class via the broker pool's class loader, verifies it
 * implements org.exist.xquery.Module, and instantiates it with the
 * configured module parameters.
 *
 * @return the new module, or null if the class is missing or of the wrong type
 */
@SuppressWarnings("unchecked")
Module initBuiltInModule(final String namespaceURI, final String moduleClass) {
Module module = null;
try {
// lookup the class
final ClassLoader existClassLoader = getBroker().getBrokerPool().getClassLoader();
final Class<?> mClass = Class.forName(moduleClass, false, existClassLoader);
if (!(Module.class.isAssignableFrom(mClass))) {
LOG.info("failed to load module. " + moduleClass + " is not an instance of org.exist.xquery.Module.");
return null;
}
//instantiateModule( namespaceURI, (Class<Module>)mClass );
// INOTE: expathrepo
module = instantiateModule(namespaceURI, (Class<Module>) mClass, (Map<String, Map<String, List<? extends Object>>>) getBroker().getConfiguration().getProperty(PROPERTY_MODULE_PARAMETERS));
if (LOG.isDebugEnabled()) {
LOG.debug("module " + module.getNamespaceURI() + " loaded successfully.");
}
} catch (final ClassNotFoundException e) {
// best-effort: a missing class is logged and reported as null
LOG.warn("module class " + moduleClass + " not found. Skipping...");
}
return module;
}
/**
 * Instantiates a module class, preferring a Map-taking constructor (for
 * module parameters) and falling back to the no-arg constructor. The new
 * module is registered under its namespace and, if internal, prepared.
 *
 * @return the module, or null on namespace mismatch or instantiation failure
 */
@SuppressWarnings("unchecked")
private Module instantiateModule(final String namespaceURI, final Class<Module> mClazz,
final Map<String, Map<String, List<? extends Object>>> moduleParameters) {
Module module = null;
try {
try {
// attempt for a constructor that takes 1 argument
final Constructor<Module> cstr1 = mClazz.getConstructor(Map.class);
module = cstr1.newInstance(moduleParameters.get(namespaceURI));
} catch (final NoSuchMethodException nsme) {
// attempt for a constructor that takes 0 arguments
module = mClazz.newInstance();
}
// sanity check: the class must declare the namespace it was registered for
if (namespaceURI != null && !module.getNamespaceURI().equals(namespaceURI)) {
LOG.warn("the module declares a different namespace URI. Expected: " + namespaceURI + " found: " + module.getNamespaceURI());
return null;
}
// bind the module's default prefix unless the URI already has a prefix
if (getPrefixForURI(module.getNamespaceURI()) == null && !module.getDefaultPrefix().isEmpty()) {
declareNamespace(module.getDefaultPrefix(), module.getNamespaceURI());
}
modules.put(module.getNamespaceURI(), module);
allModules.put(module.getNamespaceURI(), module);
if (module instanceof InternalModule) {
((InternalModule) module).prepare(this);
}
} catch (final Throwable e) {
if (e instanceof InterruptedException) {
// NOTE: must set interrupted flag
Thread.currentThread().interrupt();
}
// deliberately broad: any failure during instantiation yields null
LOG.warn("error while instantiating module class " + mClazz.getName(), e);
}
return module;
}
/**
 * Declares a user-defined function in this context. The function's name
 * must carry a non-empty namespace URI (XQST0060) that is not one of the
 * reserved namespaces (XQST0045).
 *
 * @param function the function to declare
 * @throws XPathException if the function name violates the namespace rules
 */
@Override
public void declareFunction(final UserDefinedFunction function) throws XPathException {
    // TODO: redeclaring functions should be forbidden. however, throwing an
    // exception will currently break util:eval.
    final QName name = function.getSignature().getName();
    final String namespaceURI = name.getNamespaceURI();
    // user functions may not live in any of the reserved namespaces
    for (final String forbidden : new String[]{
            XML_NS, Namespaces.SCHEMA_NS, Namespaces.SCHEMA_INSTANCE_NS, Namespaces.XPATH_FUNCTIONS_NS}) {
        if (forbidden.equals(namespaceURI)) {
            throw new XPathException(function, ErrorCodes.XQST0045, "Function '" + name + "' is in the forbidden namespace '" + forbidden + "'");
        }
    }
    if (namespaceURI.isEmpty()) {
        throw new XPathException(function, ErrorCodes.XQST0060, "Every declared function name must have a non-null namespace URI, but function '" + name + "' does not meet this requirement.");
    }
    // NOTE: silently replaces any previous declaration with the same id/arity
    declaredFunctions.put(function.getSignature().getFunctionId(), function);
}
// Looks up a declared function by name and arity; null if not declared.
@Override
@Nullable
public UserDefinedFunction resolveFunction(final QName name, final int argCount) throws XPathException {
final FunctionId id = new FunctionId(name, argCount);
return declaredFunctions.get(id);
}
// Collects the signatures of all declared functions sharing the given name
// (i.e. all declared arities of that function).
@Override
public Iterator<FunctionSignature> getSignaturesForFunction(final QName name) {
final ArrayList<FunctionSignature> signatures = new ArrayList<>(2);
for (final UserDefinedFunction func : declaredFunctions.values()) {
if (func.getName().equals(name)) {
signatures.add(func.getSignature());
}
}
return signatures.iterator();
}
// All functions declared directly in this context.
@Override
public Iterator<UserDefinedFunction> localFunctions() {
return declaredFunctions.values().iterator();
}
/**
 * Appends a local variable binding to the end of the variable chain and
 * stamps it with the current stack depth.
 */
@Override
public LocalVariable declareVariableBinding(final LocalVariable var) throws XPathException {
if (lastVar == null) {
lastVar = var;
} else {
lastVar.addAfter(var);
lastVar = var;
}
var.setStackPosition(getCurrentStackSize());
return var;
}
// Declares (or replaces) a global variable keyed by its qualified name.
@Override
public Variable declareGlobalVariable(final Variable var) {
globalVariables.put(var.getQName(), var);
var.setStackPosition(getCurrentStackSize());
return var;
}
@Override
public void undeclareGlobalVariable(final QName name) {
globalVariables.remove(name);
}
// Convenience overload: parses the lexical QName against the in-scope
// namespaces before delegating.
@Override
public Variable declareVariable(final String qname, final Object value) throws XPathException {
try {
return declareVariable(QName.parse(this, qname, null), value);
} catch (final QName.IllegalQNameException e) {
throw new XPathException(ErrorCodes.XPST0081, "No namespace defined for prefix: " + qname);
}
}
/**
 * Declares (or re-assigns) a variable. If the variable's namespace belongs
 * to an imported module, the declaration is delegated to that module.
 * Otherwise the Java value is converted to an XDM sequence and, when the
 * variable carries a declared sequence type, checked for cardinality and
 * item type (XPTY0004).
 */
@Override
public Variable declareVariable(final QName qn, final Object value) throws XPathException {
Variable var;
final Module module = getModule(qn.getNamespaceURI());
if (module != null) {
var = module.declareVariable(qn, value);
return var;
}
final Sequence val = XPathUtil.javaObjectToXPath(value, this);
var = globalVariables.get(qn);
if (var == null) {
var = new VariableImpl(qn);
globalVariables.put(qn, var);
}
if (var.getSequenceType() != null) {
int actualCardinality;
if (val.isEmpty()) {
actualCardinality = Cardinality.EMPTY;
} else if (val.hasMany()) {
actualCardinality = Cardinality.MANY;
} else {
actualCardinality = Cardinality.ONE;
}
//Type.EMPTY is *not* a subtype of other types ; checking cardinality first
if (!Cardinality.checkCardinality(var.getSequenceType().getCardinality(), actualCardinality)) {
throw new XPathException("XPTY0004: Invalid cardinality for variable $" + var.getQName() + ". Expected " + Cardinality.getDescription(var.getSequenceType().getCardinality()) + ", got " + Cardinality.getDescription(actualCardinality));
}
//TODO : ignore nodes right now ; they are returned as xs:untypedAtomicType
if (!Type.subTypeOf(var.getSequenceType().getPrimaryType(), Type.NODE)) {
if (!val.isEmpty() && !Type.subTypeOf(val.getItemType(), var.getSequenceType().getPrimaryType())) {
throw new XPathException("XPTY0004: Invalid type for variable $" + var.getQName() + ". Expected " + Type.getTypeName(var.getSequenceType().getPrimaryType()) + ", got " + Type.getTypeName(val.getItemType()));
}
//Here is an attempt to process the nodes correctly
} else {
//Same as above : we probably may factorize
if (!val.isEmpty() && !Type.subTypeOf(val.getItemType(), var.getSequenceType().getPrimaryType())) {
throw new XPathException("XPTY0004: Invalid type for variable $" + var.getQName() + ". Expected " + Type.getTypeName(var.getSequenceType().getPrimaryType()) + ", got " + Type.getTypeName(val.getItemType()));
}
}
}
//TODO : should we allow global variable *re*declaration ?
var.setValue(val);
return var;
}
// Convenience overload: parses the lexical QName before delegating.
@Override
public Variable resolveVariable(final String name) throws XPathException {
try {
final QName qn = QName.parse(this, name, null);
return resolveVariable(qn);
} catch (final QName.IllegalQNameException e) {
throw new XPathException(ErrorCodes.XPST0081, "No namespace defined for prefix " + name);
}
}
/**
 * Resolves a variable by qualified name. Lookup order: local bindings,
 * then the module bound to the variable's namespace, then globals.
 * Returns null when the variable is not bound anywhere.
 */
@Override
public Variable resolveVariable(final QName qname) throws XPathException {
// check if the variable is declared local
Variable var = resolveLocalVariable(qname);
// check if the variable is declared in a module
if (var == null) {
final Module module = getModule(qname.getNamespaceURI());
if (module != null) {
var = module.resolveVariable(qname);
}
}
// check if the variable is declared global
if (var == null) {
var = globalVariables.get(qname);
}
//if (var == null)
// throw new XPathException("variable $" + qname + " is not bound");
return var;
}
// Direct lookup in the global variable table, bypassing locals and modules.
Variable resolveGlobalVariable(final QName qname) {
return globalVariables.get(qname);
}
/**
 * Walks the local variable chain backwards (most recent first) until the
 * current context boundary is reached; variables beyond that boundary are
 * out of scope.
 */
protected Variable resolveLocalVariable(final QName qname) throws XPathException {
final LocalVariable end = contextStack.peek();
for (LocalVariable var = lastVar; var != null; var = var.before) {
if (var == end) {
return null;
}
if (qname.equals(var.getQName())) {
return var;
}
}
return null;
}
/**
 * Checks whether a variable is declared either in the module bound to its
 * namespace or among the global variables of this context.
 */
@Override
public boolean isVarDeclared(final QName qname) {
    final Module module = getModule(qname.getNamespaceURI());
    if (module != null && module.isVarDeclared(qname)) {
        return true;
    }
    return globalVariables.get(qname) != null;
}
/**
 * Returns all visible variables: globals plus the in-scope locals.
 * Locals are added last and therefore shadow globals of the same name.
 */
@Override
public Map<QName, Variable> getVariables() {
final Map<QName, Variable> variables = new HashMap<>(globalVariables);
LocalVariable end = contextStack.peek();
for (LocalVariable var = lastVar; var != null; var = var.before) {
if (var == end) {
break;
}
variables.put(var.getQName(), var);
}
return variables;
}
// Same chain walk as above, but without the global variables.
@Override
public Map<QName, Variable> getLocalVariables() {
final Map<QName, Variable> variables = new HashMap<>();
LocalVariable end = contextStack.peek();
for (LocalVariable var = lastVar; var != null; var = var.before) {
if (var == end) {
break;
}
variables.put(var.getQName(), var);
}
return variables;
}
/**
 * Return a copy of all currently visible local variables.
 * Used by {@link InlineFunction} to implement closures.
 *
 * @return currently visible local variables as a stack, or null when there are none
 */
public List<ClosureVariable> getLocalStack() {
List<ClosureVariable> closure = null;
final LocalVariable end = contextStack.peek();
// walk the chain from the most recent binding back to the context boundary
for (LocalVariable var = lastVar; var != null; var = var.before) {
if (var == end) {
break;
}
// lazily allocate: most expressions have no closure variables
if (closure == null) {
closure = new ArrayList<>(6);
}
closure.add(new ClosureVariable(var));
}
return closure;
}
// Defensive copy of the global variable table.
@Override
public Map<QName, Variable> getGlobalVariables() {
return new HashMap<>(globalVariables);
}
/**
 * Restore a saved stack of local variables. Used to implement closures.
 * Bindings are re-declared from the last list element back to the first.
 *
 * @param stack the stack of local variables
 * @throws XPathException if the stack cannot be restored
 */
public void restoreStack(final List<ClosureVariable> stack) throws XPathException {
    for (int i = stack.size(); i-- > 0; ) {
        declareVariableBinding(new ClosureVariable(stack.get(i)));
    }
}
// XPath 1.0 backwards-compatibility mode flag.
@Override
public void setBackwardsCompatibility(boolean backwardsCompatible) {
this.backwardsCompatible = backwardsCompatible;
}
@Override
public boolean isBackwardsCompatible() {
return this.backwardsCompatible;
}
// Whether fn:doc()/collection() failures raise an error instead of
// returning empty.
@Override
public boolean isRaiseErrorOnFailedRetrieval() {
return raiseErrorOnFailedRetrieval;
}
public Database getDatabase() {
return db;
}
// Broker and subject are resolved per-call: the active broker (and its
// current subject) may change between invocations.
@Override
public DBBroker getBroker() {
return db.getActiveBroker();
}
@Override
public Subject getSubject() {
return getBroker().getCurrentSubject();
}
/**
 * If there is a HTTP Session, and a User has been stored in the session then this will return the user object from the session.
 *
 * Resolution order: explicit user/password request attributes (which are
 * authenticated against the security manager) take precedence over a
 * subject previously stored in the HTTP session.
 *
 * @return The user or null if there is no session or no user
 */
Subject getUserFromHttpSession() {
final Optional<RequestWrapper> maybeRequest = Optional.ofNullable(getHttpContext())
.map(HttpContext::getRequest);
if (maybeRequest.isPresent()) {
final RequestWrapper request = maybeRequest.get();
final Object user = request.getAttribute(HTTP_REQ_ATTR_USER);
final Object passAttr = request.getAttribute(HTTP_REQ_ATTR_PASS);
if (user != null) {
final String password = passAttr == null ? null : passAttr.toString();
try {
return getBroker().getBrokerPool().getSecurityManager().authenticate(user.toString(), password);
} catch (final AuthenticationException e) {
// failed authentication falls through to returning null
LOG.error("User can not be authenticated: " + user.toString());
}
} else {
final Optional<SessionWrapper> maybeSession = Optional.ofNullable(getHttpContext())
.map(HttpContext::getSession);
if (maybeSession.isPresent()) {
return (Subject) maybeSession.get().getAttribute(HTTP_SESSIONVAR_XMLDB_USER);
}
}
}
return null;
}
/**
 * The builder used for creating in-memory document fragments.
 * Lazily created; reset to null between fragments (see push/popDocumentContext).
 */
private MemTreeBuilder documentBuilder = null;
// Lazily creates the builder and implicitly starts a document.
@Override
public MemTreeBuilder getDocumentBuilder() {
if (documentBuilder == null) {
documentBuilder = new MemTreeBuilder(this);
documentBuilder.startDocument();
}
return documentBuilder;
}
// Variant allowing the caller to mark the document start as explicit.
@Override
public MemTreeBuilder getDocumentBuilder(final boolean explicitCreation) {
if (documentBuilder == null) {
documentBuilder = new MemTreeBuilder(this);
documentBuilder.startDocument(explicitCreation);
}
return documentBuilder;
}
private void resetDocumentBuilder() {
this.documentBuilder = null;
}
private void setDocumentBuilder(final MemTreeBuilder documentBuilder) {
this.documentBuilder = documentBuilder;
}
// Lazily created name pool shared by in-memory documents of this context.
@Override
public NamePool getSharedNamePool() {
if (sharedNamePool == null) {
sharedNamePool = new NamePool();
}
return sharedNamePool;
}
// This context has no parent context.
@Override
public XQueryContext getContext() {
return null;
}
// Debugger callbacks: all are no-ops unless a debuggee joint is attached.
@Override
public void prologEnter(final Expression expr) {
if (debuggeeJoint != null) {
debuggeeJoint.prologEnter(expr);
}
}
@Override
public void expressionStart(final Expression expr) throws TerminatedException {
if (debuggeeJoint != null) {
debuggeeJoint.expressionStart(expr);
}
}
@Override
public void expressionEnd(final Expression expr) {
if (debuggeeJoint != null) {
debuggeeJoint.expressionEnd(expr);
}
}
@Override
public void stackEnter(final Expression expr) throws TerminatedException {
if (debuggeeJoint != null) {
debuggeeJoint.stackEnter(expr);
}
}
@Override
public void stackLeave(final Expression expr) {
if (debuggeeJoint != null) {
debuggeeJoint.stackLeave(expr);
}
}
// Watchdog checkpoints: throw TerminatedException if the query was killed
// or exceeded its limits.
@Override
public void proceed() throws TerminatedException {
getWatchDog().proceed(null);
}
@Override
public void proceed(final Expression expr) throws TerminatedException {
getWatchDog().proceed(expr);
}
@Override
public void proceed(final Expression expr, final MemTreeBuilder builder) throws TerminatedException {
getWatchDog().proceed(expr, builder);
}
@Override
public void setWatchDog(final XQueryWatchDog watchdog) {
this.watchdog = watchdog;
}
@Override
public XQueryWatchDog getWatchDog() {
return watchdog;
}
// Sentinel pushed onto the fragment stack when there is no current builder;
// compared by identity in popDocumentContext().
private static final MemTreeBuilder NULL_DOCUMENT_BUILDER = new MemTreeBuilder();
// Saves the current in-memory builder so a nested fragment can start fresh.
@Override
public void pushDocumentContext() {
if (documentBuilder == null) {
fragmentStack.push(NULL_DOCUMENT_BUILDER);
} else {
fragmentStack.push(documentBuilder);
resetDocumentBuilder();
}
}
// Restores the previously saved builder (or none, if the sentinel was pushed).
@Override
public void popDocumentContext() {
if (!fragmentStack.isEmpty()) {
final MemTreeBuilder prevBuilder = fragmentStack.pop();
if (prevBuilder == NULL_DOCUMENT_BUILDER) {
setDocumentBuilder(null);
} else {
setDocumentBuilder(prevBuilder);
}
}
}
// Sets the base URI outside of a prolog declaration.
@Override
public void setBaseURI(final AnyURIValue uri) {
setBaseURI(uri, false);
}
/**
 * Sets the base URI of the static context. A base URI declared in the
 * query prolog wins over any later programmatic assignment.
 *
 * @param uri         the new base URI; null is normalised to {@link AnyURIValue#EMPTY_URI}
 * @param setInProlog true when this call originates from a prolog declaration
 */
@Override
public void setBaseURI(final AnyURIValue uri, final boolean setInProlog) {
    if (baseURISetInProlog) {
        return;
    }
    // BUG FIX: previously the EMPTY_URI fallback was immediately overwritten
    // by an unconditional `baseURI = uri`, leaving baseURI null for null input.
    baseURI = (uri == null) ? AnyURIValue.EMPTY_URI : uri;
    baseURISetInProlog = setInProlog;
}
// Base path used to resolve relative module location hints.
@Override
public void setModuleLoadPath(final String path) {
this.moduleLoadPath = path;
}
@Override
public String getModuleLoadPath() {
return moduleLoadPath;
}
// A base URI is considered declared only when it is non-null and non-empty.
@Override
public boolean isBaseURIDeclared() {
return baseURI != null && !baseURI.equals(AnyURIValue.EMPTY_URI);
}
/**
 * Returns the base URI of the static context; may be null or the empty
 * URI when none could be established (see the notes below).
 */
@Override
public AnyURIValue getBaseURI() throws XPathException {
// the base URI in the static context is established according to the
// principles outlined in [RFC3986] Section 5.1—that is, it defaults
// first to the base URI of the encapsulating entity, then to the URI
// used to retrieve the entity, and finally to an implementation-defined
// default. If the URILiteral in the base URI declaration is a relative
// URI, then it is made absolute by resolving it with respect to this
// same hierarchy.
// It is not intrinsically an error if this process fails to establish
// an absolute base URI; however, the base URI in the static context
// is then undefined, and any attempt to use its value may result in
// an error [err:XPST0001].
// if ((baseURI == null) || baseURI.equals(AnyURIValue.EMPTY_URI)) {
// //throw new XPathException(ErrorCodes.XPST0001, "Base URI of the static context has not been assigned a value.");
// // We catch and resolve this to the XmlDbURI.ROOT_COLLECTION_URI
// // at least in DocumentImpl so maybe we should do it here./ljo
// }
return baseURI;
}
// Records the current context item position and the context sequence
// used by positional predicates (fn:position()/fn:last()).
@Override
public void setContextSequencePosition(final int pos, final Sequence sequence) {
contextPosition = pos;
contextSequence = sequence;
}
@Override
public int getContextPosition() {
return contextPosition;
}
@Override
public Sequence getContextSequence() {
return contextSequence;
}
// Default push inherits the current in-scope namespaces.
@Override
public void pushInScopeNamespaces() {
pushInScopeNamespaces(true);
}
/**
 * Saves the current in-scope/inherited namespace maps on the stack and
 * starts fresh maps. Push order (inherited ns, inherited prefixes,
 * in-scope ns, in-scope prefixes) must mirror the pop order below.
 *
 * @param inherit if true, the current in-scope namespaces become part of
 *                the new inherited set; otherwise the new element starts
 *                with no inherited namespaces
 */
@Override
public void pushInScopeNamespaces(final boolean inherit) {
//TODO : push into an inheritedInScopeNamespaces HashMap... and return an empty HashMap
namespaceStack.push(inheritedInScopeNamespaces);
namespaceStack.push(inheritedInScopePrefixes);
namespaceStack.push(inScopeNamespaces);
namespaceStack.push(inScopePrefixes);
//Current namespaces now become inherited just like the previous inherited ones
if (inherit) {
inheritedInScopeNamespaces = new HashMap<>(inheritedInScopeNamespaces);
inheritedInScopeNamespaces.putAll(inScopeNamespaces);
inheritedInScopePrefixes = new HashMap<>(inheritedInScopePrefixes);
inheritedInScopePrefixes.putAll(inScopePrefixes);
} else {
inheritedInScopeNamespaces = new HashMap<>();
inheritedInScopePrefixes = new HashMap<>();
}
//TODO : consider dynamic instanciation
inScopeNamespaces = new HashMap<>();
inScopePrefixes = new HashMap<>();
}
// Pops in the exact reverse order of pushInScopeNamespaces(boolean).
@Override
public void popInScopeNamespaces() {
inScopePrefixes = namespaceStack.pop();
inScopeNamespaces = namespaceStack.pop();
inheritedInScopePrefixes = namespaceStack.pop();
inheritedInScopeNamespaces = namespaceStack.pop();
}
/**
 * Saves the static namespace maps on the stack and replaces them with
 * copies, so nested scopes can declare namespaces without affecting the
 * outer scope.
 */
@Override
public void pushNamespaceContext() {
final Map<String, String> m = new HashMap<>(staticNamespaces);
final Map<String, String> p = new HashMap<>(staticPrefixes);
namespaceStack.push(staticNamespaces);
namespaceStack.push(staticPrefixes);
staticNamespaces = m;
staticPrefixes = p;
}
// Restores the maps saved by pushNamespaceContext(), in reverse push order.
@Override
public void popNamespaceContext() {
staticPrefixes = namespaceStack.pop();
staticNamespaces = namespaceStack.pop();
}
/**
 * Marks the current position in the local variable chain so it can be
 * restored later via popLocalVariables(). When newContext is true the
 * current tail is also pushed as a scope boundary.
 */
@Override
public LocalVariable markLocalVariables(final boolean newContext) {
if (newContext) {
// dummy head variable so an empty chain still has a boundary marker
if (lastVar == null) {
lastVar = new LocalVariable(QName.EMPTY_QNAME);
}
contextStack.push(lastVar);
}
variableStackSize++;
return lastVar;
}
@Override
public void popLocalVariables(@Nullable final LocalVariable var) {
popLocalVariables(var, null);
}
/**
 * Restore the local variable stack to the position marked by variable var.
 *
 * @param var only clear variables after this variable, or null
 * @param resultSeq the result sequence; destroyed variables may release
 *                  resources not referenced by it
 */
public void popLocalVariables(@Nullable final LocalVariable var, final Sequence resultSeq) {
if (var != null) {
// clear all variables registered after var. they should be out of scope.
// closure variables are skipped: they outlive this scope
LocalVariable outOfScope = var.after;
while (outOfScope != null) {
if (outOfScope != var && !outOfScope.isClosureVar()) {
outOfScope.destroy(this, resultSeq);
}
outOfScope = outOfScope.after;
}
// reset the stack
var.after = null;
// pop the scope boundary if var was the marked boundary
if (!contextStack.isEmpty() && (var == contextStack.peek())) {
contextStack.pop();
}
}
lastVar = var;
variableStackSize--;
}
/**
 * Register a inline function using closure variables so it can be cleared
 * after query execution.
 *
 * @param func an inline function definition using closure variables
 */
void pushClosure(final UserDefinedFunction func) {
closures.add(func);
}
// Depth of nested markLocalVariables()/popLocalVariables() pairs.
@Override
public int getCurrentStackSize() {
return variableStackSize;
}
// Tracks the active function-call chain; used for tail-recursion detection.
@Override
public void functionStart(final FunctionSignature signature) {
callStack.push(signature);
}
@Override
public void functionEnd() {
if (callStack.isEmpty()) {
LOG.warn("Function call stack is empty, but XQueryContext.functionEnd() was called. This "
+ "could indicate a concurrency issue (shared XQueryContext?)");
} else {
callStack.pop();
}
}
// A call is (mutually) recursive if its signature is already on the stack.
@Override
public boolean tailRecursiveCall(final FunctionSignature signature) {
return callStack.contains(signature);
}
// Maps a module namespace to an explicit location for later imports.
@Override
public void mapModule(final String namespace, final XmldbURI uri) {
mappedModules.put(namespace, uri);
}
/**
 * Imports a module into this context. Resolution order: already-loaded
 * root module, EXPath repository, static configuration mapping, explicit
 * namespace-to-URI mapping, then Java built-in ("java:" prefix), database
 * source ("xmldb:" URIs), and finally file/URL sources. On success the
 * module's prefix/namespace are declared in this context.
 *
 * @param namespaceURI the module namespace, may be null (derived from the module)
 * @param prefix       the prefix to bind, may be null (module default used)
 * @param location     the location hint, may be null (resolved from namespace)
 * @return the imported module, or null
 * @throws XPathException XQST0070 for reserved prefixes, XQST0088 for an
 *                        empty namespace, XQST0059 when loading fails
 */
@Override
public Module importModule(String namespaceURI, String prefix, String location)
throws XPathException {
if (XML_NS_PREFIX.equals(prefix) || XMLNS_ATTRIBUTE.equals(prefix)) {
throw new XPathException(ErrorCodes.XQST0070, "The prefix declared for a module import must not be 'xml' or 'xmlns'.");
}
if (namespaceURI != null && namespaceURI.isEmpty()) {
throw new XPathException(ErrorCodes.XQST0088, "The first URILiteral in a module import must be of nonzero length.");
}
Module module = null;
if (namespaceURI != null) {
module = getRootModule(namespaceURI);
}
if (module != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Module " + namespaceURI + " already present.");
}
// Set locally to remember the dependency in case it was inherited.
setModule(namespaceURI, module);
} else {
// if location is not specified, try to resolve in expath repo
if (location == null && namespaceURI != null) {
module = resolveInEXPathRepository(namespaceURI, prefix);
}
if (module == null) {
if (location == null && namespaceURI != null) {
// check if there's a static mapping in the configuration
location = getModuleLocation(namespaceURI);
if (location == null) {
location = namespaceURI;
}
}
//Is the module's namespace mapped to a URL ?
if (mappedModules.containsKey(location)) {
location = mappedModules.get(location).toString();
}
// is it a Java module?
if (location.startsWith(JAVA_URI_START)) {
location = location.substring(JAVA_URI_START.length());
module = loadBuiltInModule(namespaceURI, location);
} else {
Source moduleSource;
if (location.startsWith(XmldbURI.XMLDB_URI_PREFIX)
|| ((location.indexOf(':') == -1) && moduleLoadPath.startsWith(XmldbURI.XMLDB_URI_PREFIX))) {
// Is the module source stored in the database?
try {
XmldbURI locationUri = XmldbURI.xmldbUriFor(location);
// relative database locations resolve against the module load path
if (moduleLoadPath.startsWith(XmldbURI.XMLDB_URI_PREFIX)) {
final XmldbURI moduleLoadPathUri = XmldbURI.xmldbUriFor(moduleLoadPath);
locationUri = moduleLoadPathUri.resolveCollectionPath(locationUri);
}
try (final LockedDocument lockedSourceDoc = getBroker().getXMLResource(locationUri.toCollectionPathURI(), LockMode.READ_LOCK)) {
final DocumentImpl sourceDoc = lockedSourceDoc == null ? null : lockedSourceDoc.getDocument();
if (sourceDoc == null) {
throw moduleLoadException("Module location hint URI '" + location + "' does not refer to anything.", location);
}
// the stored resource must be a binary document of XQuery mime type
if ((sourceDoc.getResourceType() != DocumentImpl.BINARY_FILE) || !"application/xquery".equals(sourceDoc.getMetadata().getMimeType())) {
throw moduleLoadException("Module location hint URI '" + location + "' does not refer to an XQuery.", location);
}
moduleSource = new DBSource(getBroker(), (BinaryDocument) sourceDoc, true);
// we don't know if the module will get returned, oh well
module = compileOrBorrowModule(prefix, namespaceURI, location, moduleSource);
} catch (final PermissionDeniedException e) {
throw moduleLoadException("Permission denied to read module source from location hint URI '" + location + ".", location, e);
}
} catch (final URISyntaxException e) {
throw moduleLoadException("Invalid module location hint URI '" + location + "'.", location, e);
}
} else {
// No. Load from file or URL
try {
//TODO: use URIs to ensure proper resolution of relative locations
moduleSource = SourceFactory.getSource(getBroker(), moduleLoadPath, location, true);
if (moduleSource == null) {
throw moduleLoadException("Source for module '" + namespaceURI + "' not found module location hint URI '" + location + "'.", location);
}
} catch (final MalformedURLException e) {
throw moduleLoadException("Invalid module location hint URI '" + location + "'.", location, e);
} catch (final IOException e) {
throw moduleLoadException("Source for module '" + namespaceURI + "' could not be read, module location hint URI '" + location + "'.", location, e);
} catch (final PermissionDeniedException e) {
throw moduleLoadException("Permission denied to read module source from location hint URI '" + location + ".", location, e);
}
// we don't know if the module will get returned, oh well
module = compileOrBorrowModule(prefix, namespaceURI, location, moduleSource);
}
}
} // NOTE: expathrepo related, closes the EXPath else (if module != null)
}
if (module != null) {
// fill in namespace/prefix from the module when the import omitted them
if (namespaceURI == null) {
namespaceURI = module.getNamespaceURI();
}
if (prefix == null) {
prefix = module.getDefaultPrefix();
}
declareNamespace(prefix, namespaceURI);
}
return module;
}
/**
 * Builds an err:XQST0059 exception signalling that a module could not be loaded.
 *
 * @param message        human-readable description of the failure
 * @param moduleLocation the module location hint that failed
 * @return the new exception, with the location hint attached as the error value
 * @throws XPathException never actually thrown; declared for caller convenience
 */
protected XPathException moduleLoadException(final String message, final String moduleLocation)
        throws XPathException {
    final StringValue location = new StringValue(moduleLocation);
    return new XPathException(ErrorCodes.XQST0059, message, new ValueSequence(location));
}

/**
 * Builds an err:XQST0059 exception signalling that a module could not be loaded,
 * preserving the underlying cause.
 *
 * @param message        human-readable description of the failure
 * @param moduleLocation the module location hint that failed
 * @param e              the underlying cause of the failure
 * @return the new exception, with the location hint attached as the error value
 * @throws XPathException never actually thrown; declared for caller convenience
 */
protected XPathException moduleLoadException(final String message, final String moduleLocation, final Exception e)
        throws XPathException {
    final StringValue location = new StringValue(moduleLocation);
    return new XPathException(ErrorCodes.XQST0059, message, new ValueSequence(location), e);
}
/**
 * Looks up the configured (static) location for the given module namespace.
 *
 * @param namespaceURI the namespace URI of the module
 * @return the configured location, or null if the namespace is not mapped
 */
@SuppressWarnings("unchecked")
@Override
public String getModuleLocation(final String namespaceURI) {
    final Object property = getBroker().getConfiguration().getProperty(PROPERTY_STATIC_MODULE_MAP);
    return ((Map<String, String>) property).get(namespaceURI);
}

/**
 * Enumerates the module namespace URIs for which a static location is configured.
 *
 * @return an iterator over the mapped namespace URIs
 */
@SuppressWarnings("unchecked")
@Override
public Iterator<String> getMappedModuleURIs() {
    final Object property = getBroker().getConfiguration().getProperty(PROPERTY_STATIC_MODULE_MAP);
    return ((Map<String, String>) property).keySet().iterator();
}
/**
 * Compile a module, or borrow an already compiled module from the cache.
 * <p>
 * On success the module is registered with this context and any global
 * variables belonging to its namespace are moved into it.
 *
 * @param prefix the module namespace prefix
 * @param namespaceURI the module namespace URI
 * @param location the location hint
 * @param source the source for the module
 *
 * @return the module or null
 *
 * @throws XPathException if the module could not be loaded (XQST0059) or compiled (XPST0003)
 */
private ExternalModule compileOrBorrowModule(final String prefix, final String namespaceURI, final String location,
final Source source) throws XPathException {
final ExternalModule module = compileModule(prefix, namespaceURI, location, source);
if (module != null) {
// register under the namespace the module itself declares (may differ from the import statement)
setModule(module.getNamespaceURI(), module);
declareModuleVars(module);
}
return module;
}
/**
 * Compile an XQuery Module
 *
 * @param prefix the namespace prefix of the module.
 * @param namespaceURI the namespace URI of the module; may be null, in which case
 *     it is taken from the module declaration found in the source.
 * @param location the location of the module
 * @param source the source of the module.
 * @return The compiled module, or null if the source is not a module
 * @throws XPathException if the module could not be loaded (XQST0059) or compiled (XPST0003)
 */
private @Nullable
ExternalModule compileModule(final String prefix, String namespaceURI, final String location,
final Source source) throws XPathException {
if (LOG.isDebugEnabled()) {
LOG.debug("Loading module from " + location);
}
try (final Reader reader = source.getReader()) {
if (reader == null) {
throw moduleLoadException("failed to load module: '" + namespaceURI + "' from: " +
"'" + source + "', location: '" + location + "'. Source not found. ", location);
}
// no namespace given by the caller: ask the source for its module declaration
if (namespaceURI == null) {
final QName qname = source.isModule();
if (qname == null) {
// not a library module at all
return null;
}
namespaceURI = qname.getNamespaceURI();
}
final ExternalModuleImpl modExternal = new ExternalModuleImpl(namespaceURI, prefix);
// register the (not yet compiled) module up-front; presumably so that
// recursive imports can see it — TODO confirm
setModule(namespaceURI, modExternal);
// the module gets its own static context, derived from this one
final XQueryContext modContext = new ModuleContext(this, prefix, namespaceURI, location);
modExternal.setContext(modContext);
final XQueryLexer lexer = new XQueryLexer(modContext, reader);
final XQueryParser parser = new XQueryParser(lexer);
final XQueryTreeParser astParser = new XQueryTreeParser(modContext, modExternal);
try {
// phase 1: parse the module source into an AST
parser.xpath();
if (parser.foundErrors()) {
if (LOG.isDebugEnabled()) {
LOG.debug(parser.getErrorMessage());
}
throw new XPathException(ErrorCodes.XPST0003, "error found while loading module from " + location + ": " + parser.getErrorMessage());
}
final AST ast = parser.getAST();
// phase 2: translate the AST into an expression tree rooted at 'path'
final PathExpr path = new PathExpr(modContext);
astParser.xpath(ast, path);
if (astParser.foundErrors()) {
throw new XPathException(ErrorCodes.XPST0003, "error found while loading module from " + location + ": " + astParser.getErrorMessage(), astParser.getLastException());
}
modExternal.setRootExpression(path);
// the namespace declared in the module must match the namespace requested by the import
if (namespaceURI != null && !modExternal.getNamespaceURI().equals(namespaceURI)) {
throw new XPathException(ErrorCodes.XQST0059, "namespace URI declared by module (" + modExternal.getNamespaceURI() + ") does not match namespace URI in import statement, which was: " + namespaceURI);
}
// Set source information on module context
// String sourceClassName = source.getClass().getName();
// modContext.setSourceKey(source.getKey().toString());
// Extract the source type from the classname by removing the package prefix and the "Source" suffix
// modContext.setSourceType( sourceClassName.substring( 17, sourceClassName.length() - 6 ) );
modExternal.setSource(source);
modContext.setSource(source);
// only now is the module fully usable
modExternal.setIsReady(true);
return modExternal;
} catch (final RecognitionException e) {
// parser error with position information
throw new XPathException(e.getLine(), e.getColumn(), ErrorCodes.XPST0003, "error found while loading module from " + location + ": " + e.getMessage());
} catch (final TokenStreamException e) {
throw new XPathException(ErrorCodes.XPST0003, "error found while loading module from " + location + ": " + e.getMessage(), e);
} catch (final XPathException e) {
// add the module location for context, then rethrow unchanged
e.prependMessage("Error while loading module " + location + ": ");
throw e;
}
} catch (final IOException e) {
throw moduleLoadException("IO exception while loading module '" + namespaceURI + "'" + " from '" + source + "'", location, e);
}
}
/**
 * Moves every global variable whose QName lives in the module's namespace
 * out of this context's global variables and into the module itself.
 *
 * @param module the module that should own the variables
 */
private void declareModuleVars(final Module module) {
    final String targetNamespace = module.getNamespaceURI();
    final Iterator<Variable> it = globalVariables.values().iterator();
    while (it.hasNext()) {
        final Variable variable = it.next();
        if (!targetNamespace.equals(variable.getQName().getNamespaceURI())) {
            continue;
        }
        module.declareVariable(variable);
        it.remove();
    }
}
/**
 * Records a call to a user-defined function that has not been declared yet;
 * it will be resolved once the whole query has been parsed.
 *
 * @param call the unresolved function call
 */
@Override
public void addForwardReference(final FunctionCall call) {
    forwardReferences.add(call);
}

/**
 * Resolves all recorded forward references against the functions now known
 * to their respective contexts.
 *
 * @throws XPathException XPST0017 if a referenced function was never declared
 */
@Override
public void resolveForwardReferences() throws XPathException {
    while (!forwardReferences.isEmpty()) {
        final FunctionCall call = forwardReferences.pop();
        final UserDefinedFunction target =
                call.getContext().resolveFunction(call.getQName(), call.getArgumentCount());
        if (target == null) {
            throw new XPathException(call, ErrorCodes.XPST0017,
                    "Call to undeclared function: " + call.getQName().getStringValue());
        }
        call.resolveForwardReference(target);
    }
}
/**
 * Get environment variables. The variables shall not change
 * during execution of query.
 *
 * @return Map of environment variables
 */
public Map<String, String> getEnvironmentVariables() {
// captured lazily, exactly once, so the query observes a stable snapshot
if (envs == null) {
envs = System.getenv();
}
return envs;
}
/**
 * Gets the Effective user
 * i.e. the user that the query is executing as
 *
 * @return The Effective User
 */
public Subject getEffectiveUser() {
return getBroker().getCurrentSubject();
}
/**
 * Gets the Real User
 * i.e. the user that initiated execution of the query
 * Note this is not necessarily the same as the user that the
 * query is executing as
 *
 * @return The Real User
 * @see org.exist.xquery.XQueryContext#getEffectiveUser()
 */
public Subject getRealUser() {
return realUser;
}
/**
 * Sets the user that initiated execution of the query.
 *
 * @param realUser the real (initiating) user
 */
private void setRealUser(final Subject realUser) {
this.realUser = realUser;
}
/**
 * Get a static decimal format.
 *
 * @param qnDecimalFormat the name of the decimal format, or null for the UNNAMED format.
 *
 * @return the decimal format, or null if there is no format matching the name
 */
public @Nullable DecimalFormat getStaticDecimalFormat(@Nullable final QName qnDecimalFormat) {
    final QName key = qnDecimalFormat != null ? qnDecimalFormat : UNNAMED_DECIMAL_FORMAT;
    return staticDecimalFormats.get(key);
}

/**
 * Set a static decimal format.
 *
 * @param qnDecimalFormat the name of the decimal format
 * @param decimalFormat   the decimal format
 */
public void setStaticDecimalFormat(final QName qnDecimalFormat, final DecimalFormat decimalFormat) {
    staticDecimalFormats.put(qnDecimalFormat, decimalFormat);
}
/**
 * Snapshot of the parts of the context state that a dynamic module import
 * (e.g. util:import-module) may modify; the snapshot is restored during
 * {@link XQueryContext#reset()}.
 */
private class SavedState {
    private Map<String, Module> modulesSaved = null;
    private Map<String, Module> allModulesSaved = null;
    private Map<String, String> staticNamespacesSaved = null;
    private Map<String, String> staticPrefixesSaved = null;

    /**
     * Takes the snapshot; subsequent calls are no-ops until {@link #restore()}
     * has cleared it again.
     */
    void save() {
        if (modulesSaved == null) {
            // diamond constructors instead of the previous raw-typed
            // new HashMap(...) calls, which produced unchecked warnings
            modulesSaved = new HashMap<>(modules);
            allModulesSaved = new HashMap<>(allModules);
            staticNamespacesSaved = new HashMap<>(staticNamespaces);
            staticPrefixesSaved = new HashMap<>(staticPrefixes);
        }
    }

    /**
     * Restores the snapshot taken by {@link #save()}, if any, and clears it.
     */
    void restore() {
        if (modulesSaved != null) {
            modules = modulesSaved;
            modulesSaved = null;
            allModules = allModulesSaved;
            allModulesSaved = null;
            staticNamespaces = staticNamespacesSaved;
            staticNamespacesSaved = null;
            staticPrefixes = staticPrefixesSaved;
            staticPrefixesSaved = null;
        }
    }
}
/**
 * Before a dynamic import, make sure relevant parts of the current context are saved
 * to the stack. This is important for util:import-module. The context will be restored
 * during {@link #reset()}.
 */
public void saveState() {
savedState.save();
}
/**
 * @return true if the query optimizer is enabled for this context
 */
@Override
public boolean optimizationsEnabled() {
return enableOptimizer;
}
/**
 * Declares a compile-time option (i.e. "declare option"), lazily creating
 * the static option list.
 */
@Override
public void addOption(final String name, final String value) throws XPathException {
if (staticOptions == null) {
staticOptions = new ArrayList<>();
}
addOption(staticOptions, name, value);
}
/**
 * Declares a run-time option (i.e. util:declare-option()), lazily creating
 * the dynamic option list.
 */
@Override
public void addDynamicOption(final String name, final String value) throws XPathException {
if (dynamicOptions == null) {
dynamicOptions = new ArrayList<>();
}
addOption(dynamicOptions, name, value);
}
/**
 * Adds an option to the given list, replacing any equal option already present,
 * and immediately applies the side effects of the predefined eXist options
 * (profiling, watchdog timeout / output size, optimizer switch, implicit
 * timezone, current dateTime).
 *
 * @param options the option list to update (static or dynamic)
 * @param name    the possibly prefixed option name
 * @param value   the option contents
 * @throws XPathException XPST0081 if the option name uses an undeclared prefix
 */
private void addOption(final List<Option> options, final String name, final String value) throws XPathException {
    final QName qn;
    try {
        qn = QName.parse(this, name, defaultFunctionNamespace);
    } catch (final QName.IllegalQNameException e) {
        throw new XPathException(ErrorCodes.XPST0081, "No namespace defined for prefix " + name);
    }

    final Option option = new Option(qn, value);
    // List.remove(Object) drops the first equal entry (if any),
    // so a re-declared option replaces the previous declaration
    options.remove(option);
    options.add(option);

    // check predefined options and apply their effects right away
    if (Option.PROFILE_QNAME.compareTo(qn) == 0) {
        // configure profiling
        profiler.configure(option);
    } else if (Option.TIMEOUT_QNAME.compareTo(qn) == 0) {
        watchdog.setTimeoutFromOption(option);
    } else if (Option.OUTPUT_SIZE_QNAME.compareTo(qn) == 0) {
        watchdog.setMaxNodesFromOption(option);
    } else if (Option.OPTIMIZE_QNAME.compareTo(qn) == 0) {
        final String[] params = option.tokenizeContents();
        if (params.length > 0) {
            final String[] param = Option.parseKeyValuePair(params[0]);
            if (param != null && "enable".equals(param[0])) {
                enableOptimizer = "yes".equals(param[1]);
            }
        }
    }
    //TODO : not sure how these 2 options might/have to be related
    else if (Option.OPTIMIZE_IMPLICIT_TIMEZONE.compareTo(qn) == 0) {
        //TODO : error check
        final Duration duration = TimeUtils.getInstance().newDuration(option.getContents());
        implicitTimeZone = new SimpleTimeZone((int) duration.getTimeInMillis(new Date()), "XQuery context");
    } else if (Option.CURRENT_DATETIME.compareTo(qn) == 0) {
        //TODO : error check
        final DateTimeValue dtv = new DateTimeValue(option.getContents());
        calendar = (XMLGregorianCalendar) dtv.calendar.clone();
    }
}
/**
 * Looks up a declared option by name; dynamically declared options take
 * precedence over statically declared ones.
 *
 * @param qname the name of the option
 * @return the matching option, or null if none is declared
 */
@Override
public Option getOption(final QName qname) {
    final Option dynamic = findOption(dynamicOptions, qname);
    if (dynamic != null) {
        return dynamic;
    }
    return findOption(staticOptions, qname);
}

/** Returns the first option in the list with the given name, or null. */
private static @Nullable Option findOption(@Nullable final List<Option> options, final QName qname) {
    if (options != null) {
        for (final Option option : options) {
            if (qname.compareTo(option.getQName()) == 0) {
                return option;
            }
        }
    }
    return null;
}
/**
 * Creates the pragma implementation for the given name, if the name denotes
 * one of the pragmas known to eXist.
 *
 * @param name     the (possibly prefixed) pragma name
 * @param contents the raw pragma contents
 * @return the pragma, or null if the name is not recognised
 * @throws XPathException XPST0081 if the prefix is undeclared or the pragma
 *     name is in no namespace
 */
@Override
public Pragma getPragma(final String name, String contents) throws XPathException {
    final QName qname;
    try {
        qname = QName.parse(this, name);
    } catch (final QName.IllegalQNameException e) {
        throw new XPathException(ErrorCodes.XPST0081, "No namespace defined for prefix " + name);
    }

    if (qname.getNamespaceURI().isEmpty()) {
        throw new XPathException("XPST0081: pragma's ('" + name + "') namespace URI is empty");
    }
    if (!Namespaces.EXIST_NS.equals(qname.getNamespaceURI())) {
        // pragmas outside the eXist namespace are simply ignored
        return null;
    }

    contents = StringValue.trimWhitespace(contents);
    if (TimerPragma.TIMER_PRAGMA.equals(qname)) {
        return new TimerPragma(qname, contents);
    } else if (Optimize.OPTIMIZE_PRAGMA.equals(qname)) {
        return new Optimize(this, qname, contents, true);
    } else if (ForceIndexUse.EXCEPTION_IF_INDEX_NOT_USED_PRAGMA.equals(qname)) {
        return new ForceIndexUse(qname, contents);
    } else if (ProfilePragma.PROFILING_PRAGMA.equals(qname)) {
        return new ProfilePragma(qname, contents);
    } else if (NoIndexPragma.NO_INDEX_PRAGMA.equals(qname)) {
        return new NoIndexPragma(qname, contents);
    }
    return null;
}
/**
 * Stores an in-memory document fragment as a temporary resource in the database.
 *
 * @param doc the in-memory document to store
 * @return the stored (persistent) document
 * @throws XPathException if storage fails
 */
@Override
public DocumentImpl storeTemporaryDoc(final org.exist.dom.memtree.DocumentImpl doc) throws XPathException {
try {
final DocumentImpl targetDoc = getBroker().storeTempResource(doc);
if (targetDoc == null) {
throw new XPathException("Internal error: failed to store temporary doc fragment");
}
// NOTE(review): WARN-level log with a Throwable (stack trace) on every
// successful store looks like leftover debugging — confirm intent
LOG.warn("Stored: " + targetDoc.getDocId() + ": " + targetDoc.getURI(), new Throwable());
return targetDoc;
} catch (final EXistException | LockException | PermissionDeniedException e) {
throw new XPathException(TEMP_STORE_ERROR, e);
}
}
// Stores an arbitrary named attribute on this context.
@Override
public void setAttribute(final String attribute, final Object value) {
attributes.put(attribute, value);
}
// Retrieves a named attribute previously stored via setAttribute(), or null.
@Override
public Object getAttribute(final String attribute) {
return attributes.get(attribute);
}
/**
 * Load the default prefix/namespace mappings table and set up internal functions.
 *
 * @param config the configuration
 */
@SuppressWarnings("unchecked")
void loadDefaults(final Configuration config) {
this.watchdog = new XQueryWatchDog(this);
/*
SymbolTable syms = broker.getSymbols();
String[] pfx = syms.defaultPrefixList();
namespaces = new HashMap(pfx.length);
prefixes = new HashMap(pfx.length);
String sym;
for (int i = 0; i < pfx.length; i++) {
sym = syms.getDefaultNamespace(pfx[i]);
namespaces.put(pfx[i], sym);
prefixes.put(sym, pfx[i]);
}
*/
loadDefaultNS();
// Switch: enable optimizer (default: off unless explicitly "yes")
Object param = config.getProperty(PROPERTY_ENABLE_QUERY_REWRITING);
enableOptimizer = (param != null) && "yes".equals(param.toString());
// Switch: Backward compatibility (default: on when unset)
param = config.getProperty(PROPERTY_XQUERY_BACKWARD_COMPATIBLE);
backwardsCompatible = (param == null) || "yes".equals(param.toString());
// Switch: raiseErrorOnFailedRetrieval (default: off when unset)
final Boolean option = ((Boolean) config.getProperty(PROPERTY_XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL));
raiseErrorOnFailedRetrieval = (option != null) && option;
// Get map of built-in modules
final Map<String, Class<Module>> builtInModules = (Map) config.getProperty(PROPERTY_BUILT_IN_MODULES);
if (builtInModules != null) {
// Iterate on all map entries
for (final Map.Entry<String, Class<Module>> entry : builtInModules.entrySet()) {
// Get URI and class
final String namespaceURI = entry.getKey();
final Class<Module> moduleClass = entry.getValue();
// first check if the module has already been loaded in the parent context
final Module module = getModule(namespaceURI);
if (module == null) {
// Module does not exist yet, instantiate
instantiateModule(namespaceURI, moduleClass,
(Map<String, Map<String, List<? extends Object>>>) config.getProperty(PROPERTY_MODULE_PARAMETERS));
} else if (getPrefixForURI(module.getNamespaceURI()) == null && !module.getDefaultPrefix().isEmpty()) {
// make sure the namespaces of default modules are known,
// even if they were imported in a parent context
try {
declareNamespace(module.getDefaultPrefix(), module.getNamespaceURI());
} catch (final XPathException e) {
LOG.warn("Internal error while loading default modules: " + e.getMessage(), e);
}
}
}
}
}
/**
 * Load default namespaces, e.g. xml, xsi, xdt, fn, local, exist and dbgp.
 */
private void loadDefaultNS() {
try {
// default namespaces
// the xml prefix is fixed by the XML spec, so it is put into the maps
// directly rather than going through declareNamespace()
staticNamespaces.put(XML_NS_PREFIX, XML_NS);
staticPrefixes.put(XML_NS, XML_NS_PREFIX);
declareNamespace("xs", Namespaces.SCHEMA_NS);
declareNamespace("xsi", Namespaces.SCHEMA_INSTANCE_NS);
//required for backward compatibility
declareNamespace("xdt", Namespaces.XPATH_DATATYPES_NS);
declareNamespace("fn", Namespaces.XPATH_FUNCTIONS_NS);
declareNamespace("local", Namespaces.XQUERY_LOCAL_NS);
declareNamespace(Namespaces.W3C_XQUERY_XPATH_ERROR_PREFIX, Namespaces.W3C_XQUERY_XPATH_ERROR_NS);
//*not* as standard NS
declareNamespace(Namespaces.EXIST_NS_PREFIX, Namespaces.EXIST_NS);
declareNamespace(Namespaces.EXIST_JAVA_BINDING_NS_PREFIX, Namespaces.EXIST_JAVA_BINDING_NS);
declareNamespace(Namespaces.EXIST_XQUERY_XPATH_ERROR_PREFIX, Namespaces.EXIST_XQUERY_XPATH_ERROR_NS);
//TODO : include "err" namespace ?
declareNamespace("dbgp", Debuggee.NAMESPACE_URI);
} catch (final XPathException e) {
//ignored because it should never happen
if (LOG.isDebugEnabled()) {
LOG.debug(e);
}
}
}
/**
 * Registers a listener to be notified of document updates; the shared
 * context-level listener is created lazily and subscribed with the
 * broker pool's notification service on first use.
 *
 * @param listener the listener to register
 */
@Override
public void registerUpdateListener(final UpdateListener listener) {
if (updateListener == null) {
updateListener = new ContextUpdateListener();
final DBBroker broker = getBroker();
broker.getBrokerPool().getNotificationService().subscribe(updateListener);
}
updateListener.addListener(listener);
}
/**
 * Unsubscribes the shared context-level listener (if any) from the
 * notification service and discards it.
 */
protected void clearUpdateListeners() {
if (updateListener != null) {
final DBBroker broker = getBroker();
broker.getBrokerPool().getNotificationService().unsubscribe(updateListener);
}
updateListener = null;
}
/**
 * Copies serialization-related options into the given properties: first the
 * legacy exist:serialize pragma, then options from the W3C serialization
 * namespace. Dynamic options always apply; static options only fill in keys
 * that are not already present.
 *
 * @param properties the serialization properties to update
 * @throws XPathException if an option holds an invalid serialization property
 */
@Override
public void checkOptions(final Properties properties) throws XPathException {
    checkLegacyOptions(properties);

    if (dynamicOptions != null) {
        for (final Option option : dynamicOptions) {
            final QName optionName = option.getQName();
            if (Namespaces.XSLT_XQUERY_SERIALIZATION_NS.equals(optionName.getNamespaceURI())) {
                SerializerUtils.setProperty(optionName.getLocalPart(), option.getContents(), properties,
                        inScopeNamespaces::get);
            }
        }
    }

    if (staticOptions != null) {
        for (final Option option : staticOptions) {
            final QName optionName = option.getQName();
            final boolean serializationOption =
                    Namespaces.XSLT_XQUERY_SERIALIZATION_NS.equals(optionName.getNamespaceURI());
            // static options must not override dynamic ones already applied
            if (serializationOption && !properties.containsKey(optionName.getLocalPart())) {
                SerializerUtils.setProperty(optionName.getLocalPart(), option.getContents(), properties,
                        inScopeNamespaces::get);
            }
        }
    }
}

/**
 * Legacy method to check serialization properties set via option exist:serialize.
 *
 * @param properties the serialization properties
 * @throws XPathException if there is an unknown serialization property
 */
private void checkLegacyOptions(final Properties properties) throws XPathException {
    final Option pragma = getOption(Option.SERIALIZE_QNAME);
    if (pragma == null) {
        return;
    }

    for (final String content : pragma.tokenizeContents()) {
        final String[] pair = Option.parseKeyValuePair(content);
        if (pair == null) {
            throw new XPathException("Unknown parameter found in " + pragma.getQName().getStringValue()
                    + ": '" + content + "'");
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Setting serialization property from pragma: " + pair[0] + " = " + pair[1]);
        }
        properties.setProperty(pair[0], pair[1]);
    }
}
/**
 * Attaches the debuggee joint used for remote debugging sessions.
 *
 * @param joint the debuggee joint
 */
@Override
public void setDebuggeeJoint(final DebuggeeJoint joint) {
//XXX: if (debuggeeJoint != null) ???
debuggeeJoint = joint;
}
/**
 * @return the attached debuggee joint, or null if none is attached
 */
@Override
public DebuggeeJoint getDebuggeeJoint() {
return debuggeeJoint;
}
/**
 * @return true if a debuggee joint is attached and a debug session variable is declared
 */
@Override
public boolean isDebugMode() {
return debuggeeJoint != null && isVarDeclared(Debuggee.SESSION);
}
/**
 * @return true if a debug session variable is declared (a debuggee joint may not be attached yet)
 */
@Override
public boolean requireDebugMode() {
return isVarDeclared(Debuggee.SESSION);
}
// BinaryValues created during query execution; lazily initialised by
// registerBinaryValueInstance(), used as a stack (push/iterate)
private Deque<BinaryValue> binaryValueInstances;
/**
 * Called when entering an XQuery enclosed expression: takes an extra shared
 * reference on every tracked BinaryValue so it stays usable until the
 * matching exitEnclosedExpr() call releases it.
 */
void enterEnclosedExpr() {
if (binaryValueInstances != null) {
final Iterator<BinaryValue> it = binaryValueInstances.descendingIterator();
while (it.hasNext()) {
it.next().incrementSharedReferences();
}
}
}
/**
 * Called when leaving an XQuery enclosed expression: releases the shared
 * reference taken in enterEnclosedExpr() and eagerly drops any BinaryValue
 * whose reference count reached zero, so its memory can be reclaimed.
 */
void exitEnclosedExpr() {
    if (binaryValueInstances == null) {
        return;
    }

    List<BinaryValue> fullyClosed = null;
    for (final BinaryValue binaryValue : binaryValueInstances) {
        try {
            binaryValue.close(); // really just decrements a reference
            if (binaryValue.isClosed()) {
                if (fullyClosed == null) {
                    fullyClosed = new ArrayList<>();
                }
                fullyClosed.add(binaryValue);
            }
        } catch (final IOException e) {
            LOG.warn("Unable to close binary reference on exiting enclosed expression: " + e.getMessage(), e);
        }
    }

    // eagerly cleanup those BinaryValues that are not used outside the EnclosedExpr (to release memory)
    if (fullyClosed != null) {
        for (final BinaryValue binaryValue : fullyClosed) {
            binaryValueInstances.remove(binaryValue);
        }
    }
}
/**
 * Tracks a BinaryValue created during query execution so that its resources
 * can be released by the BinaryValueCleanupTask when the query finishes.
 *
 * @param binaryValue the binary value to track
 */
@Override
public void registerBinaryValueInstance(final BinaryValue binaryValue) {
    if (binaryValueInstances == null) {
        binaryValueInstances = new ArrayDeque<>();
    }

    // ensure exactly one BinaryValueCleanupTask is registered for this context
    final boolean cleanupTaskRegistered =
            cleanupTasks.stream().anyMatch(task -> task instanceof BinaryValueCleanupTask);
    if (!cleanupTaskRegistered) {
        cleanupTasks.add(new BinaryValueCleanupTask());
    }

    binaryValueInstances.push(binaryValue);
}
/**
 * Cleanup Task responsible for releasing the streams of any {@link BinaryValue}
 * which was used during query execution.
 */
public static class BinaryValueCleanupTask implements CleanupTask {
    @Override
    public void cleanup(final XQueryContext context, final Predicate<Object> predicate) {
        if (context.binaryValueInstances == null) {
            return;
        }

        List<BinaryValue> closed = null;
        for (final BinaryValue binaryValue : context.binaryValueInstances) {
            if (!predicate.test(binaryValue)) {
                continue;
            }
            try {
                binaryValue.close();
                if (closed == null) {
                    closed = new ArrayList<>();
                }
                closed.add(binaryValue);
            } catch (final IOException e) {
                LOG.error("Unable to close binary value: " + e.getMessage(), e);
            }
        }

        // remove the closed values from the tracking deque
        if (closed != null) {
            for (final BinaryValue binaryValue : closed) {
                context.binaryValueInstances.remove(binaryValue);
            }
        }
    }
}
/**
 * @return the configured class name of the binary cache implementation
 */
@Override
public String getCacheClass() {
return (String) getBroker().getConfiguration().getProperty(Configuration.BINARY_CACHE_CLASS_PROPERTY);
}
/**
 * Stops tracking a BinaryValue, e.g. after it has been fully consumed.
 *
 * @param value the binary value to stop tracking
 */
public void destroyBinaryValue(final BinaryValue value) {
if (binaryValueInstances != null) {
binaryValueInstances.remove(value);
}
}
// Sets the XQuery language version declared by the query (e.g. via a version declaration).
public void setXQueryVersion(int version) {
xqueryVersion = version;
}
// Returns the XQuery language version in effect for this context.
public int getXQueryVersion() {
return xqueryVersion;
}
/**
 * @return the source of the query being executed in this context
 */
@Override
public Source getSource() {
return source;
}
/**
 * Sets the source of the query being executed in this context.
 *
 * @param source the query source
 */
@Override
public void setSource(final Source source) {
this.source = source;
}
/**
 * NOTE: the {@link #unsubscribe()} method can be called
 * from {@link org.exist.storage.NotificationService#unsubscribe(UpdateListener)}
 * by another thread, so this class needs to be thread-safe.
 */
@ThreadSafe
private static class ContextUpdateListener implements UpdateListener {

    /*
     * We use concurrency-safe data structures here, so that we don't have
     * to block any calling threads.
     *
     * The AtomicReference enables us to atomically swap out the listeners
     * in #unsubscribe() and maintain happens-before integrity whilst
     * unsubscribing them. The CopyOnWriteArrayList allows
     * us to add listeners whilst iterating over a snapshot
     * of existing listeners in other methods.
     */
    private final AtomicReference<List<UpdateListener>> listeners = new AtomicReference<>(new CopyOnWriteArrayList<>());

    /** Registers an additional listener with this context-level listener. */
    private void addListener(final UpdateListener listener) {
        listeners.get().add(listener);
    }

    @Override
    public void documentUpdated(final DocumentImpl document, final int event) {
        listeners.get().forEach(listener -> listener.documentUpdated(document, event));
    }

    @Override
    public void unsubscribe() {
        // atomically detach the current listeners and install a fresh empty
        // list; getAndSet replaces the previous hand-rolled CAS retry loop
        // with identical semantics
        final List<UpdateListener> prev = listeners.getAndSet(new CopyOnWriteArrayList<>());
        prev.forEach(UpdateListener::unsubscribe);
    }

    @Override
    public void nodeMoved(final NodeId oldNodeId, final NodeHandle newNode) {
        listeners.get().forEach(listener -> listener.nodeMoved(oldNodeId, newNode));
    }

    @Override
    public void debug() {
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("XQueryContext: %s document update listeners", listeners.get().size()));
        }
        listeners.get().forEach(UpdateListener::debug);
    }
}
// Tasks to run (and then discard) when the query finishes; see runCleanupTasks()
private final List<CleanupTask> cleanupTasks = new ArrayList<>();
/**
 * Registers a task to be executed when the query finishes.
 *
 * @param cleanupTask the task to run during cleanup
 */
public void registerCleanupTask(final CleanupTask cleanupTask) {
cleanupTasks.add(cleanupTask);
}
/**
 * A task which releases resources held by the context at the end of query
 * execution; the predicate decides which tracked objects to clean up.
 */
public interface CleanupTask {
void cleanup(final XQueryContext context, final Predicate<Object> predicate);
}
/**
 * Runs all registered cleanup tasks and then clears the task list.
 * A failing task is logged and skipped, so the remaining tasks still run.
 *
 * @param predicate decides which tracked objects each task should clean up
 */
@Override
public void runCleanupTasks(final Predicate<Object> predicate) {
    for (final CleanupTask task : cleanupTasks) {
        try {
            task.cleanup(this, predicate);
        } catch (final Throwable t) {
            // deliberately broad: one broken task must not prevent the others
            LOG.error("Cleaning up XQueryContext: Ignoring: " + t.getMessage(), t);
        }
    }

    // now it is safe to clear the cleanup tasks list as we know they have run
    // do not move this anywhere else
    cleanupTasks.clear();
}
/**
 * Immutable holder for the HTTP request, response and session associated
 * with the execution of a query.
 */
@Immutable
public static class HttpContext {
private final RequestWrapper request;
private final ResponseWrapper response;
private final SessionWrapper session;
/**
 * @param request  the HTTP request
 * @param response the HTTP response
 * @param session  the HTTP session, may be null
 */
public HttpContext(final RequestWrapper request, final ResponseWrapper response, final SessionWrapper session) {
this.request = request;
this.response = response;
this.session = session;
}
/**
 * Constructs a context whose session is taken from the request
 * (an existing session only; none is created).
 *
 * @param request  the HTTP request
 * @param response the HTTP response
 */
public HttpContext(final RequestWrapper request, final ResponseWrapper response) {
this.request = request;
this.response = response;
this.session = request.getSession(false);
}
/** @return the HTTP request */
public RequestWrapper getRequest() {
return request;
}
/** @return the HTTP response */
public ResponseWrapper getResponse() {
return response;
}
/** @return the HTTP session, may be null */
public SessionWrapper getSession() {
return session;
}
/**
 * Returns a new HttpContext with the new session set.
 *
 * The request and response are referenced from this object.
 *
 * @param newSession the new session to set.
 * @return the new HttpContext.
 */
public HttpContext setSession(final SessionWrapper newSession) {
return new HttpContext(request, response, newSession);
}
}
}
|
exist-core/src/main/java/org/exist/xquery/XQueryContext.java
|
/*
* eXist Open Source Native XML Database
* Copyright (C) 2001-2018 The eXist Project
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.exist.xquery;
import java.io.IOException;
import java.io.Reader;
import java.lang.reflect.Constructor;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nullable;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.Duration;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.stream.XMLStreamException;
import antlr.RecognitionException;
import antlr.TokenStreamException;
import antlr.collections.AST;
import com.evolvedbinary.j8fu.Either;
import com.evolvedbinary.j8fu.function.TriFunctionE;
import com.evolvedbinary.j8fu.function.QuadFunctionE;
import com.evolvedbinary.j8fu.tuple.Tuple2;
import com.ibm.icu.text.Collator;
import net.jcip.annotations.Immutable;
import net.jcip.annotations.ThreadSafe;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.exist.Database;
import org.exist.EXistException;
import org.exist.Namespaces;
import org.exist.collections.Collection;
import org.exist.debuggee.Debuggee;
import org.exist.debuggee.DebuggeeJoint;
import org.exist.dom.persistent.*;
import org.exist.dom.QName;
import org.exist.http.servlets.*;
import org.exist.interpreter.Context;
import org.exist.dom.memtree.InMemoryXMLStreamReader;
import org.exist.dom.memtree.MemTreeBuilder;
import org.exist.dom.memtree.NodeImpl;
import org.exist.numbering.NodeId;
import org.exist.repo.ExistRepository;
import org.exist.security.AuthenticationException;
import org.exist.security.Permission;
import org.exist.security.PermissionDeniedException;
import org.exist.security.Subject;
import org.exist.source.*;
import org.exist.stax.ExtendedXMLStreamReader;
import org.exist.storage.DBBroker;
import org.exist.storage.UpdateListener;
import org.exist.storage.lock.Lock.LockMode;
import org.exist.storage.lock.LockedDocumentMap;
import org.exist.storage.txn.Txn;
import org.exist.util.Collations;
import org.exist.util.Configuration;
import org.exist.util.LockException;
import org.exist.util.hashtable.NamePool;
import org.exist.xmldb.XmldbURI;
import org.exist.xquery.parser.*;
import org.exist.xquery.pragmas.*;
import org.exist.xquery.update.Modification;
import org.exist.xquery.util.SerializerUtils;
import org.exist.xquery.value.*;
import org.w3c.dom.Node;
import static com.evolvedbinary.j8fu.tuple.Tuple.Tuple;
import static javax.xml.XMLConstants.XMLNS_ATTRIBUTE;
import static javax.xml.XMLConstants.XML_NS_PREFIX;
import static org.exist.Namespaces.XML_NS;
/**
* The current XQuery execution context. Contains the static as well as the dynamic
* XQuery context components.
*
* @author <a href="mailto:wolfgang@exist-db.org">Wolfgang Meier</a>
*/
public class XQueryContext implements BinaryValueManager, Context {
private static final Logger LOG = LogManager.getLogger(XQueryContext.class);
public static final String ENABLE_QUERY_REWRITING_ATTRIBUTE = "enable-query-rewriting";
public static final String XQUERY_BACKWARD_COMPATIBLE_ATTRIBUTE = "backwardCompatible";
public static final String XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL_ATTRIBUTE = "raise-error-on-failed-retrieval";
public static final String ENFORCE_INDEX_USE_ATTRIBUTE = "enforce-index-use";
//TODO : move elsewhere ?
public static final String BUILT_IN_MODULE_URI_ATTRIBUTE = "uri";
// --- Configuration property keys and defaults ---------------------------------
public static final String BUILT_IN_MODULE_CLASS_ATTRIBUTE = "class";
public static final String BUILT_IN_MODULE_SOURCE_ATTRIBUTE = "src";
public static final String PROPERTY_XQUERY_BACKWARD_COMPATIBLE = "xquery.backwardCompatible";
public static final String PROPERTY_ENABLE_QUERY_REWRITING = "xquery.enable-query-rewriting";
public static final String PROPERTY_XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL = "xquery.raise-error-on-failed-retrieval";
public static final boolean XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL_DEFAULT = false;
public static final String PROPERTY_ENFORCE_INDEX_USE = "xquery.enforce-index-use";
//TODO : move elsewhere ?
public static final String PROPERTY_BUILT_IN_MODULES = "xquery.modules";
public static final String PROPERTY_STATIC_MODULE_MAP = "xquery.modules.static";
public static final String PROPERTY_MODULE_PARAMETERS = "xquery.modules.parameters";
public static final String JAVA_URI_START = "java:";
//private static final String XMLDB_URI_START = "xmldb:exist://";
private static final String TEMP_STORE_ERROR = "Error occurred while storing temporary data";
public static final String XQUERY_CONTEXTVAR_XQUERY_UPDATE_ERROR = "_eXist_xquery_update_error";
public static final String HTTP_SESSIONVAR_XMLDB_USER = "_eXist_xmldb_user";
public static final String HTTP_REQ_ATTR_USER = "xquery.user";
public static final String HTTP_REQ_ATTR_PASS = "xquery.password";
// --- Namespace bookkeeping (static = declared in the prolog, in-scope = current
// --- element construction scope, inherited = copied from the parent scope) ----
// Static namespace/prefix mappings
protected Map<String, String> staticNamespaces = new HashMap<>();
// Static prefix/namespace mappings
protected Map<String, String> staticPrefixes = new HashMap<>();
// Local in-scope namespace/prefix mappings in the current context
Map<String, String> inScopeNamespaces = new HashMap<>();
// Local prefix/namespace mappings in the current context
private Map<String, String> inScopePrefixes = new HashMap<>();
// Inherited in-scope namespace/prefix mappings in the current context
private Map<String, String> inheritedInScopeNamespaces = new HashMap<>();
// Inherited prefix/namespace mappings in the current context
private Map<String, String> inheritedInScopePrefixes = new HashMap<>();
// Module namespace URI -> location mappings (e.g. from static module map config)
private Map<String, XmldbURI> mappedModules = new HashMap<>();
private boolean preserveNamespaces = true;
private boolean inheritNamespaces = true;
// Local namespace stack
private Deque<Map<String, String>> namespaceStack = new ArrayDeque<>();
// Known user defined functions in the local module
private TreeMap<FunctionId, UserDefinedFunction> declaredFunctions = new TreeMap<>();
// Globally declared variables
protected Map<QName, Variable> globalVariables = new TreeMap<>();
// The last element in the linked list of local in-scope variables
private LocalVariable lastVar = null;
private Deque<LocalVariable> contextStack = new ArrayDeque<>();
private Deque<FunctionSignature> callStack = new ArrayDeque<>();
// The current size of the variable stack
private int variableStackSize = 0;
// Unresolved references to user defined functions
private Deque<FunctionCall> forwardReferences = new ArrayDeque<>();
// Inline functions using closures need to be cleared after execution
private Deque<UserDefinedFunction> closures = new ArrayDeque<>();
// List of options declared for this query at compile time - i.e. declare option
private List<Option> staticOptions = null;
// List of options declared for this query at run time - i.e. util:declare-option()
private List<Option> dynamicOptions = null;
//The Calendar for this context : may be changed by some options
private XMLGregorianCalendar calendar = null;
private TimeZone implicitTimeZone = null;
/**
 * the watchdog object assigned to this query.
 */
protected XQueryWatchDog watchdog;
/**
 * Loaded modules.
 */
protected Map<String, Module> modules = new HashMap<>();
/**
 * Loaded modules, including ones bubbled up from imported modules.
 */
private Map<String, Module> allModules = new HashMap<>();
/**
 * Used to save current state when modules are imported dynamically
 */
private SavedState savedState = new SavedState();
/**
 * Whether some modules were rebound to new instances since the last time this context's query was analyzed. (This assumes that each context is
 * attached to at most one query.)
 */
@SuppressWarnings("unused")
private boolean modulesChanged = true;
/**
 * The set of statically known documents specified as an array of paths to documents and collections.
 */
private XmldbURI[] staticDocumentPaths = null;
/**
 * The actual set of statically known documents. This will be generated on demand from staticDocumentPaths.
 */
private DocumentSet staticDocuments = null;
/**
 * The available documents of the dynamic context.
 *
 * {@see https://www.w3.org/TR/xpath-31/#dt-available-docs}.
 */
private Map<String, TriFunctionE<DBBroker, Txn, String, Either<org.exist.dom.memtree.DocumentImpl, DocumentImpl>, XPathException>> dynamicDocuments = null;
/**
 * The available test resources of the dynamic context.
 * <p>
 * {@see https://www.w3.org/TR/xpath-31/#dt-available-text-resources}.
 */
private Map<Tuple2<String, Charset>, QuadFunctionE<DBBroker, Txn, String, Charset, Reader, XPathException>> dynamicTextResources = null;
/**
 * The available collections of the dynamic context.
 *
 * {@see https://www.w3.org/TR/xpath-31/#dt-available-collections}.
 */
private Map<String, TriFunctionE<DBBroker, Txn, String, Sequence, XPathException>> dynamicCollections = null;
/**
 * A set of documents which were modified during the query, usually through an XQuery update extension. The documents will be checked after the
 * query completed to see if a defragmentation run is needed.
 */
protected MutableDocumentSet modifiedDocuments = null;
/**
 * A general-purpose map to set attributes in the current query context.
 */
protected Map<String, Object> attributes = new HashMap<>();
protected AnyURIValue baseURI = AnyURIValue.EMPTY_URI;
// true when the base URI was declared in the query prolog (vs. set externally)
private boolean baseURISetInProlog = false;
protected String moduleLoadPath = ".";
private String defaultFunctionNamespace = Function.BUILTIN_FUNCTION_NS;
private AnyURIValue defaultElementNamespace = AnyURIValue.EMPTY_URI;
private AnyURIValue defaultElementNamespaceSchema = AnyURIValue.EMPTY_URI;
/**
 * The default collation URI.
 */
private String defaultCollation = Collations.UNICODE_CODEPOINT_COLLATION_URI;
/**
 * Default Collator. Will be null for the default unicode codepoint collation.
 */
private Collator defaultCollator = null;
/**
 * Set to true to enable XPath 1.0 backwards compatibility.
 */
private boolean backwardsCompatible = false;
/**
 * Should whitespace inside node constructors be stripped?
 */
private boolean stripWhitespace = true;
/**
 * Should empty order greatest or least?
 */
private boolean orderEmptyGreatest = true;
/**
 * XQuery 3.0 - declare context item :=
 */
private ContextItemDeclaration contextItemDeclaration = null;
/**
 * The context item set in the query prolog or externally
 */
private Sequence contextItem = Sequence.EMPTY_SEQUENCE;
/**
 * The position of the currently processed item in the context sequence. This field has to be set on demand, for example, before calling the
 * fn:position() function.
 */
private int contextPosition = 0;
private Sequence contextSequence = null;
/**
 * Shared name pool used by all in-memory documents constructed in this query context.
 */
private NamePool sharedNamePool = null;
/**
 * Stack for temporary document fragments.
 */
private Deque<MemTreeBuilder> fragmentStack = new ArrayDeque<>();
/**
 * The root of the expression tree.
 */
private Expression rootExpression;
/**
 * An incremental counter to count the expressions in the current XQuery. Used during compilation to assign a unique ID to every expression.
 */
private int expressionCounter = 0;
// /**
// * Should all documents loaded by the query be locked? If set to true, it is the responsibility of the calling client code to unlock documents
// * after the query has completed.
// */
// private boolean lockDocumentsOnLoad = false;
// /**
// * Documents locked during the query.
// */
// private LockedDocumentMap lockedDocuments = null;
private LockedDocumentMap protectedDocuments = null;
/**
 * The profiler instance used by this context.
 */
protected Profiler profiler;
//For holding the environment variables
private Map<String, String> envs;
private ContextUpdateListener updateListener = null;
private boolean enableOptimizer = true;
private boolean raiseErrorOnFailedRetrieval = XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL_DEFAULT;
// true when this context is shared between several expressions; limits what reset() may clear
private boolean isShared = false;
private Source source = null;
private DebuggeeJoint debuggeeJoint = null;
// XQuery language version, e.g. 31 == XQuery 3.1
private int xqueryVersion = 31;
protected Database db;
private boolean analyzed = false;
/**
 * The Subject of the User that requested the execution of the XQuery
 * attached by this Context. This is not the same as the Effective User
 * as we may be executed setUid or setGid. The Effective User can be retrieved
 * through broker.getCurrentSubject()
 */
private Subject realUser;
/**
 * Indicates whether a user from a http session
 * was pushed onto the current broker from {@link XQueryContext#prepareForExecution()},
 * if so then we must pop the user in {@link XQueryContext#reset(boolean)}
 */
private boolean pushedUserFromHttpSession = false;
/**
 * The HTTP context within which the XQuery
 * is executing, or null if there is no
 * HTTP context.
 */
@Nullable
private HttpContext httpContext = null;
// Decimal formats declared in the prolog, keyed by name; the unnamed format uses a sentinel QName
private final Map<QName, DecimalFormat> staticDecimalFormats = new HashMap<>();
private static final QName UNNAMED_DECIMAL_FORMAT = new QName("__UNNAMED__", Function.BUILTIN_FUNCTION_NS);
/**
 * Creates a bare context with a database-less profiler and the default
 * (unnamed) decimal format registered.
 */
public XQueryContext() {
    profiler = new Profiler(null);
    staticDecimalFormats.put(UNNAMED_DECIMAL_FORMAT, DecimalFormat.UNNAMED);
}
/**
 * Creates a context for the given database with a fresh profiler.
 *
 * @param db the database this context operates against
 */
public XQueryContext(final Database db) {
    this(db, new Profiler(db));
}
/**
 * Creates a context for the given database, loading defaults from the
 * database configuration and using the supplied profiler.
 *
 * @param db       the database this context operates against
 * @param profiler the profiler to use (replaces the one set by the no-arg constructor)
 */
public XQueryContext(final Database db, Profiler profiler) {
    this();
    this.db = db;
    loadDefaults(db.getConfiguration());
    this.profiler = profiler;
}
public XQueryContext(final XQueryContext copyFrom) {
this();
this.db = copyFrom.db;
loadDefaultNS();
for (final String prefix : copyFrom.staticNamespaces.keySet()) {
if (XML_NS_PREFIX.equals(prefix) || XMLNS_ATTRIBUTE.equals(prefix)) {
continue;
}
try {
declareNamespace(prefix, copyFrom.staticNamespaces.get(prefix));
} catch (final XPathException ex) {
ex.printStackTrace();
}
}
this.profiler = copyFrom.profiler;
}
/**
 * Get the HTTP context of the XQuery.
 *
 * @return the HTTP context, or null if the query
 * is not being executed within an HTTP context.
 */
public @Nullable
HttpContext getHttpContext() {
    return httpContext;
}
/**
 * Set the HTTP context of the XQuery.
 *
 * @param httpContext the HTTP context within which the XQuery
 * is being executed.
 */
public void setHttpContext(final HttpContext httpContext) {
    this.httpContext = httpContext;
}
/**
 * Gets the EXPath package repository of the broker pool, if one is configured.
 *
 * @return the EXPath repository, or {@link Optional#empty()} when none is available
 */
public Optional<ExistRepository> getRepository() {
    return getBroker().getBrokerPool().getExpathRepo();
}
/**
 * Resolve a Module from the EXPath Repository.
 *
 * @param namespace namespace URI
 * @param prefix namespace prefix
 *
 * @return the module or null if there is no repository or the namespace
 * cannot be resolved in it
 *
 * @throws XPathException if the namespace URI is invalid (XQST0046),
 * if the module could not be loaded (XQST0059) or compiled (XPST0003)
 */
private Module resolveInEXPathRepository(final String namespace, final String prefix)
        throws XPathException {
    // the repo and its eXist handler
    final Optional<ExistRepository> repo = getRepository();
    if (!repo.isPresent()) {
        // BUGFIX: previously fell through with resolved == null and then
        // dereferenced it via `new FileSource(null, false)` -> NPE
        return null;
    }
    // try an internal (Java) module first
    final Module jMod = repo.get().resolveJavaModule(namespace, this);
    if (jMod != null) {
        return jMod;
    }
    // then try an eXist-specific XQuery module
    final Path resolved = repo.get().resolveXQueryModule(namespace);
    if (resolved == null) {
        return null;
    }
    // build a module object from the resolved file
    final Source src = new FileSource(resolved, false);
    return compileOrBorrowModule(prefix, namespace, "", src);
}
/**
 * Prepares the XQuery Context for use.
 *
 * Should be called before compilation to prepare the query context,
 * or before re-execution if the query was cached.
 *
 * @throws XPathException in case of static error
 */
public void prepareForReuse() throws XPathException {
    // re-prepare the variables of the internal modules (they were reset earlier);
    // snapshot the internal modules first so that prepare() may safely touch allModules
    final List<InternalModule> internalModules = new ArrayList<>();
    for (final Module module : allModules.values()) {
        if (module instanceof InternalModule) {
            internalModules.add((InternalModule) module);
        }
    }
    for (final InternalModule internalModule : internalModules) {
        internalModule.prepare(this);
    }
}
// A root XQueryContext never has a parent; module contexts override this elsewhere.
@Override
public boolean hasParent() {
    return false;
}
@Override
public XQueryContext getRootContext() {
    return this;
}
/**
 * Creates a copy of this context via the copy constructor and
 * {@link #copyFields(XQueryContext)}.
 */
@Override
public XQueryContext copyContext() {
    final XQueryContext ctx = new XQueryContext(this);
    copyFields(ctx);
    return ctx;
}
/**
 * Re-points this context's mutable state at the state of another context.
 * Unlike {@link #copyFields(XQueryContext)}, most fields are shared (not
 * copied) with {@code from}.
 *
 * @param from the context to take state from
 */
@Override
public void updateContext(final XQueryContext from) {
    this.watchdog = from.watchdog;
    this.lastVar = from.lastVar;
    this.contextStack = from.contextStack;
    this.inScopeNamespaces = from.inScopeNamespaces;
    this.inScopePrefixes = from.inScopePrefixes;
    this.inheritedInScopeNamespaces = from.inheritedInScopeNamespaces;
    this.inheritedInScopePrefixes = from.inheritedInScopePrefixes;
    // NOTE: variableStackSize was previously assigned twice (first via
    // from.getCurrentStackSize(), then overwritten from the field); a single
    // direct copy preserves the original net effect
    this.variableStackSize = from.variableStackSize;
    this.attributes = from.attributes;
    this.updateListener = from.updateListener;
    this.modules = from.modules;
    this.allModules = from.allModules;
    this.mappedModules = from.mappedModules;
    this.dynamicOptions = from.dynamicOptions;
    this.staticOptions = from.staticOptions;
    this.db = from.db;
    this.httpContext = from.httpContext;
}
/**
 * Copies this context's state into {@code ctx}. Scalar and immutable fields
 * are shared; maps/lists of declarations are defensively copied so the two
 * contexts can diverge afterwards.
 *
 * @param ctx the (freshly created) target context to populate
 */
protected void copyFields(final XQueryContext ctx) {
    ctx.calendar = this.calendar;
    ctx.implicitTimeZone = this.implicitTimeZone;
    ctx.baseURI = this.baseURI;
    ctx.baseURISetInProlog = this.baseURISetInProlog;
    ctx.staticDocumentPaths = this.staticDocumentPaths;
    ctx.staticDocuments = this.staticDocuments;
    ctx.dynamicDocuments = this.dynamicDocuments;
    ctx.dynamicTextResources = this.dynamicTextResources;
    ctx.dynamicCollections = this.dynamicCollections;
    ctx.moduleLoadPath = this.moduleLoadPath;
    ctx.defaultFunctionNamespace = this.defaultFunctionNamespace;
    ctx.defaultElementNamespace = this.defaultElementNamespace;
    ctx.defaultCollation = this.defaultCollation;
    ctx.defaultCollator = this.defaultCollator;
    ctx.backwardsCompatible = this.backwardsCompatible;
    ctx.enableOptimizer = this.enableOptimizer;
    ctx.stripWhitespace = this.stripWhitespace;
    ctx.preserveNamespaces = this.preserveNamespaces;
    ctx.inheritNamespaces = this.inheritNamespaces;
    ctx.orderEmptyGreatest = this.orderEmptyGreatest;
    ctx.declaredFunctions = new TreeMap<>(this.declaredFunctions);
    ctx.globalVariables = new TreeMap<>(this.globalVariables);
    ctx.attributes = new HashMap<>(this.attributes);
    // make imported modules available in the new context
    ctx.modules = new HashMap<>();
    for (final Module module : this.modules.values()) {
        try {
            ctx.modules.put(module.getNamespaceURI(), module);
            final String prefix = this.staticPrefixes.get(module.getNamespaceURI());
            ctx.declareNamespace(prefix, module.getNamespaceURI());
        } catch (final XPathException e) {
            // ignore - a module namespace that cannot be re-declared is skipped
        }
    }
    ctx.allModules = new HashMap<>();
    for (final Module module : this.allModules.values()) {
        if (module != null) { //UNDERSTAND: why is it possible? -shabanovd
            ctx.allModules.put(module.getNamespaceURI(), module);
        }
    }
    ctx.watchdog = this.watchdog;
    ctx.profiler = getProfiler();
    ctx.lastVar = this.lastVar;
    ctx.variableStackSize = getCurrentStackSize();
    ctx.contextStack = this.contextStack;
    ctx.mappedModules = new HashMap<>(this.mappedModules);
    ctx.staticNamespaces = new HashMap<>(this.staticNamespaces);
    ctx.staticPrefixes = new HashMap<>(this.staticPrefixes);
    if (this.dynamicOptions != null) {
        ctx.dynamicOptions = new ArrayList<>(this.dynamicOptions);
    }
    if (this.staticOptions != null) {
        ctx.staticOptions = new ArrayList<>(this.staticOptions);
    }
    ctx.source = this.source;
    ctx.httpContext = this.httpContext;
}
/**
 * Prepares the context for (re-)execution: propagates any user bound to the
 * current HTTP session onto the broker, records the real (requesting) user
 * and resets the context sequence position.
 */
@Override
public void prepareForExecution() {
    //if there is an existing user in the current http session
    //then set the DBBroker user
    final Subject user = getUserFromHttpSession();
    if (user != null) {
        getBroker().pushSubject(user); //this will be popped in {@link XQueryContext#reset(boolean)}
        this.pushedUserFromHttpSession = true;
    }
    setRealUser(getBroker().getCurrentSubject()); //this will be unset in {@link XQueryContext#reset(boolean)}
    //Reset current context position
    setContextSequencePosition(0, null);
    //Note that, for some reasons, an XQueryContext might be used without calling this method
}
// Sets the external context item (the value bound to '.' at evaluation start).
public void setContextItem(final Sequence contextItem) {
    this.contextItem = contextItem;
}
// Records a prolog "declare context item" declaration.
public void setContextItemDeclaration(final ContextItemDeclaration contextItemDeclaration) {
    this.contextItemDeclaration = contextItemDeclaration;
}
// NOTE(review): method name contains a typo ("Declartion") but is part of the
// public interface and therefore kept for compatibility.
public ContextItemDeclaration getContextItemDeclartion() {
    return contextItemDeclaration;
}
public Sequence getContextItem() {
    return contextItem;
}
// True when the profiler is switched on for this context.
@Override
public boolean isProfilingEnabled() {
    return profiler.isEnabled();
}
// True when profiling is on AND the profiler's verbosity is at least the given level.
@Override
public boolean isProfilingEnabled(final int verbosity) {
    return profiler.isEnabled() && profiler.verbosity() >= verbosity;
}
@Override
public Profiler getProfiler() {
    return profiler;
}
// Sets the root of the compiled expression tree for this query.
@Override
public void setRootExpression(final Expression expr) {
    this.rootExpression = expr;
}
@Override
public Expression getRootExpression() {
    return rootExpression;
}
/**
 * Returns the next unique expression id. Every expression in the XQuery is identified by a unique id. During compilation, expressions are
 * assigned their id by calling this method.
 *
 * @return The next unique expression id.
 */
int nextExpressionId() {
    return expressionCounter++;
}
// Number of expression ids handed out so far (== number of expressions compiled).
@Override
public int getExpressionCount() {
    return expressionCounter;
}
/**
 * Declares a prefix/namespace-URI binding in the static context.
 *
 * Rules enforced here:
 * - the predefined 'xml'/'xmlns' prefixes may not be (re)bound (err:XQST0070);
 * - the XML namespace URI may only be bound to the 'xml' prefix (err:XQST0070);
 * - an empty URI unbinds an existing prefix;
 * - a handful of well-known prefixes (xs, xsi, xdt, fn, math, local) may be
 *   rebound away from their default URIs;
 * - otherwise rebinding an already-bound prefix to a different URI fails
 *   (err:XQST0033).
 *
 * @param prefix the prefix to bind (null is treated as the empty string)
 * @param uri    the namespace URI (null is treated as the empty string)
 * @throws XPathException on any of the violations listed above
 */
@Override
public void declareNamespace(String prefix, String uri) throws XPathException {
    if (prefix == null) {
        prefix = "";
    }
    if (uri == null) {
        uri = "";
    }
    if (XML_NS_PREFIX.equals(prefix) || XMLNS_ATTRIBUTE.equals(prefix)) {
        throw new XPathException(ErrorCodes.XQST0070, "Namespace predefined prefix '" + prefix + "' can not be bound");
    }
    if (uri.equals(XML_NS)) {
        throw new XPathException(ErrorCodes.XQST0070, "Namespace URI '" + uri + "' must be bound to the 'xml' prefix");
    }
    final String prevURI = staticNamespaces.get(prefix);
    //This prefix was not bound
    if (prevURI == null) {
        if (uri.isEmpty()) {
            //Nothing to bind
            //TODO : check the specs : unbinding an NS which is not already bound may be disallowed.
            LOG.warn("Unbinding unbound prefix '" + prefix + "'");
        } else {
            //Bind it
            staticNamespaces.put(prefix, uri);
            staticPrefixes.put(uri, prefix);
        }
    } else {
        //This prefix was bound
        //Unbind it
        if (uri.isEmpty()) {
            // if an empty namespace is specified,
            // remove any existing mapping for this namespace
            //TODO : improve, since XML_NS can't be unbound
            staticPrefixes.remove(uri);
            staticNamespaces.remove(prefix);
            return;
        }
        //those prefixes can be rebound to different URIs
        if (("xs".equals(prefix) && Namespaces.SCHEMA_NS.equals(prevURI))
                || ("xsi".equals(prefix) && Namespaces.SCHEMA_INSTANCE_NS.equals(prevURI))
                || ("xdt".equals(prefix) && Namespaces.XPATH_DATATYPES_NS.equals(prevURI))
                || ("fn".equals(prefix) && Namespaces.XPATH_FUNCTIONS_NS.equals(prevURI))
                || ("math".equals(prefix)) && Namespaces.XPATH_FUNCTIONS_MATH_NS.equals(prevURI)
                || ("local".equals(prefix) && Namespaces.XQUERY_LOCAL_NS.equals(prevURI))) {
            staticPrefixes.remove(prevURI);
            staticNamespaces.remove(prefix);
            staticNamespaces.put(prefix, uri);
            staticPrefixes.put(uri, prefix);
        } else {
            //Forbids rebinding the *same* prefix in a *different* namespace in this *same* context
            if (!uri.equals(prevURI)) {
                throw new XPathException(ErrorCodes.XQST0033, "Cannot bind prefix '" + prefix + "' to '" + uri + "' it is already bound to '" + prevURI + "'");
            }
        }
    }
}
/**
 * Declares a set of prefix/namespace-URI bindings in the static context.
 * Unlike {@link #declareNamespace(String, String)} no validation is
 * performed; a null prefix or URI is treated as the empty string.
 *
 * @param namespaceMap prefix-to-URI mappings to declare
 */
@Override
public void declareNamespaces(final Map<String, String> namespaceMap) {
    namespaceMap.forEach((rawPrefix, rawUri) -> {
        final String prefix = rawPrefix == null ? "" : rawPrefix;
        final String uri = rawUri == null ? "" : rawUri;
        staticNamespaces.put(prefix, uri);
        staticPrefixes.put(uri, prefix);
    });
}
/**
 * Removes all bindings for a namespace URI from the static, in-scope and
 * inherited in-scope maps.
 *
 * NOTE(review): each loop returns as soon as it removes one matching prefix,
 * so a URI bound under several prefixes in the same map - or found in the
 * static map - will leave later maps untouched. This mirrors the historical
 * behavior; confirm before changing.
 *
 * @param uri the namespace URI whose bindings should be removed
 */
@Override
public void removeNamespace(final String uri) {
    staticPrefixes.remove(uri);
    for (final Iterator<String> i = staticNamespaces.values().iterator(); i.hasNext(); ) {
        if (i.next().equals(uri)) {
            i.remove();
            return;
        }
    }
    inScopePrefixes.remove(uri);
    if (inScopeNamespaces != null) {
        for (final Iterator<String> i = inScopeNamespaces.values().iterator(); i.hasNext(); ) {
            if (i.next().equals(uri)) {
                i.remove();
                return;
            }
        }
    }
    inheritedInScopePrefixes.remove(uri);
    if (inheritedInScopeNamespaces != null) {
        for (final Iterator<String> i = inheritedInScopeNamespaces.values().iterator(); i.hasNext(); ) {
            if (i.next().equals(uri)) {
                i.remove();
                return;
            }
        }
    }
}
/**
 * Declares an in-scope (element-construction) namespace binding, shadowing
 * any inherited binding for the same prefix.
 *
 * NOTE(review): the containsKey check uses getURIForPrefix(prefix) but the
 * removal uses the passed-in uri - these may differ; confirm intent before
 * refactoring.
 *
 * @param prefix the prefix to bind (must not be null)
 * @param uri    the namespace URI (must not be null)
 */
@Override
public void declareInScopeNamespace(final String prefix, final String uri) {
    if (prefix == null || uri == null) {
        throw new IllegalArgumentException("null argument passed to declareNamespace");
    }
    //Activate the namespace by removing it from the inherited namespaces
    if (inheritedInScopePrefixes.containsKey(getURIForPrefix(prefix))) {
        inheritedInScopePrefixes.remove(uri);
    }
    inheritedInScopeNamespaces.remove(prefix);
    inScopePrefixes.put(uri, prefix);
    inScopeNamespaces.put(prefix, uri);
}
// URI bound to a prefix in the current in-scope namespaces, or null.
@Override
public String getInScopeNamespace(final String prefix) {
    return inScopeNamespaces == null ? null : inScopeNamespaces.get(prefix);
}
// Prefix bound to a URI in the current in-scope namespaces, or null.
@Override
public String getInScopePrefix(final String uri) {
    return inScopePrefixes == null ? null : inScopePrefixes.get(uri);
}
// Exposes the live (mutable) in-scope prefix map - callers must not misuse it.
public Map<String, String> getInScopePrefixes() {
    return inScopePrefixes;
}
// URI bound to a prefix among the namespaces inherited from the parent scope, or null.
@Override
public String getInheritedNamespace(final String prefix) {
    return inheritedInScopeNamespaces == null ? null : inheritedInScopeNamespaces.get(prefix);
}
// Prefix bound to a URI among the inherited in-scope prefixes, or null.
@Override
public String getInheritedPrefix(final String uri) {
    return inheritedInScopePrefixes == null ? null : inheritedInScopePrefixes.get(uri);
}
/**
 * Resolves a prefix to its namespace URI, consulting in order: the in-scope
 * namespaces, the inherited in-scope namespaces (only when namespace
 * inheritance is enabled), and finally the statically declared namespaces.
 *
 * @param prefix the prefix to resolve
 * @return the bound URI, or null if the prefix is unbound
 */
@Override
public String getURIForPrefix(final String prefix) {
    // try in-scope namespace declarations
    String uri = (inScopeNamespaces == null) ? null : inScopeNamespaces.get(prefix);
    if (uri != null) {
        return uri;
    }
    if (inheritNamespaces) {
        uri = (inheritedInScopeNamespaces == null) ? null : inheritedInScopeNamespaces.get(prefix);
        if (uri != null) {
            return uri;
        }
    }
    return staticNamespaces.get(prefix);
    /* old code checked namespaces first
    String ns = (String) namespaces.get(prefix);
    if (ns == null)
    // try in-scope namespace declarations
    return inScopeNamespaces == null
    ? null
    : (String) inScopeNamespaces.get(prefix);
    else
    return ns;
    */
}
/**
 * Resolves a namespace URI to its bound prefix, consulting in order: the
 * in-scope prefixes, the inherited in-scope prefixes (only when namespace
 * inheritance is enabled), and finally the statically declared prefixes.
 *
 * @param uri the namespace URI to look up
 * @return the bound prefix, or null if the URI is unbound
 */
@Override
public String getPrefixForURI(final String uri) {
    if (inScopePrefixes != null) {
        final String inScope = inScopePrefixes.get(uri);
        if (inScope != null) {
            return inScope;
        }
    }
    if (inheritNamespaces && inheritedInScopePrefixes != null) {
        final String inherited = inheritedInScopePrefixes.get(uri);
        if (inherited != null) {
            return inherited;
        }
    }
    return staticPrefixes.get(uri);
}
@Override
public String getDefaultFunctionNamespace() {
    return defaultFunctionNamespace;
}
/**
 * Sets the default function namespace. May only deviate from the built-in
 * function namespace once (err:XQST0066 on a conflicting re-declaration).
 */
@Override
public void setDefaultFunctionNamespace(final String uri) throws XPathException {
    //Not sure for the 2nd clause : eXist-db forces the function NS as default.
    if ((defaultFunctionNamespace != null) && !defaultFunctionNamespace.equals(Function.BUILTIN_FUNCTION_NS) && !defaultFunctionNamespace.equals(uri)) {
        throw new XPathException(ErrorCodes.XQST0066, "Default function namespace is already set to: '" + defaultFunctionNamespace + "'");
    }
    defaultFunctionNamespace = uri;
}
@Override
public String getDefaultElementNamespaceSchema() throws XPathException {
    return defaultElementNamespaceSchema.getStringValue();
}
/**
 * Sets the schema location for the default element namespace; may only be
 * declared once (err:XQST0066 otherwise).
 */
@Override
public void setDefaultElementNamespaceSchema(final String uri) throws XPathException {
    // eXist forces the empty element NS as default.
    if (!defaultElementNamespaceSchema.equals(AnyURIValue.EMPTY_URI)) {
        throw new XPathException(ErrorCodes.XQST0066, "Default function namespace schema is already set to: '" + defaultElementNamespaceSchema.getStringValue() + "'");
    }
    defaultElementNamespaceSchema = new AnyURIValue(uri);
}
@Override
public String getDefaultElementNamespace() throws XPathException {
    return defaultElementNamespace.getStringValue();
}
/**
 * Sets the default element/type namespace (and optionally its schema
 * location); may only be declared once (err:XQST0066 otherwise).
 *
 * @param uri    the default element namespace URI
 * @param schema optional schema location, ignored when null
 */
@Override
public void setDefaultElementNamespace(final String uri, @Nullable final String schema) throws XPathException {
    // eXist forces the empty element NS as default.
    if (!defaultElementNamespace.equals(AnyURIValue.EMPTY_URI)) {
        throw new XPathException(ErrorCodes.XQST0066,
                "Default element namespace is already set to: '" + defaultElementNamespace.getStringValue() + "'");
    }
    defaultElementNamespace = new AnyURIValue(uri);
    if (schema != null) {
        defaultElementNamespaceSchema = new AnyURIValue(schema);
    }
}
/**
 * Sets the default collation for this context. The Unicode codepoint
 * collation (long or short form) maps to a null collator; any other URI is
 * validated and resolved, relative URIs being resolved against the base URI.
 *
 * @param uri the collation URI to set as default
 * @throws XPathException err:XQST0038 when the URI is syntactically invalid
 *                        or the collation is unknown
 */
@Override
public void setDefaultCollation(final String uri) throws XPathException {
    if (uri.equals(Collations.UNICODE_CODEPOINT_COLLATION_URI) || uri.equals(Collations.CODEPOINT_SHORT)) {
        defaultCollation = Collations.UNICODE_CODEPOINT_COLLATION_URI;
        defaultCollator = null;
        // BUGFIX: return was missing - the codepoint collation previously fell
        // through and was re-resolved (and could be overwritten) below
        return;
    }
    final URI uriTest;
    try {
        uriTest = new URI(uri);
    } catch (final URISyntaxException e) {
        throw new XPathException(ErrorCodes.XQST0038, "Unknown collation : '" + uri + "'");
    }
    if (uri.startsWith(Collations.EXIST_COLLATION_URI) || uri.charAt(0) == '?' || uriTest.isAbsolute()) {
        defaultCollator = Collations.getCollationFromURI(uri);
        defaultCollation = uri;
    } else {
        // relative URI: resolve against the static base URI
        final String absUri = getBaseURI().getStringValue() + uri;
        defaultCollator = Collations.getCollationFromURI(absUri);
        defaultCollation = absUri;
    }
}
@Override
public String getDefaultCollation() {
    return defaultCollation;
}
// Resolves a collation URI to a Collator; a null URI means the default collation.
@Override
public Collator getCollator(final String uri) throws XPathException {
    if (uri == null) {
        return defaultCollator;
    }
    return Collations.getCollationFromURI(uri);
}
// Null for the Unicode codepoint collation (see field doc).
@Override
public Collator getDefaultCollator() {
    return defaultCollator;
}
// Declares the statically known documents via their paths; the actual
// DocumentSet is built lazily in getStaticallyKnownDocuments().
@Override
public void setStaticallyKnownDocuments(final XmldbURI[] docs) {
    staticDocumentPaths = docs;
}
// Sets the statically known documents directly, bypassing path resolution.
@Override
public void setStaticallyKnownDocuments(final DocumentSet set) {
    staticDocuments = set;
}
/**
 * Registers a supplier for a document in the dynamic context's
 * "available documents" (lazily creates the backing map).
 *
 * @param uri      the URI under which the document is made available
 * @param supplier produces the document (in-memory or persistent) on demand
 */
public void addDynamicallyAvailableDocument(final String uri,
        final TriFunctionE<DBBroker, Txn, String, Either<org.exist.dom.memtree.DocumentImpl, DocumentImpl>, XPathException> supplier) {
    if (dynamicDocuments == null) {
        dynamicDocuments = new HashMap<>();
    }
    dynamicDocuments.put(uri, supplier);
}
/**
 * Registers a supplier for a text resource in the dynamic context's
 * "available text resources", keyed by (uri, encoding).
 *
 * @param uri      the URI under which the resource is made available
 * @param encoding the character encoding the resource will be read with
 * @param supplier produces a Reader over the resource on demand
 */
public void addDynamicallyAvailableTextResource(final String uri, final Charset encoding,
        final QuadFunctionE<DBBroker, Txn, String, Charset, Reader, XPathException> supplier) {
    if (dynamicTextResources == null) {
        dynamicTextResources = new HashMap<>();
    }
    dynamicTextResources.put(Tuple(uri, encoding), supplier);
}
/**
 * Registers a supplier for a collection in the dynamic context's
 * "available collections".
 *
 * @param uri      the URI under which the collection is made available
 * @param supplier produces the collection's document sequence on demand
 */
public void addDynamicallyAvailableCollection(final String uri,
        final TriFunctionE<DBBroker, Txn, String, Sequence, XPathException> supplier) {
    if (dynamicCollections == null) {
        dynamicCollections = new HashMap<>();
    }
    dynamicCollections.put(uri, supplier);
}
// Stores a defensive copy so later mutation of the caller's calendar has no effect.
@Override
public void setCalendar(final XMLGregorianCalendar newCalendar) {
    this.calendar = (XMLGregorianCalendar) newCalendar.clone();
}
@Override
public void setTimeZone(final TimeZone newTimeZone) {
    this.implicitTimeZone = newTimeZone;
}
/**
 * Returns the context's calendar ("current dateTime"), lazily initialized to
 * now on first access and then kept stable for the remainder of the query.
 * NOTE(review): if DatatypeFactory initialization fails, the error is logged
 * and null is returned - callers should be prepared for that.
 */
@Override
public XMLGregorianCalendar getCalendar() {
    //TODO : we might prefer to return null
    if (calendar == null) {
        try {
            //Initialize to current dateTime
            calendar = DatatypeFactory.newInstance().newXMLGregorianCalendar(new GregorianCalendar());
        } catch (final DatatypeConfigurationException e) {
            LOG.error(e.getMessage(), e);
        }
    }
    //That's how we ensure stability of that static context function
    return calendar;
}
/**
 * Returns the implicit timezone, lazily initialized from the JVM default
 * (with DST folded into the raw offset - TimeZone.getDefault() returns a
 * copy, so the JVM default itself is not modified) and then kept stable.
 */
@Override
public TimeZone getImplicitTimeZone() {
    if (implicitTimeZone == null) {
        implicitTimeZone = TimeZone.getDefault();
        if (implicitTimeZone.inDaylightTime(new Date())) {
            implicitTimeZone.setRawOffset(implicitTimeZone.getRawOffset() + implicitTimeZone.getDSTSavings());
        }
    }
    //That's how we ensure stability of that static context function
    return this.implicitTimeZone;
}
/**
 * Returns the statically known documents, building the set lazily: from the
 * protected-documents map if present, otherwise from the configured document
 * paths (collections are expanded recursively), or - when no paths are set -
 * from all XML resources in the database. Unreadable resources are skipped
 * with a warning. The result is cached until reset.
 *
 * @return the (possibly cached) set of statically known documents
 * @throws XPathException when database-wide resource enumeration is denied
 */
@Override
public DocumentSet getStaticallyKnownDocuments() throws XPathException {
    if (staticDocuments != null) {
        // the document set has already been built, return it
        return staticDocuments;
    }
    if (protectedDocuments != null) {
        staticDocuments = protectedDocuments.toDocumentSet();
        return staticDocuments;
    }
    final MutableDocumentSet ndocs = new DefaultDocumentSet(40);
    if (staticDocumentPaths == null) {
        // no path defined: return all documents in the db
        try {
            getBroker().getAllXMLResources(ndocs);
        } catch (final PermissionDeniedException | LockException e) {
            LOG.warn(e);
            throw new XPathException("Permission denied to read resource all resources: " + e.getMessage(), e);
        }
    } else {
        for (final XmldbURI staticDocumentPath : staticDocumentPaths) {
            try {
                final Collection collection = getBroker().getCollection(staticDocumentPath);
                if (collection != null) {
                    collection.allDocs(getBroker(), ndocs, true);
                } else {
                    try (final LockedDocument lockedDocument = getBroker().getXMLResource(staticDocumentPath, LockMode.READ_LOCK)) {
                        final DocumentImpl doc = lockedDocument == null ? null : lockedDocument.getDocument();
                        if (doc != null) {
                            if (doc.getPermissions().validate(
                                    getBroker().getCurrentSubject(), Permission.READ)) {
                                ndocs.add(doc);
                            }
                        }
                    }
                }
            } catch (final PermissionDeniedException | LockException e) {
                LOG.warn("Permission denied to read resource " + staticDocumentPath + ". Skipping it.");
            }
        }
    }
    staticDocuments = ndocs;
    return staticDocuments;
}
// Returns the cached static document set without building it (may be null).
public DocumentSet getStaticDocs() {
    return staticDocuments;
}
/**
 * Get's a document from the "Available documents" of the
 * dynamic context.
 *
 * @param uri the URI by which the document was registered
 * @return sequence of available documents matching the URI, or null when
 * no document was registered under the URI
 * @throws XPathException in case of dynamic error
 */
public @Nullable
Sequence getDynamicallyAvailableDocument(final String uri) throws XPathException {
    if (dynamicDocuments == null) {
        return null;
    }
    final TriFunctionE<DBBroker, Txn, String, Either<org.exist.dom.memtree.DocumentImpl, DocumentImpl>, XPathException> docSupplier
            = dynamicDocuments.get(uri);
    if (docSupplier == null) {
        return null;
    }
    // both the in-memory (left) and persistent (right) variants are Sequences
    return docSupplier.apply(getBroker(), getBroker().getCurrentTransaction(), uri).fold(md -> md, pd -> (Sequence) pd);
}
/**
 * Get's a text resource from the "Available text resources" of the
 * dynamic context.
 *
 * @param uri the URI by which the document was registered
 * @param charset the charset to use for retrieving the resource
 * @return a reader to read the resource content from, or null when no
 * resource was registered under the (uri, charset) pair
 * @throws XPathException in case of a dynamic error
 */
public @Nullable
Reader getDynamicallyAvailableTextResource(final String uri, final Charset charset)
        throws XPathException {
    if (dynamicTextResources == null) {
        return null;
    }
    final QuadFunctionE<DBBroker, Txn, String, Charset, Reader, XPathException> textResourceSupplier
            = dynamicTextResources.get(Tuple(uri, charset));
    if (textResourceSupplier == null) {
        return null;
    }
    return textResourceSupplier.apply(getBroker(), getBroker().getCurrentTransaction(), uri, charset);
}
/**
 * Get's a collection from the "Available collections" of the
 * dynamic context.
 *
 * @param uri the URI of the collection to retrieve
 * @return a sequence of document nodes, or null when no collection was
 * registered under the URI
 * @throws XPathException in case of dynamic error
 */
public @Nullable
Sequence getDynamicallyAvailableCollection(final String uri) throws XPathException {
    if (dynamicCollections == null) {
        return null;
    }
    final TriFunctionE<DBBroker, Txn, String, Sequence, XPathException> collectionSupplier
            = dynamicCollections.get(uri);
    if (collectionSupplier == null) {
        return null;
    }
    return collectionSupplier.apply(getBroker(), getBroker().getCurrentTransaction(), uri);
}
/**
 * Creates a StAX reader over a node value: an in-memory reader for memtree
 * nodes, or a broker-backed reader positioned at the owning document's first
 * child for persistent nodes.
 *
 * @param nv the node to stream
 * @return a stream reader over the node's document
 * @throws XMLStreamException on StAX failure
 * @throws IOException on I/O failure while opening the persistent stream
 */
@Override
public ExtendedXMLStreamReader getXMLStreamReader(final NodeValue nv) throws XMLStreamException, IOException {
    final ExtendedXMLStreamReader reader;
    if (nv.getImplementationType() == NodeValue.IN_MEMORY_NODE) {
        final NodeImpl node = (NodeImpl) nv;
        final org.exist.dom.memtree.DocumentImpl ownerDoc = node.getNodeType() == Node.DOCUMENT_NODE ? (org.exist.dom.memtree.DocumentImpl) node : node.getOwnerDocument();
        reader = new InMemoryXMLStreamReader(ownerDoc, ownerDoc);
    } else {
        final NodeProxy proxy = (NodeProxy) nv;
        reader = getBroker().newXMLStreamReader(new NodeProxy(proxy.getOwnerDocument(), NodeId.DOCUMENT_NODE, proxy.getOwnerDocument().getFirstChildAddress()), false);
    }
    return reader;
}
// Installs the map of documents locked for protected (update-safe) execution.
@Override
public void setProtectedDocs(final LockedDocumentMap map) {
    this.protectedDocuments = map;
}
@Override
public LockedDocumentMap getProtectedDocs() {
    return this.protectedDocuments;
}
// True while a protected-document map is installed.
@Override
public boolean inProtectedMode() {
    return protectedDocuments != null;
}
// Document locking on load is permanently disabled (see commented-out code below).
@Override
public boolean lockDocumentsOnLoad() {
    return false;
}
// /**
// * If lock is true, all documents loaded during query execution
// * will be locked. This way, we avoid that query results become
// * invalid before the entire result has been processed by the client
// * code. All attempts to modify nodes which are part of the result
// * set will be blocked.
// *
// * However, it is the client's responsibility to proper unlock
// * all documents once processing is completed.
// *
// * @param lock
// */
// public void setLockDocumentsOnLoad(boolean lock) {
// lockDocumentsOnLoad = lock;
// if(lock)
// lockedDocuments = new LockedDocumentMap();
// }
// Intentionally a no-op: document locking on load is disabled (see the
// commented-out lockedDocuments machinery nearby).
@Override
public void addLockedDocument(final DocumentImpl doc) {
    // if (lockedDocuments != null)
    // lockedDocuments.add(doc);
}
// /**
// * Release all locks on documents that have been locked
// * during query execution.
// *
// *@see #setLockDocumentsOnLoad(boolean)
// */
// public void releaseLockedDocuments() {
// if(lockedDocuments != null)
// lockedDocuments.unlock();
// lockDocumentsOnLoad = false;
// lockedDocuments = null;
// }
// /**
// * Release all locks on documents not being referenced by the sequence.
// * This is called after query execution has completed. Only locks on those
// * documents contained in the final result set will be preserved. All other
// * locks are released as they are no longer needed.
// *
// * @param seq
// * @throws XPathException
// */
// public LockedDocumentMap releaseUnusedDocuments(Sequence seq) throws XPathException {
// if(lockedDocuments == null)
// return null;
// // determine the set of documents referenced by nodes in the sequence
// DocumentSet usedDocs = new DocumentSet();
// for(SequenceIterator i = seq.iterate(); i.hasNext(); ) {
// Item next = i.nextItem();
// if(Type.subTypeOf(next.getType(), Type.NODE)) {
// NodeValue node = (NodeValue) next;
// if(node.getImplementationType() == NodeValue.PERSISTENT_NODE) {
// DocumentImpl doc = ((NodeProxy)node).getDocument();
// if(!usedDocs.contains(doc.getDocId()))
// usedDocs.add(doc, false);
// }
// }
// }
// LockedDocumentMap remaining = lockedDocuments.unlockSome(usedDocs);
// lockDocumentsOnLoad = false;
// lockedDocuments = null;
// return remaining;
// }
@Override
public void setShared(final boolean shared) {
// Marks this context as shared between queries; shared contexts skip some
// per-query cleanup in reset() (lastVar, watchdog).
isShared = shared;
}
@Override
public boolean isShared() {
return isShared;
}
@Override
public void addModifiedDoc(final DocumentImpl document) {
// Records a document modified by this query; checked for fragmentation on reset().
if (modifiedDocuments == null) {
modifiedDocuments = new DefaultDocumentSet();
}
modifiedDocuments.add(document);
}
@Override
public void reset() {
// Full reset: also clears globals, statically known documents, modules etc.
reset(false);
}
@Override
public void reset(final boolean keepGlobals) {
// Tears down per-query state so the context can be reused. When keepGlobals
// is true, global variables, statically known documents and module mappings
// survive (used e.g. after the optimizer re-analyzes the expression tree).
setRealUser(null);
// Undo a subject pushed from the HTTP session, exactly once.
if (this.pushedUserFromHttpSession) {
try {
getBroker().popSubject();
} finally {
this.pushedUserFromHttpSession = false;
}
}
// Check documents modified by this query for fragmentation before dropping the set.
if (modifiedDocuments != null) {
try {
Modification.checkFragmentation(this, modifiedDocuments);
} catch (final LockException | EXistException e) {
LOG.warn("Error while checking modified documents: " + e.getMessage(), e);
}
modifiedDocuments = null;
}
calendar = null;
implicitTimeZone = null;
resetDocumentBuilder();
contextSequence = null;
contextItem = Sequence.EMPTY_SEQUENCE;
if (!keepGlobals) {
// do not reset the statically known documents
staticDocumentPaths = null;
staticDocuments = null;
dynamicDocuments = null;
dynamicTextResources = null;
dynamicCollections = null;
}
if (!isShared) {
lastVar = null;
}
// clear inline functions using closures
closures.forEach(func -> func.setClosureVariables(null));
closures.clear();
fragmentStack = new ArrayDeque<>();
callStack.clear();
protectedDocuments = null;
if (!keepGlobals) {
globalVariables.clear();
}
if (dynamicOptions != null) {
dynamicOptions.clear(); //clear any dynamic options
}
if (!isShared) {
watchdog.reset();
}
// Give every imported module a chance to reset its own state.
for (final Module module : allModules.values()) {
module.reset(this, keepGlobals);
}
if (!keepGlobals) {
mappedModules.clear();
}
savedState.restore();
attributes.clear();
clearUpdateListeners();
profiler.reset();
if (!keepGlobals) {
httpContext = null;
}
// Force re-analysis on the next analyzeAndOptimizeIfModulesChanged() call.
analyzed = false;
}
// Simple accessors for XQuery serialization / prolog options.
@Override
public boolean stripWhitespace() {
return stripWhitespace;
}
@Override
public void setStripWhitespace(final boolean strip) {
this.stripWhitespace = strip;
}
// Corresponds to the prolog's "declare copy-namespaces preserve|no-preserve".
@Override
public boolean preserveNamespaces() {
return preserveNamespaces;
}
@Override
public void setPreserveNamespaces(final boolean preserve) {
this.preserveNamespaces = preserve;
}
// Corresponds to the prolog's "declare copy-namespaces inherit|no-inherit".
@Override
public boolean inheritNamespaces() {
return inheritNamespaces;
}
@Override
public void setInheritNamespaces(final boolean inherit) {
this.inheritNamespaces = inherit;
}
// Corresponds to the prolog's "declare default order empty greatest|least".
@Override
public boolean orderEmptyGreatest() {
return orderEmptyGreatest;
}
@Override
public void setOrderEmptyGreatest(final boolean order) {
this.orderEmptyGreatest = order;
}
// Modules imported directly by this context.
@Override
public Iterator<Module> getModules() {
return modules.values().iterator();
}
@Override
public Iterator<Module> getRootModules() {
return getAllModules();
}
// All modules known to the root context (including transitively imported ones).
@Override
public Iterator<Module> getAllModules() {
return allModules.values().iterator();
}
@Override
@Nullable
public Module getModule(final String namespaceURI) {
return modules.get(namespaceURI);
}
@Override
public Module getRootModule(final String namespaceURI) {
return allModules.get(namespaceURI);
}
/**
 * Binds (or, for a null module, unbinds) a module in this context's local
 * module map, and mirrors the change into the root module map.
 */
@Override
public void setModule(final String namespaceURI, final Module module) {
    if (module == null) {
        // A null module unbinds any existing mapping for this namespace.
        modules.remove(namespaceURI);
    } else {
        modules.put(namespaceURI, module);
    }
    setRootModule(namespaceURI, module);
}

/**
 * Binds (or unbinds) a module in the root module map, flagging a module
 * change when a different instance replaces an existing binding.
 */
protected void setRootModule(final String namespaceURI, final Module module) {
    if (module == null) {
        // unbind the module
        allModules.remove(namespaceURI);
        return;
    }
    final Module previous = allModules.put(namespaceURI, module);
    if (previous != module) {
        setModulesChanged();
    }
}

// Marks that the set of loaded modules changed since the last compilation.
protected void setModulesChanged() {
    this.modulesChanged = true;
}
/**
 * Checks whether every loaded external module's source is still valid.
 *
 * @return false as soon as one external module reports it has changed and
 *         needs reloading; true otherwise. Internal modules are never stale.
 */
@Override
public boolean checkModulesValid() {
    for (final Module module : allModules.values()) {
        if (module.isInternalModule()) {
            continue; // internal (Java) modules cannot change on disk
        }
        if (!((ExternalModule) module).moduleIsValid(getBroker())) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Module with URI " + module.getNamespaceURI() + " has changed and needs to be reloaded");
            }
            return false;
        }
    }
    return true;
}
@Override
public void analyzeAndOptimizeIfModulesChanged(final Expression expr) throws XPathException {
// Runs at most once per reset (guarded by the 'analyzed' flag, cleared in reset()).
if (analyzed) {
return;
}
analyzed = true;
// Recursively re-analyze every external module's root expression first, so
// their state is consistent before the main expression is analyzed.
for (final Module module : expr.getContext().modules.values()) {
if (!module.isInternalModule()) {
final Expression root = ((ExternalModule) module).getRootExpression();
((ExternalModule) module).getContext().analyzeAndOptimizeIfModulesChanged(root);
}
}
expr.analyze(new AnalyzeContextInfo());
if (optimizationsEnabled()) {
final Optimizer optimizer = new Optimizer(this);
expr.accept(optimizer);
// If the optimizer rewrote the tree, reset state (keeping globals) and
// re-analyze so the rewritten expression is consistent.
if (optimizer.hasOptimized()) {
reset(true);
expr.resetState(true);
expr.analyze(new AnalyzeContextInfo());
}
}
modulesChanged = false;
}
/**
 * Loads a built-in (Java) module, returning any already-registered module
 * for the namespace instead of instantiating a second copy.
 *
 * @param namespaceURI the module namespace, may be null
 * @param moduleClass fully-qualified class name of the module implementation
 * @return the existing or freshly initialized module, or null on failure
 */
@Override
@Nullable
public Module loadBuiltInModule(final String namespaceURI, final String moduleClass) {
    final Module existing = namespaceURI == null ? null : getModule(namespaceURI);
    if (existing != null) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("module " + namespaceURI + " is already present");
        }
        return existing;
    }
    return initBuiltInModule(namespaceURI, moduleClass);
}
@SuppressWarnings("unchecked")
// Loads the module class by name, verifies it implements Module, and
// instantiates it with any configured module parameters. Returns null if the
// class is missing or not a Module.
Module initBuiltInModule(final String namespaceURI, final String moduleClass) {
Module module = null;
try {
// lookup the class
final ClassLoader existClassLoader = getBroker().getBrokerPool().getClassLoader();
final Class<?> mClass = Class.forName(moduleClass, false, existClassLoader);
if (!(Module.class.isAssignableFrom(mClass))) {
LOG.info("failed to load module. " + moduleClass + " is not an instance of org.exist.xquery.Module.");
return null;
}
//instantiateModule( namespaceURI, (Class<Module>)mClass );
// INOTE: expathrepo
module = instantiateModule(namespaceURI, (Class<Module>) mClass, (Map<String, Map<String, List<? extends Object>>>) getBroker().getConfiguration().getProperty(PROPERTY_MODULE_PARAMETERS));
if (LOG.isDebugEnabled()) {
LOG.debug("module " + module.getNamespaceURI() + " loaded successfully.");
}
} catch (final ClassNotFoundException e) {
// A missing class is tolerated: the module is simply skipped.
LOG.warn("module class " + moduleClass + " not found. Skipping...");
}
return module;
}
@SuppressWarnings("unchecked")
// Instantiates a module class, preferring a Map-taking constructor (module
// parameters) and falling back to the no-arg constructor. Registers the module
// in both module maps and prepares internal modules. Returns null if the
// declared namespace does not match the expected one, or on any failure.
private Module instantiateModule(final String namespaceURI, final Class<Module> mClazz,
final Map<String, Map<String, List<? extends Object>>> moduleParameters) {
Module module = null;
try {
try {
// attempt for a constructor that takes 1 argument
final Constructor<Module> cstr1 = mClazz.getConstructor(Map.class);
module = cstr1.newInstance(moduleParameters.get(namespaceURI));
} catch (final NoSuchMethodException nsme) {
// attempt for a constructor that takes 0 arguments
module = mClazz.newInstance();
}
// A null namespaceURI means "accept whatever the module declares".
if (namespaceURI != null && !module.getNamespaceURI().equals(namespaceURI)) {
LOG.warn("the module declares a different namespace URI. Expected: " + namespaceURI + " found: " + module.getNamespaceURI());
return null;
}
// Bind the module's default prefix unless the URI already has a prefix.
if (getPrefixForURI(module.getNamespaceURI()) == null && !module.getDefaultPrefix().isEmpty()) {
declareNamespace(module.getDefaultPrefix(), module.getNamespaceURI());
}
modules.put(module.getNamespaceURI(), module);
allModules.put(module.getNamespaceURI(), module);
if (module instanceof InternalModule) {
((InternalModule) module).prepare(this);
}
} catch (final Throwable e) {
// Catching Throwable deliberately: any failure during reflective
// construction should not bring down the query compilation.
if (e instanceof InterruptedException) {
// NOTE: must set interrupted flag
Thread.currentThread().interrupt();
}
LOG.warn("error while instantiating module class " + mClazz.getName(), e);
}
return module;
}
/**
 * Declares a user-defined function in this context.
 *
 * @param function the function to declare
 * @throws XPathException XQST0045 if the function lives in a reserved
 *         namespace, XQST0060 if it has no namespace at all
 */
@Override
public void declareFunction(final UserDefinedFunction function) throws XPathException {
    // TODO: redeclaring functions should be forbidden. however, throwing an
    // exception will currently break util:eval.
    final QName name = function.getSignature().getName();
    // XQST0045: functions may not be declared in any of the reserved namespaces.
    final String[] forbiddenNamespaces = {
            XML_NS,
            Namespaces.SCHEMA_NS,
            Namespaces.SCHEMA_INSTANCE_NS,
            Namespaces.XPATH_FUNCTIONS_NS
    };
    for (final String forbidden : forbiddenNamespaces) {
        if (forbidden.equals(name.getNamespaceURI())) {
            throw new XPathException(function, ErrorCodes.XQST0045, "Function '" + name + "' is in the forbidden namespace '" + forbidden + "'");
        }
    }
    // XQST0060: a declared function must reside in some namespace.
    if (name.getNamespaceURI().isEmpty()) {
        throw new XPathException(function, ErrorCodes.XQST0060, "Every declared function name must have a non-null namespace URI, but function '" + name + "' does not meet this requirement.");
    }
    declaredFunctions.put(function.getSignature().getFunctionId(), function);
}
/**
 * Looks up a user-defined function by name and arity.
 *
 * @return the matching function, or null if none is declared
 */
@Override
@Nullable
public UserDefinedFunction resolveFunction(final QName name, final int argCount) throws XPathException {
    return declaredFunctions.get(new FunctionId(name, argCount));
}

/**
 * Collects the signatures of all declared functions sharing the given name
 * (one entry per arity).
 */
@Override
public Iterator<FunctionSignature> getSignaturesForFunction(final QName name) {
    final List<FunctionSignature> matching = new ArrayList<>(2);
    for (final UserDefinedFunction candidate : declaredFunctions.values()) {
        if (candidate.getName().equals(name)) {
            matching.add(candidate.getSignature());
        }
    }
    return matching.iterator();
}

// All functions declared directly in this context.
@Override
public Iterator<UserDefinedFunction> localFunctions() {
    return declaredFunctions.values().iterator();
}
/**
 * Appends a local variable binding to the scope chain; the new binding
 * becomes the chain's tail.
 */
@Override
public LocalVariable declareVariableBinding(final LocalVariable var) throws XPathException {
    if (lastVar != null) {
        lastVar.addAfter(var);
    }
    lastVar = var;
    var.setStackPosition(getCurrentStackSize());
    return var;
}

/**
 * Declares (or replaces) a global variable keyed by its qualified name.
 */
@Override
public Variable declareGlobalVariable(final Variable var) {
    globalVariables.put(var.getQName(), var);
    var.setStackPosition(getCurrentStackSize());
    return var;
}

// Removes a previously declared global variable.
@Override
public void undeclareGlobalVariable(final QName name) {
    globalVariables.remove(name);
}
@Override
public Variable declareVariable(final String qname, final Object value) throws XPathException {
// Parses the lexical QName against the in-scope namespaces, then delegates
// to the QName-based overload.
try {
return declareVariable(QName.parse(this, qname, null), value);
} catch (final QName.IllegalQNameException e) {
throw new XPathException(ErrorCodes.XPST0081, "No namespace defined for prefix: " + qname);
}
}
/**
 * Declares (or re-assigns) a variable. If the variable's namespace belongs
 * to a loaded module, declaration is delegated to that module; otherwise the
 * value is converted to an XPath sequence and stored as a global variable.
 *
 * @param qn the variable's qualified name
 * @param value a Java object, converted via XPathUtil.javaObjectToXPath
 * @return the declared variable
 * @throws XPathException XPTY0004 when the value violates the variable's
 *         declared sequence type (cardinality or item type)
 */
@Override
public Variable declareVariable(final QName qn, final Object value) throws XPathException {
    final Module module = getModule(qn.getNamespaceURI());
    if (module != null) {
        return module.declareVariable(qn, value);
    }
    final Sequence val = XPathUtil.javaObjectToXPath(value, this);
    Variable var = globalVariables.get(qn);
    if (var == null) {
        var = new VariableImpl(qn);
        globalVariables.put(qn, var);
    }
    if (var.getSequenceType() != null) {
        final int actualCardinality;
        if (val.isEmpty()) {
            actualCardinality = Cardinality.EMPTY;
        } else if (val.hasMany()) {
            actualCardinality = Cardinality.MANY;
        } else {
            actualCardinality = Cardinality.ONE;
        }
        // Type.EMPTY is *not* a subtype of other types; check cardinality first.
        if (!Cardinality.checkCardinality(var.getSequenceType().getCardinality(), actualCardinality)) {
            throw new XPathException("XPTY0004: Invalid cardinality for variable $" + var.getQName() + ". Expected " + Cardinality.getDescription(var.getSequenceType().getCardinality()) + ", got " + Cardinality.getDescription(actualCardinality));
        }
        // NOTE(review): the original code had an if/else on whether the declared
        // primary type was a NODE subtype, but both branches contained the exact
        // same check — collapsed here into a single type test (behavior unchanged).
        // TODO : nodes are currently returned as xs:untypedAtomic; node-typed
        // variables may need dedicated handling.
        if (!val.isEmpty() && !Type.subTypeOf(val.getItemType(), var.getSequenceType().getPrimaryType())) {
            throw new XPathException("XPTY0004: Invalid type for variable $" + var.getQName() + ". Expected " + Type.getTypeName(var.getSequenceType().getPrimaryType()) + ", got " + Type.getTypeName(val.getItemType()));
        }
    }
    //TODO : should we allow global variable *re*declaration ?
    var.setValue(val);
    return var;
}
@Override
public Variable resolveVariable(final String name) throws XPathException {
// Parses the lexical QName, then delegates to the QName-based overload.
try {
final QName qn = QName.parse(this, name, null);
return resolveVariable(qn);
} catch (final QName.IllegalQNameException e) {
throw new XPathException(ErrorCodes.XPST0081, "No namespace defined for prefix " + name);
}
}
/**
 * Resolves a variable by searching, in order: the local scope chain, the
 * module bound to the variable's namespace, and finally the globals.
 *
 * @return the variable, or null if it is not bound anywhere
 */
@Override
public Variable resolveVariable(final QName qname) throws XPathException {
    // 1. locally declared variables
    final Variable local = resolveLocalVariable(qname);
    if (local != null) {
        return local;
    }
    // 2. variables declared in a module for this namespace
    final Module module = getModule(qname.getNamespaceURI());
    if (module != null) {
        final Variable fromModule = module.resolveVariable(qname);
        if (fromModule != null) {
            return fromModule;
        }
    }
    // 3. globally declared variables (may still be null — callers handle it)
    return globalVariables.get(qname);
}

// Direct lookup in the global variable map only.
Variable resolveGlobalVariable(final QName qname) {
    return globalVariables.get(qname);
}

/**
 * Walks the local variable chain backwards from the most recent binding,
 * stopping at the current scope boundary (top of the context stack).
 */
protected Variable resolveLocalVariable(final QName qname) throws XPathException {
    final LocalVariable scopeBoundary = contextStack.peek();
    LocalVariable current = lastVar;
    while (current != null && current != scopeBoundary) {
        if (qname.equals(current.getQName())) {
            return current;
        }
        current = current.before;
    }
    return null;
}
/**
 * Returns true if the variable is declared either in the module bound to
 * its namespace or as a global variable.
 */
@Override
public boolean isVarDeclared(final QName qname) {
    final Module module = getModule(qname.getNamespaceURI());
    if (module != null && module.isVarDeclared(qname)) {
        return true;
    }
    return globalVariables.get(qname) != null;
}
/**
 * Returns all visible variables: the globals, overlaid with the currently
 * visible local variables (a local entry replaces a global one of the same
 * name).
 *
 * Previously this method duplicated the scope-chain walk from
 * {@link #getLocalVariables()}; it now delegates to it instead.
 *
 * @return a fresh, mutable map of variable name to variable
 */
@Override
public Map<QName, Variable> getVariables() {
    final Map<QName, Variable> variables = new HashMap<>(globalVariables);
    variables.putAll(getLocalVariables());
    return variables;
}
/**
 * Collects the currently visible local variables by walking the scope chain
 * backwards from the most recent binding up to the current scope boundary.
 *
 * @return a fresh, mutable map of variable name to variable
 */
@Override
public Map<QName, Variable> getLocalVariables() {
    final Map<QName, Variable> visible = new HashMap<>();
    final LocalVariable scopeBoundary = contextStack.peek();
    LocalVariable current = lastVar;
    while (current != null && current != scopeBoundary) {
        visible.put(current.getQName(), current);
        current = current.before;
    }
    return visible;
}
/**
 * Return a copy of all currently visible local variables.
 * Used by {@link InlineFunction} to implement closures.
 *
 * @return currently visible local variables as a stack, or null when there
 *         are no visible local variables
 */
public List<ClosureVariable> getLocalStack() {
List<ClosureVariable> closure = null;
final LocalVariable end = contextStack.peek();
// Walk backwards from the most recent binding to the current scope boundary.
for (LocalVariable var = lastVar; var != null; var = var.before) {
if (var == end) {
break;
}
// Allocate lazily so an empty scope returns null rather than an empty list.
if (closure == null) {
closure = new ArrayList<>(6);
}
closure.add(new ClosureVariable(var));
}
return closure;
}
/**
 * Returns a defensive copy of the global variable map.
 */
@Override
public Map<QName, Variable> getGlobalVariables() {
    final Map<QName, Variable> copy = new HashMap<>(globalVariables);
    return copy;
}

/**
 * Restore a saved stack of local variables. Used to implement closures.
 * The saved stack is replayed in reverse order so the resulting scope chain
 * matches the one captured by {@link #getLocalStack()}.
 *
 * @param stack the stack of local variables
 * @throws XPathException if the stack cannot be restored
 */
public void restoreStack(final List<ClosureVariable> stack) throws XPathException {
    for (int i = stack.size() - 1; i >= 0; i--) {
        declareVariableBinding(new ClosureVariable(stack.get(i)));
    }
}
// XPath 1.0 backwards-compatibility mode flag.
@Override
public void setBackwardsCompatibility(boolean backwardsCompatible) {
this.backwardsCompatible = backwardsCompatible;
}
@Override
public boolean isBackwardsCompatible() {
return this.backwardsCompatible;
}
@Override
public boolean isRaiseErrorOnFailedRetrieval() {
return raiseErrorOnFailedRetrieval;
}
public Database getDatabase() {
return db;
}
// The broker is resolved per call from the database's active-broker slot,
// not cached, so it always reflects the calling thread's broker.
@Override
public DBBroker getBroker() {
return db.getActiveBroker();
}
@Override
public Subject getSubject() {
return getBroker().getCurrentSubject();
}
/**
 * If there is an HTTP Session, and a User has been stored in the session then this will return the user object from the session.
 *
 * @return The user or null if there is no session or no user
 */
Subject getUserFromHttpSession() {
final Optional<RequestWrapper> maybeRequest = Optional.ofNullable(getHttpContext())
.map(HttpContext::getRequest);
if (maybeRequest.isPresent()) {
final RequestWrapper request = maybeRequest.get();
// Prefer explicit user/password request attributes: authenticate with them.
final Object user = request.getAttribute(HTTP_REQ_ATTR_USER);
final Object passAttr = request.getAttribute(HTTP_REQ_ATTR_PASS);
if (user != null) {
final String password = passAttr == null ? null : passAttr.toString();
try {
return getBroker().getBrokerPool().getSecurityManager().authenticate(user.toString(), password);
} catch (final AuthenticationException e) {
// Authentication failure falls through to return null below.
LOG.error("User can not be authenticated: " + user.toString());
}
} else {
// No request attributes: fall back to a subject stored in the HTTP session.
final Optional<SessionWrapper> maybeSession = Optional.ofNullable(getHttpContext())
.map(HttpContext::getSession);
if (maybeSession.isPresent()) {
return (Subject) maybeSession.get().getAttribute(HTTP_SESSIONVAR_XMLDB_USER);
}
}
}
return null;
}
/**
 * The builder used for creating in-memory document fragments.
 * Lazily created; saved/restored via the fragment stack in
 * pushDocumentContext()/popDocumentContext().
 */
private MemTreeBuilder documentBuilder = null;
@Override
public MemTreeBuilder getDocumentBuilder() {
// Lazily create the builder and open an (implicit) document on first use.
if (documentBuilder == null) {
documentBuilder = new MemTreeBuilder(this);
documentBuilder.startDocument();
}
return documentBuilder;
}
@Override
public MemTreeBuilder getDocumentBuilder(final boolean explicitCreation) {
// Same as above, but the caller controls whether the document is marked
// as explicitly created.
if (documentBuilder == null) {
documentBuilder = new MemTreeBuilder(this);
documentBuilder.startDocument(explicitCreation);
}
return documentBuilder;
}
// Drops the current in-memory document builder; the next getDocumentBuilder()
// call creates a fresh one.
private void resetDocumentBuilder() {
this.documentBuilder = null;
}
private void setDocumentBuilder(final MemTreeBuilder documentBuilder) {
this.documentBuilder = documentBuilder;
}
@Override
public NamePool getSharedNamePool() {
// Lazily created name pool shared across in-memory documents of this context.
if (sharedNamePool == null) {
sharedNamePool = new NamePool();
}
return sharedNamePool;
}
@Override
public XQueryContext getContext() {
// Always null here — presumably meaning this context has no parent context;
// subclasses (e.g. a module context) may override. TODO confirm intent.
return null;
}
// --- Debugger hooks -------------------------------------------------------
// Each hook forwards to the attached debuggee joint, if any; with no debugger
// attached (debuggeeJoint == null) they are all no-ops.
@Override
public void prologEnter(final Expression expr) {
if (debuggeeJoint != null) {
debuggeeJoint.prologEnter(expr);
}
}
@Override
public void expressionStart(final Expression expr) throws TerminatedException {
if (debuggeeJoint != null) {
debuggeeJoint.expressionStart(expr);
}
}
@Override
public void expressionEnd(final Expression expr) {
if (debuggeeJoint != null) {
debuggeeJoint.expressionEnd(expr);
}
}
@Override
public void stackEnter(final Expression expr) throws TerminatedException {
if (debuggeeJoint != null) {
debuggeeJoint.stackEnter(expr);
}
}
@Override
public void stackLeave(final Expression expr) {
if (debuggeeJoint != null) {
debuggeeJoint.stackLeave(expr);
}
}
// --- Watchdog checkpoints -------------------------------------------------
// Let the query watchdog check timeouts/limits; may throw TerminatedException.
@Override
public void proceed() throws TerminatedException {
getWatchDog().proceed(null);
}
@Override
public void proceed(final Expression expr) throws TerminatedException {
getWatchDog().proceed(expr);
}
@Override
public void proceed(final Expression expr, final MemTreeBuilder builder) throws TerminatedException {
getWatchDog().proceed(expr, builder);
}
@Override
public void setWatchDog(final XQueryWatchDog watchdog) {
this.watchdog = watchdog;
}
@Override
public XQueryWatchDog getWatchDog() {
return watchdog;
}
// Sentinel pushed onto the fragment stack to represent "no builder was
// active"; compared by identity in popDocumentContext().
private static final MemTreeBuilder NULL_DOCUMENT_BUILDER = new MemTreeBuilder();
/**
 * Saves the current in-memory document builder on the fragment stack (a
 * sentinel stands in when no builder is active) and clears the current one.
 */
@Override
public void pushDocumentContext() {
    if (documentBuilder == null) {
        fragmentStack.push(NULL_DOCUMENT_BUILDER);
        return;
    }
    fragmentStack.push(documentBuilder);
    resetDocumentBuilder();
}

/**
 * Restores the previously saved document builder from the fragment stack;
 * the sentinel restores "no active builder". Does nothing on an empty stack.
 */
@Override
public void popDocumentContext() {
    if (fragmentStack.isEmpty()) {
        return;
    }
    final MemTreeBuilder restored = fragmentStack.pop();
    setDocumentBuilder(restored == NULL_DOCUMENT_BUILDER ? null : restored);
}
@Override
public void setBaseURI(final AnyURIValue uri) {
    setBaseURI(uri, false);
}

/**
 * Sets the static base URI. A base URI declared in the query prolog wins: once
 * one has been set with {@code setInProlog == true}, later calls are ignored.
 *
 * @param uri the new base URI; null resets it to {@code AnyURIValue.EMPTY_URI}
 * @param setInProlog true when the URI comes from a prolog declaration
 */
@Override
public void setBaseURI(final AnyURIValue uri, final boolean setInProlog) {
    if (baseURISetInProlog) {
        return;
    }
    if (uri == null) {
        // BUG FIX: the old code assigned EMPTY_URI here but then fell through
        // and unconditionally overwrote baseURI with null, so the default
        // never took effect.
        baseURI = AnyURIValue.EMPTY_URI;
    } else {
        baseURI = uri;
    }
    baseURISetInProlog = setInProlog;
}
// Base path used to resolve relative module location hints.
@Override
public void setModuleLoadPath(final String path) {
this.moduleLoadPath = path;
}
@Override
public String getModuleLoadPath() {
return moduleLoadPath;
}
@Override
public boolean isBaseURIDeclared() {
// A base URI counts as declared only when set to something other than the
// empty-URI default.
return baseURI != null && !baseURI.equals(AnyURIValue.EMPTY_URI);
}
// Returns the static base URI as currently set; may be null or EMPTY_URI when
// none was established (see isBaseURIDeclared()). Deliberately does not raise
// XPST0001 — see the commented discussion below.
@Override
public AnyURIValue getBaseURI() throws XPathException {
// the base URI in the static context is established according to the
// principles outlined in [RFC3986] Section 5.1—that is, it defaults
// first to the base URI of the encapsulating entity, then to the URI
// used to retrieve the entity, and finally to an implementation-defined
// default. If the URILiteral in the base URI declaration is a relative
// URI, then it is made absolute by resolving it with respect to this
// same hierarchy.
// It is not intrinsically an error if this process fails to establish
// an absolute base URI; however, the base URI in the static context
// is then undefined, and any attempt to use its value may result in
// an error [err:XPST0001].
// if ((baseURI == null) || baseURI.equals(AnyURIValue.EMPTY_URI)) {
// //throw new XPathException(ErrorCodes.XPST0001, "Base URI of the static context has not been assigned a value.");
// // We catch and resolve this to the XmlDbURI.ROOT_COLLECTION_URI
// // at least in DocumentImpl so maybe we should do it here./ljo
// }
return baseURI;
}
// Records the current context item position and its enclosing sequence
// (the dynamic context for expressions like position()).
@Override
public void setContextSequencePosition(final int pos, final Sequence sequence) {
contextPosition = pos;
contextSequence = sequence;
}
@Override
public int getContextPosition() {
return contextPosition;
}
@Override
public Sequence getContextSequence() {
return contextSequence;
}
@Override
public void pushInScopeNamespaces() {
// Default: the current in-scope namespaces are inherited by the new scope.
pushInScopeNamespaces(true);
}
@Override
public void pushInScopeNamespaces(final boolean inherit) {
// Saves the four namespace maps on the shared stack and starts a fresh
// in-scope map set. Push order MUST mirror the pop order in
// popInScopeNamespaces() below (LIFO).
//TODO : push into an inheritedInScopeNamespaces HashMap... and return an empty HashMap
final Map<String, String> m = new HashMap<>(inScopeNamespaces);
final Map<String, String> p = new HashMap<>(inScopePrefixes);
namespaceStack.push(inheritedInScopeNamespaces);
namespaceStack.push(inheritedInScopePrefixes);
namespaceStack.push(inScopeNamespaces);
namespaceStack.push(inScopePrefixes);
//Current namespaces now become inherited just like the previous inherited ones
if (inherit) {
inheritedInScopeNamespaces = new HashMap<>(inheritedInScopeNamespaces);
inheritedInScopeNamespaces.putAll(m);
inheritedInScopePrefixes = new HashMap<>(inheritedInScopePrefixes);
inheritedInScopePrefixes.putAll(p);
} else {
inheritedInScopeNamespaces = new HashMap<>();
inheritedInScopePrefixes = new HashMap<>();
}
//TODO : consider dynamic instanciation
inScopeNamespaces = new HashMap<>();
inScopePrefixes = new HashMap<>();
}
@Override
public void popInScopeNamespaces() {
// Pops in exact reverse order of the pushes above.
inScopePrefixes = namespaceStack.pop();
inScopeNamespaces = namespaceStack.pop();
inheritedInScopePrefixes = namespaceStack.pop();
inheritedInScopeNamespaces = namespaceStack.pop();
}
@Override
public void pushNamespaceContext() {
// Saves the static namespace maps and replaces them with working copies;
// push order mirrors the pop order in popNamespaceContext().
final Map<String, String> m = new HashMap<>(staticNamespaces);
final Map<String, String> p = new HashMap<>(staticPrefixes);
namespaceStack.push(staticNamespaces);
namespaceStack.push(staticPrefixes);
staticNamespaces = m;
staticPrefixes = p;
}
@Override
public void popNamespaceContext() {
staticPrefixes = namespaceStack.pop();
staticNamespaces = namespaceStack.pop();
}
@Override
public LocalVariable markLocalVariables(final boolean newContext) {
// Marks the current end of the local variable chain so the scope can later
// be unwound via popLocalVariables(). When newContext is true, the mark is
// also pushed as a scope boundary onto the context stack.
if (newContext) {
if (lastVar == null) {
// Seed the chain with a placeholder so there is something to mark.
lastVar = new LocalVariable(QName.EMPTY_QNAME);
}
contextStack.push(lastVar);
}
variableStackSize++;
return lastVar;
}
@Override
public void popLocalVariables(@Nullable final LocalVariable var) {
popLocalVariables(var, null);
}
/**
 * Restore the local variable stack to the position marked by variable var.
 *
 * @param var only clear variables after this variable, or null
 * @param resultSeq the result sequence; passed to each destroyed variable so
 *        values still referenced by the result are not released
 */
public void popLocalVariables(@Nullable final LocalVariable var, final Sequence resultSeq) {
if (var != null) {
// clear all variables registered after var. they should be out of scope.
// Closure variables are skipped: they outlive this scope.
LocalVariable outOfScope = var.after;
while (outOfScope != null) {
if (outOfScope != var && !outOfScope.isClosureVar()) {
outOfScope.destroy(this, resultSeq);
}
outOfScope = outOfScope.after;
}
// reset the stack
var.after = null;
// Pop the scope boundary if var was the one pushed by markLocalVariables().
if (!contextStack.isEmpty() && (var == contextStack.peek())) {
contextStack.pop();
}
}
lastVar = var;
variableStackSize--;
}
/**
 * Register an inline function using closure variables so it can be cleared
 * after query execution.
 *
 * @param func an inline function definition using closure variables
 */
void pushClosure(final UserDefinedFunction func) {
closures.add(func);
}
@Override
public int getCurrentStackSize() {
return variableStackSize;
}
// Pushes a function signature onto the call stack; used by
// tailRecursiveCall() to detect recursion.
@Override
public void functionStart(final FunctionSignature signature) {
callStack.push(signature);
}
@Override
public void functionEnd() {
if (callStack.isEmpty()) {
// An empty stack here means start/end calls are unbalanced — likely a
// shared-context concurrency problem; warn instead of throwing.
LOG.warn("Function call stack is empty, but XQueryContext.functionEnd() was called. This "
+ "could indicate a concurrency issue (shared XQueryContext?)");
} else {
callStack.pop();
}
}
@Override
public boolean tailRecursiveCall(final FunctionSignature signature) {
// The signature already being on the call stack indicates a recursive call.
return callStack.contains(signature);
}
// Maps a module namespace to a fixed database URI for later import resolution.
@Override
public void mapModule(final String namespace, final XmldbURI uri) {
mappedModules.put(namespace, uri);
}
/**
 * Imports an XQuery module into this context. Resolution order: an already
 * loaded root module, the EXPath repository, a configured static location
 * mapping, then the location hint itself (Java class, database resource, or
 * file/URL source). On success the module's prefix/namespace are declared.
 *
 * @param namespaceURI the module namespace, may be null (taken from the module)
 * @param prefix the prefix to bind, may be null (module's default prefix)
 * @param location the location hint, may be null
 * @return the imported module, or null if it could not be resolved
 * @throws XPathException XQST0070 for a reserved prefix, XQST0088 for an
 *         empty namespace, XQST0059 when the module cannot be loaded
 */
@Override
public Module importModule(String namespaceURI, String prefix, String location)
throws XPathException {
if (XML_NS_PREFIX.equals(prefix) || XMLNS_ATTRIBUTE.equals(prefix)) {
throw new XPathException(ErrorCodes.XQST0070, "The prefix declared for a module import must not be 'xml' or 'xmlns'.");
}
if (namespaceURI != null && namespaceURI.isEmpty()) {
throw new XPathException(ErrorCodes.XQST0088, "The first URILiteral in a module import must be of nonzero length.");
}
Module module = null;
if (namespaceURI != null) {
module = getRootModule(namespaceURI);
}
if (module != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Module " + namespaceURI + " already present.");
}
// Set locally to remember the dependency in case it was inherited.
setModule(namespaceURI, module);
} else {
// if location is not specified, try to resolve in expath repo
if (location == null && namespaceURI != null) {
module = resolveInEXPathRepository(namespaceURI, prefix);
}
if (module == null) {
if (location == null && namespaceURI != null) {
// check if there's a static mapping in the configuration
location = getModuleLocation(namespaceURI);
if (location == null) {
// Fall back to using the namespace URI itself as the location.
location = namespaceURI;
}
}
//Is the module's namespace mapped to a URL ?
if (mappedModules.containsKey(location)) {
location = mappedModules.get(location).toString();
}
// is it a Java module?
if (location.startsWith(JAVA_URI_START)) {
location = location.substring(JAVA_URI_START.length());
module = loadBuiltInModule(namespaceURI, location);
} else {
Source moduleSource;
if (location.startsWith(XmldbURI.XMLDB_URI_PREFIX)
|| ((location.indexOf(':') == -1) && moduleLoadPath.startsWith(XmldbURI.XMLDB_URI_PREFIX))) {
// Is the module source stored in the database?
try {
XmldbURI locationUri = XmldbURI.xmldbUriFor(location);
// Resolve a relative location against the module load path.
if (moduleLoadPath.startsWith(XmldbURI.XMLDB_URI_PREFIX)) {
final XmldbURI moduleLoadPathUri = XmldbURI.xmldbUriFor(moduleLoadPath);
locationUri = moduleLoadPathUri.resolveCollectionPath(locationUri);
}
try (final LockedDocument lockedSourceDoc = getBroker().getXMLResource(locationUri.toCollectionPathURI(), LockMode.READ_LOCK)) {
final DocumentImpl sourceDoc = lockedSourceDoc == null ? null : lockedSourceDoc.getDocument();
if (sourceDoc == null) {
throw moduleLoadException("Module location hint URI '" + location + "' does not refer to anything.", location);
}
// The stored resource must be a binary document with an XQuery MIME type.
if ((sourceDoc.getResourceType() != DocumentImpl.BINARY_FILE) || !"application/xquery".equals(sourceDoc.getMetadata().getMimeType())) {
throw moduleLoadException("Module location hint URI '" + location + "' does not refer to an XQuery.", location);
}
moduleSource = new DBSource(getBroker(), (BinaryDocument) sourceDoc, true);
// we don't know if the module will get returned, oh well
module = compileOrBorrowModule(prefix, namespaceURI, location, moduleSource);
} catch (final PermissionDeniedException e) {
throw moduleLoadException("Permission denied to read module source from location hint URI '" + location + ".", location, e);
}
} catch (final URISyntaxException e) {
throw moduleLoadException("Invalid module location hint URI '" + location + "'.", location, e);
}
} else {
// No. Load from file or URL
try {
//TODO: use URIs to ensure proper resolution of relative locations
moduleSource = SourceFactory.getSource(getBroker(), moduleLoadPath, location, true);
if (moduleSource == null) {
throw moduleLoadException("Source for module '" + namespaceURI + "' not found module location hint URI '" + location + "'.", location);
}
} catch (final MalformedURLException e) {
throw moduleLoadException("Invalid module location hint URI '" + location + "'.", location, e);
} catch (final IOException e) {
throw moduleLoadException("Source for module '" + namespaceURI + "' could not be read, module location hint URI '" + location + "'.", location, e);
} catch (final PermissionDeniedException e) {
throw moduleLoadException("Permission denied to read module source from location hint URI '" + location + ".", location, e);
}
// we don't know if the module will get returned, oh well
module = compileOrBorrowModule(prefix, namespaceURI, location, moduleSource);
}
}
} // NOTE: expathrepo related, closes the EXPath else (if module != null)
}
if (module != null) {
// Fill in namespace/prefix from the module itself when not supplied.
if (namespaceURI == null) {
namespaceURI = module.getNamespaceURI();
}
if (prefix == null) {
prefix = module.getDefaultPrefix();
}
declareNamespace(prefix, namespaceURI);
}
return module;
}
// Builds (does not throw) an XQST0059 module-load error; callers write
// "throw moduleLoadException(...)". NOTE(review): the 'throws XPathException'
// clause appears vestigial — these methods return rather than throw; kept for
// signature compatibility.
protected XPathException moduleLoadException(final String message, final String moduleLocation)
throws XPathException {
return new XPathException(ErrorCodes.XQST0059, message, new ValueSequence(new StringValue(moduleLocation)));
}
// Variant that also attaches the underlying cause.
protected XPathException moduleLoadException(final String message, final String moduleLocation, final Exception e)
throws XPathException {
return new XPathException(ErrorCodes.XQST0059, message, new ValueSequence(new StringValue(moduleLocation)), e);
}
@SuppressWarnings("unchecked")
@Override
public String getModuleLocation(final String namespaceURI) {
// Looks up a statically configured namespace-to-location mapping.
final Map<String, String> moduleMap =
(Map) getBroker().getConfiguration().getProperty(PROPERTY_STATIC_MODULE_MAP);
return moduleMap.get(namespaceURI);
}
@SuppressWarnings("unchecked")
@Override
public Iterator<String> getMappedModuleURIs() {
final Map<String, String> moduleMap =
(Map) getBroker().getConfiguration().getProperty(PROPERTY_STATIC_MODULE_MAP);
return moduleMap.keySet().iterator();
}
/**
 * Compile or borrow an already compiled module from the cache, then register
 * it in this context and declare its module variables.
 *
 * @param prefix the module namespace prefix
 * @param namespaceURI the module namespace URI
 * @param location the location hint
 * @param source the source for the module
 *
 * @return the module or null
 *
 * @throws XPathException if the module could not be loaded (XQST0059) or compiled (XPST0003)
 */
private ExternalModule compileOrBorrowModule(final String prefix, final String namespaceURI, final String location,
final Source source) throws XPathException {
final ExternalModule module = compileModule(prefix, namespaceURI, location, source);
if (module != null) {
setModule(module.getNamespaceURI(), module);
declareModuleVars(module);
}
return module;
}
/**
 * Compile an XQuery Module.
 *
 * @param prefix the namespace prefix of the module.
 * @param namespaceURI the namespace URI of the module, or null to take it from the source.
 * @param location the location of the module
 * @param source the source of the module.
 * @return The compiled module, or null if the source is not a module
 * @throws XPathException if the module could not be loaded (XQST0059) or compiled (XPST0003)
 */
private @Nullable
ExternalModule compileModule(final String prefix, String namespaceURI, final String location,
        final Source source) throws XPathException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Loading module from " + location);
    }
    try (final Reader reader = source.getReader()) {
        if (reader == null) {
            throw moduleLoadException("failed to load module: '" + namespaceURI + "' from: " +
                    "'" + source + "', location: '" + location + "'. Source not found. ", location);
        }
        if (namespaceURI == null) {
            // no namespace given by the caller: probe the source; a null QName
            // means the source is not a library module
            final QName qname = source.isModule();
            if (qname == null) {
                return null;
            }
            namespaceURI = qname.getNamespaceURI();
        }
        final ExternalModuleImpl modExternal = new ExternalModuleImpl(namespaceURI, prefix);
        // register before parsing -- presumably so that imports encountered
        // during compilation can already see this module; TODO confirm
        setModule(namespaceURI, modExternal);
        final XQueryContext modContext = new ModuleContext(this, prefix, namespaceURI, location);
        modExternal.setContext(modContext);
        final XQueryLexer lexer = new XQueryLexer(modContext, reader);
        final XQueryParser parser = new XQueryParser(lexer);
        final XQueryTreeParser astParser = new XQueryTreeParser(modContext, modExternal);
        try {
            // first pass: parse the module source
            parser.xpath();
            if (parser.foundErrors()) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(parser.getErrorMessage());
                }
                throw new XPathException(ErrorCodes.XPST0003, "error found while loading module from " + location + ": " + parser.getErrorMessage());
            }
            final AST ast = parser.getAST();
            // second pass: build the expression tree from the AST
            final PathExpr path = new PathExpr(modContext);
            astParser.xpath(ast, path);
            if (astParser.foundErrors()) {
                throw new XPathException(ErrorCodes.XPST0003, "error found while loading module from " + location + ": " + astParser.getErrorMessage(), astParser.getLastException());
            }
            modExternal.setRootExpression(path);
            // the namespace declared by the module must match the one requested
            // by the import statement (XQST0059)
            // NOTE(review): namespaceURI cannot be null here (assigned above when
            // null), so the null check is redundant
            if (namespaceURI != null && !modExternal.getNamespaceURI().equals(namespaceURI)) {
                throw new XPathException(ErrorCodes.XQST0059, "namespace URI declared by module (" + modExternal.getNamespaceURI() + ") does not match namespace URI in import statement, which was: " + namespaceURI);
            }
            // Set source information on module context
            // String sourceClassName = source.getClass().getName();
            // modContext.setSourceKey(source.getKey().toString());
            // Extract the source type from the classname by removing the package prefix and the "Source" suffix
            // modContext.setSourceType( sourceClassName.substring( 17, sourceClassName.length() - 6 ) );
            modExternal.setSource(source);
            modContext.setSource(source);
            modExternal.setIsReady(true);
            return modExternal;
        } catch (final RecognitionException e) {
            // parse error with position information
            throw new XPathException(e.getLine(), e.getColumn(), ErrorCodes.XPST0003, "error found while loading module from " + location + ": " + e.getMessage());
        } catch (final TokenStreamException e) {
            throw new XPathException(ErrorCodes.XPST0003, "error found while loading module from " + location + ": " + e.getMessage(), e);
        } catch (final XPathException e) {
            e.prependMessage("Error while loading module " + location + ": ");
            throw e;
        }
    } catch (final IOException e) {
        throw moduleLoadException("IO exception while loading module '" + namespaceURI + "'" + " from '" + source + "'", location, e);
    }
}
/**
 * Moves all global variables whose QName lies in the module's namespace from
 * this context's global variable map into the given module.
 *
 * @param module the module that should own the variables
 */
private void declareModuleVars(final Module module) {
    final String moduleNS = module.getNamespaceURI();
    for (final Iterator<Variable> i = globalVariables.values().iterator(); i.hasNext(); ) {
        final Variable var = i.next();
        if (moduleNS.equals(var.getQName().getNamespaceURI())) {
            module.declareVariable(var);
            // remove from this context; the module is now the owner
            i.remove();
        }
    }
}

/**
 * Records a function call whose target function has not been declared yet;
 * it will be resolved later by {@link #resolveForwardReferences()}.
 *
 * @param call the unresolved function call
 */
@Override
public void addForwardReference(final FunctionCall call) {
    forwardReferences.add(call);
}
/**
 * Resolves every function call that was recorded as a forward reference,
 * draining the pending stack. Fails with XPST0017 if a referenced function
 * was never declared.
 *
 * @throws XPathException XPST0017 if a call targets an undeclared function
 */
@Override
public void resolveForwardReferences() throws XPathException {
    while (!forwardReferences.isEmpty()) {
        final FunctionCall call = forwardReferences.pop();
        final UserDefinedFunction resolved =
                call.getContext().resolveFunction(call.getQName(), call.getArgumentCount());
        if (resolved == null) {
            throw new XPathException(call, ErrorCodes.XPST0017,
                    "Call to undeclared function: " + call.getQName().getStringValue());
        }
        call.resolveForwardReference(resolved);
    }
}
/**
 * Get environment variables. The variables shall not change
 * during execution of query.
 *
 * NOTE(review): the lazy initialization is not synchronized -- assumes the
 * context is used by a single thread; confirm.
 *
 * @return Map of environment variables
 */
public Map<String, String> getEnvironmentVariables() {
    if (envs == null) {
        // cached on first access so repeated calls see a stable snapshot
        envs = System.getenv();
    }
    return envs;
}

/**
 * Gets the Effective user
 * i.e. the user that the query is executing as
 *
 * @return The Effective User
 */
public Subject getEffectiveUser() {
    return getBroker().getCurrentSubject();
}
/**
 * Gets the Real User
 * i.e. the user that initiated execution of the query
 * Note this is not necessarily the same as the user that the
 * query is executing as
 *
 * @return The Real User
 * @see org.exist.xquery.XQueryContext#getEffectiveUser()
 */
public Subject getRealUser() {
    return realUser;
}

/**
 * Sets the user that initiated execution of the query.
 *
 * @param realUser the initiating user
 */
private void setRealUser(final Subject realUser) {
    this.realUser = realUser;
}
/**
 * Look up a statically declared decimal format.
 *
 * @param qnDecimalFormat the name of the decimal format, or null for the UNNAMED format.
 *
 * @return the decimal format, or null if there is no format matching the name
 */
public @Nullable DecimalFormat getStaticDecimalFormat(@Nullable QName qnDecimalFormat) {
    // a null name addresses the unnamed (default) decimal format
    final QName key = (qnDecimalFormat == null) ? UNNAMED_DECIMAL_FORMAT : qnDecimalFormat;
    return staticDecimalFormats.get(key);
}
/**
 * Set a static decimal format.
 *
 * A later call with the same name replaces the previous format.
 *
 * @param qnDecimalFormat the name of the decimal format
 * @param decimalFormat the decimal format
 */
public void setStaticDecimalFormat(final QName qnDecimalFormat, final DecimalFormat decimalFormat) {
    staticDecimalFormats.put(qnDecimalFormat, decimalFormat);
}
/**
 * Holds a snapshot of the context's module and namespace maps so that the
 * effects of a dynamic import (e.g. util:import-module) can be undone when
 * the context is reset.
 */
private class SavedState {

    private Map<String, Module> modulesSaved = null;
    private Map<String, Module> allModulesSaved = null;
    private Map<String, String> staticNamespacesSaved = null;
    private Map<String, String> staticPrefixesSaved = null;

    /**
     * Snapshot the current maps. Only the first call takes effect; subsequent
     * calls are no-ops until {@link #restore()} clears the snapshot.
     */
    void save() {
        if (modulesSaved == null) {
            // typed copy constructors replace the previous raw-type
            // "new HashMap(...)" calls, so no unchecked suppression is needed
            modulesSaved = new HashMap<>(modules);
            allModulesSaved = new HashMap<>(allModules);
            staticNamespacesSaved = new HashMap<>(staticNamespaces);
            staticPrefixesSaved = new HashMap<>(staticPrefixes);
        }
    }

    /**
     * Restore the maps captured by {@link #save()} and discard the snapshot.
     * No-op if nothing was saved.
     */
    void restore() {
        if (modulesSaved != null) {
            modules = modulesSaved;
            modulesSaved = null;
            allModules = allModulesSaved;
            allModulesSaved = null;
            staticNamespaces = staticNamespacesSaved;
            staticNamespacesSaved = null;
            staticPrefixes = staticPrefixesSaved;
            staticPrefixesSaved = null;
        }
    }
}
/**
 * Before a dynamic import, make sure relevant parts of the current context a saved
 * to the stack. This is important for util:import-module. The context will be restored
 * during {@link #reset()}.
 */
public void saveState() {
    savedState.save();
}

/**
 * Whether the query rewriter/optimizer is enabled for this context.
 *
 * @return true if optimizations are enabled
 */
@Override
public boolean optimizationsEnabled() {
    return enableOptimizer;
}

/**
 * Declares a static (compile-time) option, lazily creating the option list.
 *
 * @param name the option QName as a string
 * @param value the option contents
 * @throws XPathException if the option name cannot be parsed
 */
@Override
public void addOption(final String name, final String value) throws XPathException {
    if (staticOptions == null) {
        staticOptions = new ArrayList<>();
    }
    addOption(staticOptions, name, value);
}

/**
 * Declares a dynamic (runtime) option, lazily creating the option list.
 * Dynamic options take precedence over static ones in {@link #getOption}.
 *
 * @param name the option QName as a string
 * @param value the option contents
 * @throws XPathException if the option name cannot be parsed
 */
@Override
public void addDynamicOption(final String name, final String value) throws XPathException {
    if (dynamicOptions == null) {
        dynamicOptions = new ArrayList<>();
    }
    addOption(dynamicOptions, name, value);
}
/**
 * Adds an option to the given list, replacing any equal option already
 * present, and immediately applies the predefined options it recognises
 * (profiling, watchdog timeout/output size, optimizer switch, implicit
 * timezone, current date/time).
 *
 * @param options the target list (static or dynamic options)
 * @param name the option QName as a string, parsed against the default function namespace
 * @param value the option contents
 * @throws XPathException XPST0081 if the option name cannot be parsed
 */
private void addOption(final List<Option> options, final String name, final String value) throws XPathException {
    final QName qn;
    try {
        qn = QName.parse(this, name, defaultFunctionNamespace);
    } catch (final QName.IllegalQNameException e) {
        // chain the cause so the original parse failure is preserved
        throw new XPathException(ErrorCodes.XPST0081, "No namespace defined for prefix " + name, e);
    }
    final Option option = new Option(qn, value);
    // if the option exists, remove it so we can add the new option;
    // List.remove(Object) removes the first equal element, matching the
    // previous explicit scan-and-break loop
    options.remove(option);
    options.add(option);
    // check predefined options
    if (Option.PROFILE_QNAME.compareTo(qn) == 0) {
        // configure profiling
        profiler.configure(option);
    } else if (Option.TIMEOUT_QNAME.compareTo(qn) == 0) {
        watchdog.setTimeoutFromOption(option);
    } else if (Option.OUTPUT_SIZE_QNAME.compareTo(qn) == 0) {
        watchdog.setMaxNodesFromOption(option);
    } else if (Option.OPTIMIZE_QNAME.compareTo(qn) == 0) {
        final String[] params = option.tokenizeContents();
        if (params.length > 0) {
            final String[] param = Option.parseKeyValuePair(params[0]);
            if (param != null && "enable".equals(param[0])) {
                enableOptimizer = "yes".equals(param[1]);
            }
        }
    }
    //TODO : not sure how these 2 options might/have to be related
    else if (Option.OPTIMIZE_IMPLICIT_TIMEZONE.compareTo(qn) == 0) {
        //TODO : error check
        final Duration duration = TimeUtils.getInstance().newDuration(option.getContents());
        implicitTimeZone = new SimpleTimeZone((int) duration.getTimeInMillis(new Date()), "XQuery context");
    } else if (Option.CURRENT_DATETIME.compareTo(qn) == 0) {
        //TODO : error check
        final DateTimeValue dtv = new DateTimeValue(option.getContents());
        calendar = (XMLGregorianCalendar) dtv.calendar.clone();
    }
}
/**
 * Looks up an option by QName. Dynamic options take precedence over static
 * ones; returns null when the option is declared in neither list.
 *
 * @param qname the option name
 * @return the matching option, or null
 */
@Override
public Option getOption(final QName qname) {
    if (dynamicOptions != null) {
        for (int i = 0; i < dynamicOptions.size(); i++) {
            final Option candidate = dynamicOptions.get(i);
            if (qname.compareTo(candidate.getQName()) == 0) {
                return candidate;
            }
        }
    }
    if (staticOptions != null) {
        for (int i = 0; i < staticOptions.size(); i++) {
            final Option candidate = staticOptions.get(i);
            if (qname.compareTo(candidate.getQName()) == 0) {
                return candidate;
            }
        }
    }
    return null;
}
/**
 * Resolves a pragma by name. Only pragmas in the eXist namespace are
 * recognised; unknown names yield null.
 *
 * @param name the pragma QName as a string
 * @param contents the raw pragma contents
 * @return the pragma implementation, or null if not recognised
 * @throws XPathException XPST0081 if the name cannot be parsed or its
 *         namespace URI is empty
 */
@Override
public Pragma getPragma(final String name, String contents) throws XPathException {
    final QName qname;
    try {
        qname = QName.parse(this, name);
    } catch (final QName.IllegalQNameException e) {
        // chain the cause so the original parse failure is preserved
        throw new XPathException(ErrorCodes.XPST0081, "No namespace defined for prefix " + name, e);
    }
    if (qname.getNamespaceURI().isEmpty()) {
        // use the structured error code instead of embedding "XPST0081:" in
        // the message, consistent with the other throws in this class
        throw new XPathException(ErrorCodes.XPST0081, "pragma's ('" + name + "') namespace URI is empty");
    } else if (Namespaces.EXIST_NS.equals(qname.getNamespaceURI())) {
        contents = StringValue.trimWhitespace(contents);
        if (TimerPragma.TIMER_PRAGMA.equals(qname)) {
            return new TimerPragma(qname, contents);
        }
        if (Optimize.OPTIMIZE_PRAGMA.equals(qname)) {
            return new Optimize(this, qname, contents, true);
        }
        if (ForceIndexUse.EXCEPTION_IF_INDEX_NOT_USED_PRAGMA.equals(qname)) {
            return new ForceIndexUse(qname, contents);
        }
        if (ProfilePragma.PROFILING_PRAGMA.equals(qname)) {
            return new ProfilePragma(qname, contents);
        }
        if (NoIndexPragma.NO_INDEX_PRAGMA.equals(qname)) {
            return new NoIndexPragma(qname, contents);
        }
    }
    return null;
}
/**
 * Stores an in-memory document fragment as a temporary resource in the
 * database.
 *
 * @param doc the in-memory document to store
 * @return the stored persistent document
 * @throws XPathException if the store fails
 */
@Override
public DocumentImpl storeTemporaryDoc(final org.exist.dom.memtree.DocumentImpl doc) throws XPathException {
    try {
        final DocumentImpl targetDoc = getBroker().storeTempResource(doc);
        if (targetDoc == null) {
            throw new XPathException("Internal error: failed to store temporary doc fragment");
        }
        // NOTE(review): logs a full stack trace at WARN on every successful
        // store -- looks like leftover debugging; confirm before removing
        LOG.warn("Stored: " + targetDoc.getDocId() + ": " + targetDoc.getURI(), new Throwable());
        return targetDoc;
    } catch (final EXistException | LockException | PermissionDeniedException e) {
        throw new XPathException(TEMP_STORE_ERROR, e);
    }
}

/**
 * Sets an arbitrary named attribute on this context.
 *
 * @param attribute the attribute name
 * @param value the attribute value
 */
@Override
public void setAttribute(final String attribute, final Object value) {
    attributes.put(attribute, value);
}

/**
 * Retrieves a named attribute previously set on this context.
 *
 * @param attribute the attribute name
 * @return the attribute value, or null if not set
 */
@Override
public Object getAttribute(final String attribute) {
    return attributes.get(attribute);
}
/**
 * Load the default prefix/namespace mappings table and set up internal functions.
 *
 * @param config the configuration
 */
@SuppressWarnings("unchecked")
void loadDefaults(final Configuration config) {
    this.watchdog = new XQueryWatchDog(this);
    /*
    SymbolTable syms = broker.getSymbols();
    String[] pfx = syms.defaultPrefixList();
    namespaces = new HashMap(pfx.length);
    prefixes = new HashMap(pfx.length);
    String sym;
    for (int i = 0; i < pfx.length; i++) {
        sym = syms.getDefaultNamespace(pfx[i]);
        namespaces.put(pfx[i], sym);
        prefixes.put(sym, pfx[i]);
    }
    */
    loadDefaultNS();
    // Switch: enable optimizer (defaults to off when the property is absent)
    Object param = config.getProperty(PROPERTY_ENABLE_QUERY_REWRITING);
    enableOptimizer = (param != null) && "yes".equals(param.toString());
    // Switch: Backward compatibility (defaults to on when the property is absent)
    param = config.getProperty(PROPERTY_XQUERY_BACKWARD_COMPATIBLE);
    backwardsCompatible = (param == null) || "yes".equals(param.toString());
    // Switch: raiseErrorOnFailedRetrieval (defaults to off when absent)
    final Boolean option = ((Boolean) config.getProperty(PROPERTY_XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL));
    raiseErrorOnFailedRetrieval = (option != null) && option;
    // Get map of built-in modules
    final Map<String, Class<Module>> builtInModules = (Map) config.getProperty(PROPERTY_BUILT_IN_MODULES);
    if (builtInModules != null) {
        // Iterate on all map entries
        for (final Map.Entry<String, Class<Module>> entry : builtInModules.entrySet()) {
            // Get URI and class
            final String namespaceURI = entry.getKey();
            final Class<Module> moduleClass = entry.getValue();
            // first check if the module has already been loaded in the parent context
            final Module module = getModule(namespaceURI);
            if (module == null) {
                // Module does not exist yet, instantiate
                instantiateModule(namespaceURI, moduleClass,
                        (Map<String, Map<String, List<? extends Object>>>) config.getProperty(PROPERTY_MODULE_PARAMETERS));
            } else if (getPrefixForURI(module.getNamespaceURI()) == null && !module.getDefaultPrefix().isEmpty()) {
                // make sure the namespaces of default modules are known,
                // even if they were imported in a parent context
                try {
                    declareNamespace(module.getDefaultPrefix(), module.getNamespaceURI());
                } catch (final XPathException e) {
                    LOG.warn("Internal error while loading default modules: " + e.getMessage(), e);
                }
            }
        }
    }
}
/**
 * Load default namespaces, e.g. xml, xsi, xdt, fn, local, exist and dbgp.
 */
private void loadDefaultNS() {
    try {
        // default namespaces: the xml prefix is installed directly into the
        // maps, the rest go through declareNamespace
        staticNamespaces.put(XML_NS_PREFIX, XML_NS);
        staticPrefixes.put(XML_NS, XML_NS_PREFIX);
        declareNamespace("xs", Namespaces.SCHEMA_NS);
        declareNamespace("xsi", Namespaces.SCHEMA_INSTANCE_NS);
        //required for backward compatibility
        declareNamespace("xdt", Namespaces.XPATH_DATATYPES_NS);
        declareNamespace("fn", Namespaces.XPATH_FUNCTIONS_NS);
        declareNamespace("local", Namespaces.XQUERY_LOCAL_NS);
        declareNamespace(Namespaces.W3C_XQUERY_XPATH_ERROR_PREFIX, Namespaces.W3C_XQUERY_XPATH_ERROR_NS);
        //*not* as standard NS
        declareNamespace(Namespaces.EXIST_NS_PREFIX, Namespaces.EXIST_NS);
        declareNamespace(Namespaces.EXIST_JAVA_BINDING_NS_PREFIX, Namespaces.EXIST_JAVA_BINDING_NS);
        declareNamespace(Namespaces.EXIST_XQUERY_XPATH_ERROR_PREFIX, Namespaces.EXIST_XQUERY_XPATH_ERROR_NS);
        //TODO : include "err" namespace ?
        declareNamespace("dbgp", Debuggee.NAMESPACE_URI);
    } catch (final XPathException e) {
        //ignored because it should never happen
        if (LOG.isDebugEnabled()) {
            LOG.debug(e);
        }
    }
}
/**
 * Registers a document-update listener; lazily subscribes this context's
 * aggregating listener with the broker pool's notification service on first
 * registration.
 *
 * @param listener the listener to add
 */
@Override
public void registerUpdateListener(final UpdateListener listener) {
    if (updateListener == null) {
        updateListener = new ContextUpdateListener();
        final DBBroker broker = getBroker();
        broker.getBrokerPool().getNotificationService().subscribe(updateListener);
    }
    updateListener.addListener(listener);
}

/**
 * Unsubscribes the aggregating update listener from the notification
 * service (if one was created) and discards it.
 */
protected void clearUpdateListeners() {
    if (updateListener != null) {
        final DBBroker broker = getBroker();
        broker.getBrokerPool().getNotificationService().unsubscribe(updateListener);
    }
    updateListener = null;
}
/**
 * Copies serialization options declared on this context into the given
 * properties. Dynamic options always apply; static options only fill in
 * keys not already present (so dynamic options win).
 *
 * @param properties the serialization properties to populate
 * @throws XPathException if an option value is invalid
 */
@Override
public void checkOptions(final Properties properties) throws XPathException {
    // handle the legacy exist:serialize pragma first
    checkLegacyOptions(properties);
    if (dynamicOptions != null) {
        for (final Option option : dynamicOptions) {
            if (Namespaces.XSLT_XQUERY_SERIALIZATION_NS.equals(option.getQName().getNamespaceURI())) {
                SerializerUtils.setProperty(option.getQName().getLocalPart(), option.getContents(), properties,
                        inScopeNamespaces::get);
            }
        }
    }
    if (staticOptions != null) {
        for (final Option option : staticOptions) {
            if (Namespaces.XSLT_XQUERY_SERIALIZATION_NS.equals(option.getQName().getNamespaceURI())
                    && !properties.containsKey(option.getQName().getLocalPart())) {
                SerializerUtils.setProperty(option.getQName().getLocalPart(), option.getContents(), properties,
                        inScopeNamespaces::get);
            }
        }
    }
}

/**
 * Legacy method to check serialization properties set via option exist:serialize.
 *
 * @param properties the serialization properties
 * @throws XPathException if there is an unknown serialization property
 */
private void checkLegacyOptions(final Properties properties) throws XPathException {
    final Option pragma = getOption(Option.SERIALIZE_QNAME);
    if (pragma == null) {
        return;
    }
    final String[] contents = pragma.tokenizeContents();
    for (final String content : contents) {
        final String[] pair = Option.parseKeyValuePair(content);
        if (pair == null) {
            throw new XPathException("Unknown parameter found in " + pragma.getQName().getStringValue()
                    + ": '" + content + "'");
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Setting serialization property from pragma: " + pair[0] + " = " + pair[1]);
        }
        properties.setProperty(pair[0], pair[1]);
    }
}
/**
 * Attaches the debugger joint for this context.
 *
 * @param joint the debuggee joint
 */
@Override
public void setDebuggeeJoint(final DebuggeeJoint joint) {
    //XXX: if (debuggeeJoint != null) ???
    debuggeeJoint = joint;
}

/**
 * @return the attached debuggee joint, or null if none
 */
@Override
public DebuggeeJoint getDebuggeeJoint() {
    return debuggeeJoint;
}

/**
 * Whether this context is actively being debugged: requires both an
 * attached joint and a declared debug session variable.
 *
 * @return true if in debug mode
 */
@Override
public boolean isDebugMode() {
    return debuggeeJoint != null && isVarDeclared(Debuggee.SESSION);
}

/**
 * Whether a debug session variable is declared (debugging was requested),
 * regardless of whether a joint is attached yet.
 *
 * @return true if debug mode is required
 */
@Override
public boolean requireDebugMode() {
    return isVarDeclared(Debuggee.SESSION);
}
// Binary values created during query execution, tracked as a stack so their
// reference counts can be adjusted when entering/exiting enclosed expressions.
private Deque<BinaryValue> binaryValueInstances;

/**
 * Called on entering an enclosed expression: takes an extra shared reference
 * on every tracked binary value so they survive the inner scope.
 */
void enterEnclosedExpr() {
    if (binaryValueInstances != null) {
        final Iterator<BinaryValue> it = binaryValueInstances.descendingIterator();
        while (it.hasNext()) {
            it.next().incrementSharedReferences();
        }
    }
}

/**
 * Called on exiting an enclosed expression: releases one reference on each
 * tracked binary value and eagerly drops those that are now fully closed.
 */
void exitEnclosedExpr() {
    if (binaryValueInstances != null) {
        final Iterator<BinaryValue> it = binaryValueInstances.iterator();
        List<BinaryValue> destroyable = null;
        while (it.hasNext()) {
            try {
                final BinaryValue bv = it.next();
                bv.close(); // really just decrements a reference
                if (bv.isClosed()) {
                    // collected separately to avoid mutating the deque mid-iteration
                    if (destroyable == null) {
                        destroyable = new ArrayList<>();
                    }
                    destroyable.add(bv);
                }
            } catch (final IOException e) {
                LOG.warn("Unable to close binary reference on exiting enclosed expression: " + e.getMessage(), e);
            }
        }
        // eagerly cleanup those BinaryValues that are not used outside the EnclosedExpr (to release memory)
        if (destroyable != null) {
            for (final BinaryValue bvd : destroyable) {
                binaryValueInstances.remove(bvd);
            }
        }
    }
}
/**
 * Tracks a binary value created during execution so it can be reference
 * counted and released. Lazily creates the tracking stack and ensures a
 * single {@link BinaryValueCleanupTask} is registered.
 *
 * @param binaryValue the binary value to track
 */
@Override
public void registerBinaryValueInstance(final BinaryValue binaryValue) {
    if (binaryValueInstances == null) {
        binaryValueInstances = new ArrayDeque<>();
    }
    // register the cleanup task at most once
    if (cleanupTasks.isEmpty() || cleanupTasks.stream().noneMatch(ct -> ct instanceof BinaryValueCleanupTask)) {
        cleanupTasks.add(new BinaryValueCleanupTask());
    }
    binaryValueInstances.push(binaryValue);
}
/**
 * Cleanup Task which is responsible for releasing the streams
 * of any {@link BinaryValue} which have been used during
 * query execution
 */
public static class BinaryValueCleanupTask implements CleanupTask {
    /**
     * Closes every tracked binary value accepted by the predicate and removes
     * it from the context's tracking stack.
     *
     * @param context the owning query context
     * @param predicate selects which binary values to release
     */
    @Override
    public void cleanup(final XQueryContext context, final Predicate<Object> predicate) {
        if (context.binaryValueInstances != null) {
            List<BinaryValue> removable = null;
            for (final BinaryValue bv : context.binaryValueInstances) {
                try {
                    if (predicate.test(bv)) {
                        bv.close();
                        // collected separately to avoid mutating the deque mid-iteration
                        if (removable == null) {
                            removable = new ArrayList<>();
                        }
                        removable.add(bv);
                    }
                } catch (final IOException e) {
                    LOG.error("Unable to close binary value: " + e.getMessage(), e);
                }
            }
            if (removable != null) {
                for (final BinaryValue bv : removable) {
                    context.binaryValueInstances.remove(bv);
                }
            }
        }
    }
}
/**
 * @return the configured binary cache class name
 */
@Override
public String getCacheClass() {
    return (String) getBroker().getConfiguration().getProperty(Configuration.BINARY_CACHE_CLASS_PROPERTY);
}

/**
 * Stops tracking a binary value (e.g. after it has been fully released).
 *
 * @param value the binary value to forget
 */
public void destroyBinaryValue(final BinaryValue value) {
    if (binaryValueInstances != null) {
        binaryValueInstances.remove(value);
    }
}

/**
 * Sets the XQuery language version in effect for this context.
 *
 * @param version the version number
 */
public void setXQueryVersion(int version) {
    xqueryVersion = version;
}

/**
 * @return the XQuery language version in effect for this context
 */
public int getXQueryVersion() {
    return xqueryVersion;
}

/**
 * @return the source of the query being executed, or null if not set
 */
@Override
public Source getSource() {
    return source;
}

/**
 * Sets the source of the query being executed.
 *
 * @param source the query source
 */
@Override
public void setSource(final Source source) {
    this.source = source;
}
/**
 * NOTE: the {@link #unsubscribe()} method can be called
 * from {@link org.exist.storage.NotificationService#unsubscribe(UpdateListener)}
 * by another thread, so this class needs to be thread-safe.
 */
@ThreadSafe
private static class ContextUpdateListener implements UpdateListener {

    /*
     * We use Concurrent safe data structures here, so that we don't have
     * to block any calling threads.
     *
     * The AtomicReference enables us to quickly clear the listeners
     * in #unsubscribe() and maintain happens-before integrity whilst
     * unsubcribing them. The CopyOnWriteArrayList allows
     * us to add listeners whilst iterating over a snapshot
     * of existing iterators in other methods.
     */
    private final AtomicReference<List<UpdateListener>> listeners = new AtomicReference<>(new CopyOnWriteArrayList<>());

    /** Adds a delegate listener to the current listener list. */
    private void addListener(final UpdateListener listener) {
        listeners.get().add(listener);
    }

    /** Fans out the document-updated event to all delegates. */
    @Override
    public void documentUpdated(final DocumentImpl document, final int event) {
        listeners.get().forEach(listener -> listener.documentUpdated(document, event));
    }

    /**
     * Atomically swaps in a fresh, empty listener list, then unsubscribes
     * every listener captured in the old list exactly once.
     */
    @Override
    public void unsubscribe() {
        List<UpdateListener> prev = listeners.get();
        while (!listeners.compareAndSet(prev, new CopyOnWriteArrayList<>())) {
            prev = listeners.get();
        }
        prev.forEach(UpdateListener::unsubscribe);
    }

    /** Fans out the node-moved event to all delegates. */
    @Override
    public void nodeMoved(final NodeId oldNodeId, final NodeHandle newNode) {
        listeners.get().forEach(listener -> listener.nodeMoved(oldNodeId, newNode));
    }

    /** Logs the listener count and delegates debug output to each listener. */
    @Override
    public void debug() {
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("XQueryContext: %s document update listeners", listeners.get().size()));
        }
        listeners.get().forEach(UpdateListener::debug);
    }
}
// Tasks to run when the context is cleaned up after query execution.
private final List<CleanupTask> cleanupTasks = new ArrayList<>();

/**
 * Registers a task to be run by {@link #runCleanupTasks(Predicate)}.
 *
 * @param cleanupTask the task to register
 */
public void registerCleanupTask(final CleanupTask cleanupTask) {
    cleanupTasks.add(cleanupTask);
}

/**
 * A unit of cleanup work executed when the context is released.
 */
public interface CleanupTask {
    void cleanup(final XQueryContext context, final Predicate<Object> predicate);
}

/**
 * Runs all registered cleanup tasks, passing each the given predicate.
 * Failures are logged and ignored so every task gets a chance to run.
 *
 * @param predicate selects which resources each task should release
 */
@Override
public void runCleanupTasks(final Predicate<Object> predicate) {
    for (final CleanupTask cleanupTask : cleanupTasks) {
        try {
            cleanupTask.cleanup(this, predicate);
        } catch (final Throwable t) {
            LOG.error("Cleaning up XQueryContext: Ignoring: " + t.getMessage(), t);
        }
    }
    // now it is safe to clear the cleanup tasks list as we know they have run
    // do not move this anywhere else
    cleanupTasks.clear();
}
/**
 * Immutable holder for the HTTP request/response/session associated with a
 * query execution.
 */
@Immutable
public static class HttpContext {
    private final RequestWrapper request;
    private final ResponseWrapper response;
    private final SessionWrapper session;

    /**
     * @param request the HTTP request
     * @param response the HTTP response
     * @param session the HTTP session; may be null
     */
    public HttpContext(final RequestWrapper request, final ResponseWrapper response, final SessionWrapper session) {
        this.request = request;
        this.response = response;
        this.session = session;
    }

    /**
     * Convenience constructor that takes the session from the request.
     * The session may be null because getSession(false) does not create one.
     *
     * @param request the HTTP request
     * @param response the HTTP response
     */
    public HttpContext(final RequestWrapper request, final ResponseWrapper response) {
        this.request = request;
        this.response = response;
        this.session = request.getSession(false);
    }

    public RequestWrapper getRequest() {
        return request;
    }

    public ResponseWrapper getResponse() {
        return response;
    }

    /** @return the HTTP session; may be null */
    public SessionWrapper getSession() {
        return session;
    }

    /**
     * Returns a new HttpContext with the new session set.
     *
     * The request and response are referenced from this object.
     *
     * @param newSession the new session to set.
     * @return the new HttpContext.
     */
    public HttpContext setSession(final SessionWrapper newSession) {
        return new HttpContext(request, response, newSession);
    }
}
}
|
[optimisation] Don't create unnecessary HashMaps when pushing in scope namespaces
|
exist-core/src/main/java/org/exist/xquery/XQueryContext.java
|
[optimisation] Don't create unnecessary HashMaps when pushing in scope namespaces
|
|
Java
|
apache-2.0
|
2a61bbeb6408c5d368b615366573bf6fcd76e623
| 0
|
eldevanjr/nfe,wmixvideo/nfe,granella/nfe,isaiastavares/nfe,fincatto/nfe,caiocteodoro/nfe,jefperito/nfe,danieldhp/nfe,klutzer/nfe,fauker/nfe
|
package com.fincatto.nfe310.webservices;
import com.fincatto.nfe310.NFeConfig;
import com.fincatto.nfe310.classes.NFUnidadeFederativa;
import com.fincatto.nfe310.classes.cadastro.NFRetornoConsultaCadastro;
import com.fincatto.nfe310.classes.evento.NFEnviaEventoRetorno;
import com.fincatto.nfe310.classes.evento.inutilizacao.NFRetornoEventoInutilizacao;
import com.fincatto.nfe310.classes.lote.consulta.NFLoteConsultaRetorno;
import com.fincatto.nfe310.classes.lote.envio.NFLoteEnvio;
import com.fincatto.nfe310.classes.lote.envio.NFLoteEnvioRetorno;
import com.fincatto.nfe310.classes.lote.envio.NFLoteIndicadorProcessamento;
import com.fincatto.nfe310.classes.nota.consulta.NFNotaConsultaRetorno;
import com.fincatto.nfe310.classes.statusservico.consulta.NFStatusServicoConsultaRetorno;
import com.fincatto.nfe310.validadores.xsd.XMLValidador;
import java.io.IOException;
/**
 * Facade over the individual NF-e (Brazilian electronic invoice) SEFAZ
 * web-service clients: batch send/query, service status, note query,
 * correction letter, cancellation, registry query and number invalidation.
 */
public class WSFacade {

    private final WSLoteEnvio wsLoteEnvio;
    private final WSLoteConsulta wsLoteConsulta;
    private final WSStatusConsulta wsStatusConsulta;
    private final WSNotaConsulta wsNotaConsulta;
    private final WSCartaCorrecao wsCartaCorrecao;
    private final WSCancelamento wsCancelamento;
    private final WSConsultaCadastro wsConsultaCadastro;
    private final WSInutilizacao wsInutilizacao;

    /**
     * Configures JVM-wide SSL/TLS system properties from the given config and
     * instantiates the individual web-service clients.
     *
     * NOTE(review): enabling "SSLv3" is insecure (POODLE); presumably needed
     * for compatibility with a specific SEFAZ endpoint -- confirm before keeping.
     * NOTE(review): System.setProperty mutates global JVM state, so two
     * facades with different configs will interfere with each other.
     *
     * @param config certificate paths, passwords and environment settings
     * @throws IOException if the certificate resources cannot be read
     */
    public WSFacade(final NFeConfig config) throws IOException {
        System.setProperty("jdk.tls.client.protocols", "SSLv3,TLSv1");
        //System.setProperty("jdk.tls.client.protocols", "TLSv1,TLSv1.1,TLSv1.2");
        //System.setProperty("https.protocols", "TLSv1,TLSv1.1,TLSv1.2");
        System.setProperty("java.protocol.handler.pkgs", "com.sun.net.ssl.internal.www.protocol");
        System.setProperty("javax.net.ssl.trustStoreType", "JKS");
        System.setProperty("javax.net.ssl.trustStore", config.getCadeiaCertificados().getAbsolutePath());
        System.setProperty("javax.net.ssl.keyStoreType", "PKCS12");
        System.setProperty("javax.net.ssl.keyStore", config.getCertificado().getAbsolutePath());
        System.setProperty("javax.net.ssl.keyStorePassword", config.getCertificadoSenha());
        this.wsLoteEnvio = new WSLoteEnvio(config);
        this.wsLoteConsulta = new WSLoteConsulta(config);
        this.wsStatusConsulta = new WSStatusConsulta(config);
        this.wsNotaConsulta = new WSNotaConsulta(config);
        this.wsCartaCorrecao = new WSCartaCorrecao(config);
        this.wsCancelamento = new WSCancelamento(config);
        this.wsConsultaCadastro = new WSConsultaCadastro(config);
        this.wsInutilizacao = new WSInutilizacao(config);
    }

    /**
     * Validates the batch XML against the XSD and sends it asynchronously.
     * Synchronous processing is not supported and is rejected up front.
     * NOTE(review): "throws Exception" is overly broad on all methods below.
     */
    public NFLoteEnvioRetorno enviaLote(final NFLoteEnvio lote) throws Exception {
        XMLValidador.validaLote(lote.toString());
        if (lote.getIndicadorProcessamento().equals(NFLoteIndicadorProcessamento.PROCESSAMENTO_SINCRONO)) {
            throw new IllegalStateException("Nao existe ainda a forma de envio sincrona, faca o envio de forma assincrona");
        }
        return this.wsLoteEnvio.enviaLote(lote);
    }

    /** Queries the processing status of a previously sent batch by receipt number. */
    public NFLoteConsultaRetorno consultaLote(final String numeroRecibo) throws Exception {
        return this.wsLoteConsulta.consultaLote(numeroRecibo);
    }

    /** Queries the SEFAZ service status for the given federative unit. */
    public NFStatusServicoConsultaRetorno consultaStatus(final NFUnidadeFederativa uf) throws Exception {
        return this.wsStatusConsulta.consultaStatus(uf);
    }

    /** Queries an invoice by its access key. */
    public NFNotaConsultaRetorno consultaNota(final String chaveDeAcesso) throws Exception {
        return this.wsNotaConsulta.consultaNota(chaveDeAcesso);
    }

    /** Submits a correction letter (carta de correcao) event for an invoice. */
    public NFEnviaEventoRetorno corrigeNota(final String chaveDeAcesso, final String textoCorrecao, final int numeroSequencialEvento) throws Exception {
        return this.wsCartaCorrecao.corrigeNota(chaveDeAcesso, textoCorrecao, numeroSequencialEvento);
    }

    /** Cancels an authorized invoice, giving the protocol number and a reason. */
    public NFEnviaEventoRetorno cancelaNota(final String chaveAcesso, final String numeroProtocolo, final String motivo) throws Exception {
        return this.wsCancelamento.cancelaNota(chaveAcesso, numeroProtocolo, motivo);
    }

    /** Invalidates an unused range of invoice numbers for a series. */
    public NFRetornoEventoInutilizacao inutilizaNota(final int anoInutilizacaoNumeracao, final String cnpjEmitente, final String serie, final String numeroInicial, final String numeroFinal, final String justificativa) throws Exception {
        return this.wsInutilizacao.inutilizaNota(anoInutilizacaoNumeracao, cnpjEmitente, serie, numeroInicial, numeroFinal, justificativa);
    }

    /** Queries the taxpayer registry for a CNPJ in the given federative unit. */
    public NFRetornoConsultaCadastro consultaCadastro(final String cnpj, final NFUnidadeFederativa uf) throws Exception {
        return this.wsConsultaCadastro.consultaCadastro(cnpj, uf);
    }
}
|
src/main/java/com/fincatto/nfe310/webservices/WSFacade.java
|
package com.fincatto.nfe310.webservices;
import com.fincatto.nfe310.NFeConfig;
import com.fincatto.nfe310.classes.NFUnidadeFederativa;
import com.fincatto.nfe310.classes.cadastro.NFRetornoConsultaCadastro;
import com.fincatto.nfe310.classes.evento.NFEnviaEventoRetorno;
import com.fincatto.nfe310.classes.evento.inutilizacao.NFRetornoEventoInutilizacao;
import com.fincatto.nfe310.classes.lote.consulta.NFLoteConsultaRetorno;
import com.fincatto.nfe310.classes.lote.envio.NFLoteEnvio;
import com.fincatto.nfe310.classes.lote.envio.NFLoteEnvioRetorno;
import com.fincatto.nfe310.classes.lote.envio.NFLoteIndicadorProcessamento;
import com.fincatto.nfe310.classes.nota.consulta.NFNotaConsultaRetorno;
import com.fincatto.nfe310.classes.statusservico.consulta.NFStatusServicoConsultaRetorno;
import com.fincatto.nfe310.validadores.xsd.XMLValidador;
import java.io.IOException;
/**
 * Facade over the individual NF-e (Brazilian electronic invoice) SEFAZ
 * web-service clients: batch send/query, service status, note query,
 * correction letter, cancellation, registry query and number invalidation.
 */
public class WSFacade {

    private final WSLoteEnvio wsLoteEnvio;
    private final WSLoteConsulta wsLoteConsulta;
    private final WSStatusConsulta wsStatusConsulta;
    private final WSNotaConsulta wsNotaConsulta;
    private final WSCartaCorrecao wsCartaCorrecao;
    private final WSCancelamento wsCancelamento;
    private final WSConsultaCadastro wsConsultaCadastro;
    private final WSInutilizacao wsInutilizacao;

    /**
     * Configures JVM-wide SSL/TLS system properties (TLS 1.0-1.2 only) from
     * the given config and instantiates the individual web-service clients.
     *
     * NOTE(review): System.setProperty mutates global JVM state, so two
     * facades with different configs will interfere with each other.
     *
     * @param config certificate paths, passwords and environment settings
     * @throws IOException if the certificate resources cannot be read
     */
    public WSFacade(final NFeConfig config) throws IOException {
        //System.setProperty("jdk.tls.client.protocols", "SSLv3,TLSv1");
        System.setProperty("jdk.tls.client.protocols", "TLSv1,TLSv1.1,TLSv1.2");
        System.setProperty("https.protocols", "TLSv1,TLSv1.1,TLSv1.2");
        System.setProperty("java.protocol.handler.pkgs", "com.sun.net.ssl.internal.www.protocol");
        System.setProperty("javax.net.ssl.trustStoreType", "JKS");
        System.setProperty("javax.net.ssl.trustStore", config.getCadeiaCertificados().getAbsolutePath());
        System.setProperty("javax.net.ssl.keyStoreType", "PKCS12");
        System.setProperty("javax.net.ssl.keyStore", config.getCertificado().getAbsolutePath());
        System.setProperty("javax.net.ssl.keyStorePassword", config.getCertificadoSenha());
        this.wsLoteEnvio = new WSLoteEnvio(config);
        this.wsLoteConsulta = new WSLoteConsulta(config);
        this.wsStatusConsulta = new WSStatusConsulta(config);
        this.wsNotaConsulta = new WSNotaConsulta(config);
        this.wsCartaCorrecao = new WSCartaCorrecao(config);
        this.wsCancelamento = new WSCancelamento(config);
        this.wsConsultaCadastro = new WSConsultaCadastro(config);
        this.wsInutilizacao = new WSInutilizacao(config);
    }

    /**
     * Validates the batch XML against the XSD and sends it asynchronously.
     * Synchronous processing is not supported and is rejected up front.
     * NOTE(review): "throws Exception" is overly broad on all methods below.
     */
    public NFLoteEnvioRetorno enviaLote(final NFLoteEnvio lote) throws Exception {
        XMLValidador.validaLote(lote.toString());
        if (lote.getIndicadorProcessamento().equals(NFLoteIndicadorProcessamento.PROCESSAMENTO_SINCRONO)) {
            throw new IllegalStateException("Nao existe ainda a forma de envio sincrona, faca o envio de forma assincrona");
        }
        return this.wsLoteEnvio.enviaLote(lote);
    }

    /** Queries the processing status of a previously sent batch by receipt number. */
    public NFLoteConsultaRetorno consultaLote(final String numeroRecibo) throws Exception {
        return this.wsLoteConsulta.consultaLote(numeroRecibo);
    }

    /** Queries the SEFAZ service status for the given federative unit. */
    public NFStatusServicoConsultaRetorno consultaStatus(final NFUnidadeFederativa uf) throws Exception {
        return this.wsStatusConsulta.consultaStatus(uf);
    }

    /** Queries an invoice by its access key. */
    public NFNotaConsultaRetorno consultaNota(final String chaveDeAcesso) throws Exception {
        return this.wsNotaConsulta.consultaNota(chaveDeAcesso);
    }

    /** Submits a correction letter (carta de correcao) event for an invoice. */
    public NFEnviaEventoRetorno corrigeNota(final String chaveDeAcesso, final String textoCorrecao, final int numeroSequencialEvento) throws Exception {
        return this.wsCartaCorrecao.corrigeNota(chaveDeAcesso, textoCorrecao, numeroSequencialEvento);
    }

    /** Cancels an authorized invoice, giving the protocol number and a reason. */
    public NFEnviaEventoRetorno cancelaNota(final String chaveAcesso, final String numeroProtocolo, final String motivo) throws Exception {
        return this.wsCancelamento.cancelaNota(chaveAcesso, numeroProtocolo, motivo);
    }

    /** Invalidates an unused range of invoice numbers for a series. */
    public NFRetornoEventoInutilizacao inutilizaNota(final int anoInutilizacaoNumeracao, final String cnpjEmitente, final String serie, final String numeroInicial, final String numeroFinal, final String justificativa) throws Exception {
        return this.wsInutilizacao.inutilizaNota(anoInutilizacaoNumeracao, cnpjEmitente, serie, numeroInicial, numeroFinal, justificativa);
    }

    /** Queries the taxpayer registry for a CNPJ in the given federative unit. */
    public NFRetornoConsultaCadastro consultaCadastro(final String cnpj, final NFUnidadeFederativa uf) throws Exception {
        return this.wsConsultaCadastro.consultaCadastro(cnpj, uf);
    }
}
|
Deploy da nova versao do nfe para testar novo algoritmo de criptografia.
|
src/main/java/com/fincatto/nfe310/webservices/WSFacade.java
|
Deploy da nova versao do nfe para testar novo algoritmo de criptografia.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.